From 9a4fde97661595630ea840917ef83b4786f2e2d3 Mon Sep 17 00:00:00 2001 From: Alexander Strizhakov Date: Sun, 31 May 2020 10:46:02 +0300 Subject: [PATCH] Mogrify args as custom tuples --- lib/mix/tasks/pleroma/config.ex | 10 +- lib/pleroma/config/config_db.ex | 274 ++++---- lib/pleroma/config/transfer_task.ex | 12 +- lib/pleroma/config/type/atom.ex | 22 + lib/pleroma/config/type/binary_value.ex | 23 + .../controllers/config_controller.ex | 34 +- .../web/admin_api/views/config_view.ex | 21 +- test/config/config_db_test.exs | 587 +++++++----------- test/config/transfer_task_test.exs | 94 +-- test/support/factory.ex | 17 +- test/tasks/config_test.exs | 41 +- test/upload/filter/mogrify_test.exs | 8 +- .../controllers/config_controller_test.exs | 256 +++++--- 13 files changed, 620 insertions(+), 779 deletions(-) create mode 100644 lib/pleroma/config/type/atom.ex create mode 100644 lib/pleroma/config/type/binary_value.ex diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex index 5c9ef6904..f1b3a8766 100644 --- a/lib/mix/tasks/pleroma/config.ex +++ b/lib/mix/tasks/pleroma/config.ex @@ -72,8 +72,7 @@ defp create(group, settings) do group |> Pleroma.Config.Loader.filter_group(settings) |> Enum.each(fn {key, value} -> - key = inspect(key) - {:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value}) + {:ok, _} = ConfigDB.update_or_create(%{group: group, key: key, value: value}) shell_info("Settings for key #{key} migrated.") end) @@ -131,12 +130,9 @@ defp write_and_delete(config, file, delete?) do end defp write(config, file) do - value = - config.value - |> ConfigDB.from_binary() - |> inspect(limit: :infinity) + value = inspect(config.value, limit: :infinity) - IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n") + IO.write(file, "config #{inspect(config.group)}, #{inspect(config.key)}, #{value}\r\n\r\n") config end diff --git a/lib/pleroma/config/config_db.ex b/lib/pleroma/config/config_db.ex index 2b43d4c36..39b37c42e 100644 --- a/lib/pleroma/config/config_db.ex +++ b/lib/pleroma/config/config_db.ex @@ -6,7 +6,7 @@ defmodule Pleroma.ConfigDB do use Ecto.Schema import Ecto.Changeset - import Ecto.Query + import Ecto.Query, only: [select: 3] import Pleroma.Web.Gettext alias __MODULE__ @@ -14,16 +14,6 @@ defmodule Pleroma.ConfigDB do @type t :: %__MODULE__{} - @full_key_update [ - {:pleroma, :ecto_repos}, - {:quack, :meta}, - {:mime, :types}, - {:cors_plug, [:max_age, :methods, :expose, :headers]}, - {:auto_linker, :opts}, - {:swarm, :node_blacklist}, - {:logger, :backends} - ] - @full_subkey_update [ {:pleroma, :assets, :mascots}, {:pleroma, :emoji, :groups}, @@ -32,14 +22,10 @@ defmodule Pleroma.ConfigDB do {:pleroma, :mrf_keyword, :replace} ] - @regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u - - @delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}] - schema "config" do - field(:key, :string) - field(:group, :string) - field(:value, :binary) + field(:key, Pleroma.Config.Type.Atom) + field(:group, Pleroma.Config.Type.Atom) + field(:value, Pleroma.Config.Type.BinaryValue) field(:db, {:array, :string}, virtual: true, default: []) timestamps() @@ -51,10 +37,6 @@ def get_all_as_keyword do |> select([c], {c.group, c.key, c.value}) |> Repo.all() |> Enum.reduce([], fn {group, key, value}, acc -> - group = ConfigDB.from_string(group) - key = ConfigDB.from_string(key) - value = from_binary(value) - Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, 
[{key, value}])) end) end @@ -64,50 +46,41 @@ def get_by_params(params), do: Repo.get_by(ConfigDB, params) @spec changeset(ConfigDB.t(), map()) :: Changeset.t() def changeset(config, params \\ %{}) do - params = Map.put(params, :value, transform(params[:value])) - config |> cast(params, [:key, :group, :value]) |> validate_required([:key, :group, :value]) |> unique_constraint(:key, name: :config_group_key_index) end - @spec create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} - def create(params) do + defp create(params) do %ConfigDB{} |> changeset(params) |> Repo.insert() end - @spec update(ConfigDB.t(), map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} - def update(%ConfigDB{} = config, %{value: value}) do + defp update(%ConfigDB{} = config, %{value: value}) do config |> changeset(%{value: value}) |> Repo.update() end - @spec get_db_keys(ConfigDB.t()) :: [String.t()] - def get_db_keys(%ConfigDB{} = config) do - config.value - |> ConfigDB.from_binary() - |> get_db_keys(config.key) - end - @spec get_db_keys(keyword(), any()) :: [String.t()] def get_db_keys(value, key) do - if Keyword.keyword?(value) do - value |> Keyword.keys() |> Enum.map(&convert(&1)) - else - [convert(key)] - end + keys = + if Keyword.keyword?(value) do + Keyword.keys(value) + else + [key] + end + + Enum.map(keys, &to_json_types(&1)) end @spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword() def merge_group(group, key, old_value, new_value) do - new_keys = to_map_set(new_value) + new_keys = to_mapset(new_value) - intersect_keys = - old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list() + intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list() merged_value = ConfigDB.merge(old_value, new_value) @@ -120,12 +93,10 @@ def merge_group(group, key, old_value, new_value) do [] end) |> List.flatten() - |> Enum.reduce(merged_value, fn subkey, acc -> - Keyword.put(acc, subkey, new_value[subkey]) - end) + |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1])) end - defp to_map_set(keyword) do + defp to_mapset(keyword) do keyword |> Keyword.keys() |> MapSet.new() @@ -159,43 +130,39 @@ defp deep_merge(_key, value1, value2) do @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} def update_or_create(params) do + params = Map.put(params, :value, to_elixir_types(params[:value])) search_opts = Map.take(params, [:group, :key]) with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), - {:partial_update, true, config} <- - {:partial_update, can_be_partially_updated?(config), config}, - old_value <- from_binary(config.value), - transformed_value <- do_transform(params[:value]), - {:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config}, - new_value <- - merge_group( - ConfigDB.from_string(config.group), - ConfigDB.from_string(config.key), - old_value, - transformed_value - ) do - ConfigDB.update(config, %{value: new_value}) + {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config}, + {_, true, config} <- {:can_be_merged, is_list(params[:value]), config} do + new_value = merge_group(config.group, config.key, config.value, params[:value]) + update(config, %{value: new_value}) else {reason, false, config} when reason in [:partial_update, :can_be_merged] -> - ConfigDB.update(config, params) + update(config, params) nil -> - ConfigDB.create(params) + create(params) end end defp can_be_partially_updated?(%ConfigDB{} = config), do: not 
only_full_update?(config) - defp only_full_update?(%ConfigDB{} = config) do - config_group = ConfigDB.from_string(config.group) - config_key = ConfigDB.from_string(config.key) + defp only_full_update?(%ConfigDB{group: group, key: key}) do + full_key_update = [ + {:pleroma, :ecto_repos}, + {:quack, :meta}, + {:mime, :types}, + {:cors_plug, [:max_age, :methods, :expose, :headers]}, + {:auto_linker, :opts}, + {:swarm, :node_blacklist}, + {:logger, :backends} + ] - Enum.any?(@full_key_update, fn - {group, key} when is_list(key) -> - config_group == group and config_key in key - - {group, key} -> - config_group == group and config_key == key + Enum.any?(full_key_update, fn + {s_group, s_key} -> + group == s_group and ((is_list(s_key) and key in s_key) or key == s_key) end) end @@ -205,11 +172,10 @@ def delete(params) do with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), {config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]}, - old_value <- from_binary(config.value), - keys <- Enum.map(sub_keys, &do_transform_string(&1)), - {:partial_remove, config, new_value} when new_value != [] <- - {:partial_remove, config, Keyword.drop(old_value, keys)} do - ConfigDB.update(config, %{value: new_value}) + keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)), + {_, config, new_value} when new_value != [] <- + {:partial_remove, config, Keyword.drop(config.value, keys)} do + update(config, %{value: new_value}) else {:partial_remove, config, []} -> Repo.delete(config) @@ -225,37 +191,32 @@ def delete(params) do end end - @spec from_binary(binary()) :: term() - def from_binary(binary), do: :erlang.binary_to_term(binary) - - @spec from_binary_with_convert(binary()) :: any() - def from_binary_with_convert(binary) do - binary - |> from_binary() - |> do_convert() + @spec to_json_types(term()) :: map() | list() | boolean() | String.t() + def to_json_types(entity) when is_list(entity) do + Enum.map(entity, &to_json_types/1) end - @spec from_string(String.t()) :: atom() | no_return() - def from_string(string), do: do_transform_string(string) + def to_json_types(%Regex{} = entity), do: inspect(entity) - @spec convert(any()) :: any() - def convert(entity), do: do_convert(entity) - - defp do_convert(entity) when is_list(entity) do - for v <- entity, into: [], do: do_convert(v) + def to_json_types(entity) when is_map(entity) do + Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end) end - defp do_convert(%Regex{} = entity), do: inspect(entity) + def to_json_types({:args, args}) when is_list(args) do + arguments = + Enum.map(args, fn + arg when is_tuple(arg) -> inspect(arg) + arg -> to_json_types(arg) + end) - defp do_convert(entity) when is_map(entity) do - for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)} + %{"tuple" => [":args", arguments]} end - defp do_convert({:proxy_url, {type, :localhost, port}}) do - %{"tuple" => [":proxy_url", %{"tuple" => [do_convert(type), "localhost", port]}]} + def to_json_types({:proxy_url, {type, :localhost, port}}) do + %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]} end - defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do + def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do ip = host |> :inet_parse.ntoa() @@ -264,66 +225,64 @@ defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do %{ "tuple" => [ ":proxy_url", - %{"tuple" => [do_convert(type), ip, port]} + %{"tuple" => [to_json_types(type), ip, port]} ] } end - defp 
do_convert({:proxy_url, {type, host, port}}) do + def to_json_types({:proxy_url, {type, host, port}}) do %{ "tuple" => [ ":proxy_url", - %{"tuple" => [do_convert(type), to_string(host), port]} + %{"tuple" => [to_json_types(type), to_string(host), port]} ] } end - defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]} + def to_json_types({:partial_chain, entity}), + do: %{"tuple" => [":partial_chain", inspect(entity)]} - defp do_convert(entity) when is_tuple(entity) do + def to_json_types(entity) when is_tuple(entity) do value = entity |> Tuple.to_list() - |> do_convert() + |> to_json_types() %{"tuple" => value} end - defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do + def to_json_types(entity) when is_binary(entity), do: entity + + def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do entity end - defp do_convert(entity) - when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do + def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do ":#{entity}" end - defp do_convert(entity) when is_atom(entity), do: inspect(entity) + def to_json_types(entity) when is_atom(entity), do: inspect(entity) - defp do_convert(entity) when is_binary(entity), do: entity + @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term() + def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do + arguments = + Enum.map(args, fn arg -> + if String.contains?(arg, ["{", "}"]) do + {elem, []} = Code.eval_string(arg) + elem + else + to_elixir_types(arg) + end + end) - @spec transform(any()) :: binary() | no_return() - def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do - entity - |> do_transform() - |> to_binary() + {:args, arguments} end - def transform(entity), do: to_binary(entity) - - @spec transform_with_out_binary(any()) :: any() - def transform_with_out_binary(entity), do: do_transform(entity) - - @spec to_binary(any()) :: binary() - def to_binary(entity), do: :erlang.term_to_binary(entity) - - defp do_transform(%Regex{} = entity), do: entity - - defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do - {:proxy_url, {do_transform_string(type), parse_host(host), port}} + def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do + {:proxy_url, {string_to_elixir_types(type), parse_host(host), port}} end - defp do_transform(%{"tuple" => [":partial_chain", entity]}) do + def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do {partial_chain, []} = entity |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "") @@ -332,25 +291,51 @@ defp do_transform(%{"tuple" => [":partial_chain", entity]}) do {:partial_chain, partial_chain} end - defp do_transform(%{"tuple" => entity}) do - Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end) + def to_elixir_types(%{"tuple" => entity}) do + Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1))) end - defp do_transform(entity) when is_map(entity) do - for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)} + def to_elixir_types(entity) when is_map(entity) do + Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end) end - defp do_transform(entity) when is_list(entity) do - for v <- entity, into: [], do: do_transform(v) + def to_elixir_types(entity) when is_list(entity) do + Enum.map(entity, &to_elixir_types/1) end - defp 
do_transform(entity) when is_binary(entity) do + def to_elixir_types(entity) when is_binary(entity) do entity |> String.trim() - |> do_transform_string() + |> string_to_elixir_types() end - defp do_transform(entity), do: entity + def to_elixir_types(entity), do: entity + + @spec string_to_elixir_types(String.t()) :: + atom() | Regex.t() | module() | String.t() | no_return() + def string_to_elixir_types("~r" <> _pattern = regex) do + pattern = + ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u + + delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}] + + with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <- + Regex.named_captures(pattern, regex), + {:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter), + {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do + result + end + end + + def string_to_elixir_types(":" <> atom), do: String.to_atom(atom) + + def string_to_elixir_types(value) do + if is_module_name?(value) do + String.to_existing_atom("Elixir." <> value) + else + value + end + end defp parse_host("localhost"), do: :localhost @@ -387,25 +372,6 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do end end - defp do_transform_string("~r" <> _pattern = regex) do - with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <- - Regex.named_captures(@regex, regex), - {:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter), - {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do - result - end - end - - defp do_transform_string(":" <> atom), do: String.to_atom(atom) - - defp do_transform_string(value) do - if is_module_name?(value) do - String.to_existing_atom("Elixir." <> value) - else - value - end - end - @spec is_module_name?(String.t()) :: boolean() def is_module_name?(string) do Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or diff --git a/lib/pleroma/config/transfer_task.ex b/lib/pleroma/config/transfer_task.ex index c02b70e96..eb86b8ff4 100644 --- a/lib/pleroma/config/transfer_task.ex +++ b/lib/pleroma/config/transfer_task.ex @@ -28,10 +28,6 @@ defmodule Pleroma.Config.TransferTask do {:pleroma, Pleroma.Captcha, [:seconds_valid]}, {:pleroma, Pleroma.Upload, [:proxy_remote]}, {:pleroma, :instance, [:upload_limit]}, - {:pleroma, :email_notifications, [:digest]}, - {:pleroma, :oauth2, [:clean_expired_tokens]}, - {:pleroma, Pleroma.ActivityExpiration, [:enabled]}, - {:pleroma, Pleroma.ScheduledActivity, [:enabled]}, {:pleroma, :gopher, [:enabled]} ] @@ -48,7 +44,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? 
\\ true) do {logger, other} = (Repo.all(ConfigDB) ++ deleted_settings) - |> Enum.map(&transform_and_merge/1) + |> Enum.map(&merge_with_default/1) |> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end) logger @@ -92,11 +88,7 @@ defp maybe_set_pleroma_last(apps) do end end - defp transform_and_merge(%{group: group, key: key, value: value} = setting) do - group = ConfigDB.from_string(group) - key = ConfigDB.from_string(key) - value = ConfigDB.from_binary(value) - + defp merge_with_default(%{group: group, key: key, value: value} = setting) do default = Config.Holder.default_config(group, key) merged = diff --git a/lib/pleroma/config/type/atom.ex b/lib/pleroma/config/type/atom.ex new file mode 100644 index 000000000..387869284 --- /dev/null +++ b/lib/pleroma/config/type/atom.ex @@ -0,0 +1,22 @@ +defmodule Pleroma.Config.Type.Atom do + use Ecto.Type + + def type, do: :atom + + def cast(key) when is_atom(key) do + {:ok, key} + end + + def cast(key) when is_binary(key) do + {:ok, Pleroma.ConfigDB.string_to_elixir_types(key)} + end + + def cast(_), do: :error + + def load(key) do + {:ok, Pleroma.ConfigDB.string_to_elixir_types(key)} + end + + def dump(key) when is_atom(key), do: {:ok, inspect(key)} + def dump(_), do: :error +end diff --git a/lib/pleroma/config/type/binary_value.ex b/lib/pleroma/config/type/binary_value.ex new file mode 100644 index 000000000..17c5524a3 --- /dev/null +++ b/lib/pleroma/config/type/binary_value.ex @@ -0,0 +1,23 @@ +defmodule Pleroma.Config.Type.BinaryValue do + use Ecto.Type + + def type, do: :term + + def cast(value) when is_binary(value) do + if String.valid?(value) do + {:ok, value} + else + {:ok, :erlang.binary_to_term(value)} + end + end + + def cast(value), do: {:ok, value} + + def load(value) when is_binary(value) do + {:ok, :erlang.binary_to_term(value)} + end + + def dump(value) do + {:ok, :erlang.term_to_binary(value)} + end +end diff --git a/lib/pleroma/web/admin_api/controllers/config_controller.ex b/lib/pleroma/web/admin_api/controllers/config_controller.ex index d6e2019bc..7f60470cb 100644 --- a/lib/pleroma/web/admin_api/controllers/config_controller.ex +++ b/lib/pleroma/web/admin_api/controllers/config_controller.ex @@ -33,7 +33,11 @@ def descriptions(conn, _params) do def show(conn, %{only_db: true}) do with :ok <- configurable_from_database() do configs = Pleroma.Repo.all(ConfigDB) - render(conn, "index.json", %{configs: configs}) + + render(conn, "index.json", %{ + configs: configs, + need_reboot: Restarter.Pleroma.need_reboot?() + }) end end @@ -61,17 +65,20 @@ def show(conn, _params) do value end - %{ - group: ConfigDB.convert(group), - key: ConfigDB.convert(key), - value: ConfigDB.convert(merged_value) + %ConfigDB{ + group: group, + key: key, + value: merged_value } |> Pleroma.Maps.put_if_present(:db, db) end) end) |> List.flatten() - json(conn, %{configs: merged, need_reboot: Restarter.Pleroma.need_reboot?()}) + render(conn, "index.json", %{ + configs: merged, + need_reboot: Restarter.Pleroma.need_reboot?() + }) end end @@ -91,24 +98,17 @@ def update(%{body_params: %{configs: configs}} = conn, _) do {deleted, updated} = results - |> Enum.map(fn {:ok, config} -> - Map.put(config, :db, ConfigDB.get_db_keys(config)) - end) - |> Enum.split_with(fn config -> - Ecto.get_meta(config, :state) == :deleted + |> Enum.map(fn {:ok, %{key: key, value: value} = config} -> + Map.put(config, :db, ConfigDB.get_db_keys(value, key)) end) + |> Enum.split_with(&(Ecto.get_meta(&1, :state) == :deleted)) Config.TransferTask.load_and_update_env(deleted, 
false) if not Restarter.Pleroma.need_reboot?() do changed_reboot_settings? = (updated ++ deleted) - |> Enum.any?(fn config -> - group = ConfigDB.from_string(config.group) - key = ConfigDB.from_string(config.key) - value = ConfigDB.from_binary(config.value) - Config.TransferTask.pleroma_need_restart?(group, key, value) - end) + |> Enum.any?(&Config.TransferTask.pleroma_need_restart?(&1.group, &1.key, &1.value)) if changed_reboot_settings?, do: Restarter.Pleroma.need_reboot() end diff --git a/lib/pleroma/web/admin_api/views/config_view.ex b/lib/pleroma/web/admin_api/views/config_view.ex index 587ef760e..d2d8b5907 100644 --- a/lib/pleroma/web/admin_api/views/config_view.ex +++ b/lib/pleroma/web/admin_api/views/config_view.ex @@ -5,23 +5,20 @@ defmodule Pleroma.Web.AdminAPI.ConfigView do use Pleroma.Web, :view - def render("index.json", %{configs: configs} = params) do - map = %{ - configs: render_many(configs, __MODULE__, "show.json", as: :config) - } + alias Pleroma.ConfigDB - if params[:need_reboot] do - Map.put(map, :need_reboot, true) - else - map - end + def render("index.json", %{configs: configs} = params) do + %{ + configs: render_many(configs, __MODULE__, "show.json", as: :config), + need_reboot: params[:need_reboot] + } end def render("show.json", %{config: config}) do map = %{ - key: config.key, - group: config.group, - value: Pleroma.ConfigDB.from_binary_with_convert(config.value) + key: ConfigDB.to_json_types(config.key), + group: ConfigDB.to_json_types(config.group), + value: ConfigDB.to_json_types(config.value) } if config.db != [] do diff --git a/test/config/config_db_test.exs b/test/config/config_db_test.exs index 336de7359..a04575c6f 100644 --- a/test/config/config_db_test.exs +++ b/test/config/config_db_test.exs @@ -7,40 +7,28 @@ defmodule Pleroma.ConfigDBTest do import Pleroma.Factory alias Pleroma.ConfigDB - test "get_by_key/1" do + test "get_by_params/1" do config = insert(:config) insert(:config) assert config == ConfigDB.get_by_params(%{group: config.group, key: config.key}) end - test "create/1" do - {:ok, config} = ConfigDB.create(%{group: ":pleroma", key: ":some_key", value: "some_value"}) - assert config == ConfigDB.get_by_params(%{group: ":pleroma", key: ":some_key"}) - end - - test "update/1" do - config = insert(:config) - {:ok, updated} = ConfigDB.update(config, %{value: "some_value"}) - loaded = ConfigDB.get_by_params(%{group: config.group, key: config.key}) - assert loaded == updated - end - test "get_all_as_keyword/0" do saved = insert(:config) - insert(:config, group: ":quack", key: ":level", value: ConfigDB.to_binary(:info)) - insert(:config, group: ":quack", key: ":meta", value: ConfigDB.to_binary([:none])) + insert(:config, group: ":quack", key: ":level", value: :info) + insert(:config, group: ":quack", key: ":meta", value: [:none]) insert(:config, group: ":quack", key: ":webhook_url", - value: ConfigDB.to_binary("https://hooks.slack.com/services/KEY/some_val") + value: "https://hooks.slack.com/services/KEY/some_val" ) config = ConfigDB.get_all_as_keyword() assert config[:pleroma] == [ - {ConfigDB.from_string(saved.key), ConfigDB.from_binary(saved.value)} + {saved.key, saved.value} ] assert config[:quack][:level] == :info @@ -51,11 +39,11 @@ test "get_all_as_keyword/0" do describe "update_or_create/1" do test "common" do config = insert(:config) - key2 = "another_key" + key2 = :another_key params = [ - %{group: "pleroma", key: key2, value: "another_value"}, - %{group: config.group, key: config.key, value: "new_value"} + %{group: :pleroma, key: key2, 
value: "another_value"}, + %{group: :pleroma, key: config.key, value: "new_value"} ] assert Repo.all(ConfigDB) |> length() == 1 @@ -65,16 +53,16 @@ test "common" do assert Repo.all(ConfigDB) |> length() == 2 config1 = ConfigDB.get_by_params(%{group: config.group, key: config.key}) - config2 = ConfigDB.get_by_params(%{group: "pleroma", key: key2}) + config2 = ConfigDB.get_by_params(%{group: :pleroma, key: key2}) - assert config1.value == ConfigDB.transform("new_value") - assert config2.value == ConfigDB.transform("another_value") + assert config1.value == "new_value" + assert config2.value == "another_value" end test "partial update" do - config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: :val2)) + config = insert(:config, value: [key1: "val1", key2: :val2]) - {:ok, _config} = + {:ok, config} = ConfigDB.update_or_create(%{ group: config.group, key: config.key, @@ -83,15 +71,14 @@ test "partial update" do updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) - value = ConfigDB.from_binary(updated.value) - assert length(value) == 3 - assert value[:key1] == :val1 - assert value[:key2] == :val2 - assert value[:key3] == :val3 + assert config.value == updated.value + assert updated.value[:key1] == :val1 + assert updated.value[:key2] == :val2 + assert updated.value[:key3] == :val3 end test "deep merge" do - config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: [k1: :v1, k2: "v2"])) + config = insert(:config, value: [key1: "val1", key2: [k1: :v1, k2: "v2"]]) {:ok, config} = ConfigDB.update_or_create(%{ @@ -103,18 +90,15 @@ test "deep merge" do updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) assert config.value == updated.value - - value = ConfigDB.from_binary(updated.value) - assert value[:key1] == :val1 - assert value[:key2] == [k1: :v1, k2: :v2, k3: :v3] - assert value[:key3] == :val3 + assert updated.value[:key1] == :val1 + assert updated.value[:key2] == [k1: :v1, k2: :v2, k3: :v3] + assert updated.value[:key3] == :val3 end test "only full update for some keys" do - config1 = insert(:config, key: ":ecto_repos", value: ConfigDB.to_binary(repo: Pleroma.Repo)) + config1 = insert(:config, key: :ecto_repos, value: [repo: Pleroma.Repo]) - config2 = - insert(:config, group: ":cors_plug", key: ":max_age", value: ConfigDB.to_binary(18)) + config2 = insert(:config, group: :cors_plug, key: :max_age, value: 18) {:ok, _config} = ConfigDB.update_or_create(%{ @@ -133,8 +117,8 @@ test "only full update for some keys" do updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key}) updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key}) - assert ConfigDB.from_binary(updated1.value) == [another_repo: [Pleroma.Repo]] - assert ConfigDB.from_binary(updated2.value) == 777 + assert updated1.value == [another_repo: [Pleroma.Repo]] + assert updated2.value == 777 end test "full update if value is not keyword" do @@ -142,7 +126,7 @@ test "full update if value is not keyword" do insert(:config, group: ":tesla", key: ":adapter", - value: ConfigDB.to_binary(Tesla.Adapter.Hackney) + value: Tesla.Adapter.Hackney ) {:ok, _config} = @@ -154,20 +138,20 @@ test "full update if value is not keyword" do updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) - assert ConfigDB.from_binary(updated.value) == Tesla.Adapter.Httpc + assert updated.value == Tesla.Adapter.Httpc end test "only full update for some subkeys" do config1 = insert(:config, key: ":emoji", - value: ConfigDB.to_binary(groups: [a: 
1, b: 2], key: [a: 1]) + value: [groups: [a: 1, b: 2], key: [a: 1]] ) config2 = insert(:config, key: ":assets", - value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1]) + value: [mascots: [a: 1, b: 2], key: [a: 1]] ) {:ok, _config} = @@ -187,8 +171,8 @@ test "only full update for some subkeys" do updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key}) updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key}) - assert ConfigDB.from_binary(updated1.value) == [groups: [c: 3, d: 4], key: [a: 1, b: 2]] - assert ConfigDB.from_binary(updated2.value) == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]] + assert updated1.value == [groups: [c: 3, d: 4], key: [a: 1, b: 2]] + assert updated2.value == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]] end end @@ -206,14 +190,14 @@ test "full delete" do end test "partial subkeys delete" do - config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1])) + config = insert(:config, value: [groups: [a: 1, b: 2], key: [a: 1]]) {:ok, deleted} = ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]}) assert Ecto.get_meta(deleted, :state) == :loaded - assert deleted.value == ConfigDB.to_binary(key: [a: 1]) + assert deleted.value == [key: [a: 1]] updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) @@ -221,7 +205,7 @@ test "partial subkeys delete" do end test "full delete if remaining value after subkeys deletion is empty list" do - config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2])) + config = insert(:config, value: [groups: [a: 1, b: 2]]) {:ok, deleted} = ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]}) @@ -232,234 +216,159 @@ test "full delete if remaining value after subkeys deletion is empty list" do end end - describe "transform/1" do + describe "to_elixir_types/1" do test "string" do - binary = ConfigDB.transform("value as string") - assert binary == :erlang.term_to_binary("value as string") - assert ConfigDB.from_binary(binary) == "value as string" + assert ConfigDB.to_elixir_types("value as string") == "value as string" end test "boolean" do - binary = ConfigDB.transform(false) - assert binary == :erlang.term_to_binary(false) - assert ConfigDB.from_binary(binary) == false + assert ConfigDB.to_elixir_types(false) == false end test "nil" do - binary = ConfigDB.transform(nil) - assert binary == :erlang.term_to_binary(nil) - assert ConfigDB.from_binary(binary) == nil + assert ConfigDB.to_elixir_types(nil) == nil end test "integer" do - binary = ConfigDB.transform(150) - assert binary == :erlang.term_to_binary(150) - assert ConfigDB.from_binary(binary) == 150 + assert ConfigDB.to_elixir_types(150) == 150 end test "atom" do - binary = ConfigDB.transform(":atom") - assert binary == :erlang.term_to_binary(:atom) - assert ConfigDB.from_binary(binary) == :atom + assert ConfigDB.to_elixir_types(":atom") == :atom end test "ssl options" do - binary = ConfigDB.transform([":tlsv1", ":tlsv1.1", ":tlsv1.2"]) - assert binary == :erlang.term_to_binary([:tlsv1, :"tlsv1.1", :"tlsv1.2"]) - assert ConfigDB.from_binary(binary) == [:tlsv1, :"tlsv1.1", :"tlsv1.2"] + assert ConfigDB.to_elixir_types([":tlsv1", ":tlsv1.1", ":tlsv1.2"]) == [ + :tlsv1, + :"tlsv1.1", + :"tlsv1.2" + ] end test "pleroma module" do - binary = ConfigDB.transform("Pleroma.Bookmark") - assert binary == :erlang.term_to_binary(Pleroma.Bookmark) - assert ConfigDB.from_binary(binary) == Pleroma.Bookmark + assert ConfigDB.to_elixir_types("Pleroma.Bookmark") == 
Pleroma.Bookmark end test "pleroma string" do - binary = ConfigDB.transform("Pleroma") - assert binary == :erlang.term_to_binary("Pleroma") - assert ConfigDB.from_binary(binary) == "Pleroma" + assert ConfigDB.to_elixir_types("Pleroma") == "Pleroma" end test "phoenix module" do - binary = ConfigDB.transform("Phoenix.Socket.V1.JSONSerializer") - assert binary == :erlang.term_to_binary(Phoenix.Socket.V1.JSONSerializer) - assert ConfigDB.from_binary(binary) == Phoenix.Socket.V1.JSONSerializer + assert ConfigDB.to_elixir_types("Phoenix.Socket.V1.JSONSerializer") == + Phoenix.Socket.V1.JSONSerializer end test "tesla module" do - binary = ConfigDB.transform("Tesla.Adapter.Hackney") - assert binary == :erlang.term_to_binary(Tesla.Adapter.Hackney) - assert ConfigDB.from_binary(binary) == Tesla.Adapter.Hackney + assert ConfigDB.to_elixir_types("Tesla.Adapter.Hackney") == Tesla.Adapter.Hackney end test "ExSyslogger module" do - binary = ConfigDB.transform("ExSyslogger") - assert binary == :erlang.term_to_binary(ExSyslogger) - assert ConfigDB.from_binary(binary) == ExSyslogger + assert ConfigDB.to_elixir_types("ExSyslogger") == ExSyslogger end test "Quack.Logger module" do - binary = ConfigDB.transform("Quack.Logger") - assert binary == :erlang.term_to_binary(Quack.Logger) - assert ConfigDB.from_binary(binary) == Quack.Logger + assert ConfigDB.to_elixir_types("Quack.Logger") == Quack.Logger end test "Swoosh.Adapters modules" do - binary = ConfigDB.transform("Swoosh.Adapters.SMTP") - assert binary == :erlang.term_to_binary(Swoosh.Adapters.SMTP) - assert ConfigDB.from_binary(binary) == Swoosh.Adapters.SMTP - binary = ConfigDB.transform("Swoosh.Adapters.AmazonSES") - assert binary == :erlang.term_to_binary(Swoosh.Adapters.AmazonSES) - assert ConfigDB.from_binary(binary) == Swoosh.Adapters.AmazonSES + assert ConfigDB.to_elixir_types("Swoosh.Adapters.SMTP") == Swoosh.Adapters.SMTP + assert ConfigDB.to_elixir_types("Swoosh.Adapters.AmazonSES") == Swoosh.Adapters.AmazonSES end test "sigil" do - binary = ConfigDB.transform("~r[comp[lL][aA][iI][nN]er]") - assert binary == :erlang.term_to_binary(~r/comp[lL][aA][iI][nN]er/) - assert ConfigDB.from_binary(binary) == ~r/comp[lL][aA][iI][nN]er/ + assert ConfigDB.to_elixir_types("~r[comp[lL][aA][iI][nN]er]") == ~r/comp[lL][aA][iI][nN]er/ end test "link sigil" do - binary = ConfigDB.transform("~r/https:\/\/example.com/") - assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/) - assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/ + assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/") == ~r/https:\/\/example.com/ end test "link sigil with um modifiers" do - binary = ConfigDB.transform("~r/https:\/\/example.com/um") - assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/um) - assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/um + assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/um") == + ~r/https:\/\/example.com/um end test "link sigil with i modifier" do - binary = ConfigDB.transform("~r/https:\/\/example.com/i") - assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/i) - assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/i + assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/i") == ~r/https:\/\/example.com/i end test "link sigil with s modifier" do - binary = ConfigDB.transform("~r/https:\/\/example.com/s") - assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/s) - assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/s + assert 
ConfigDB.to_elixir_types("~r/https:\/\/example.com/s") == ~r/https:\/\/example.com/s end test "raise if valid delimiter not found" do assert_raise ArgumentError, "valid delimiter for Regex expression not found", fn -> - ConfigDB.transform("~r/https://[]{}<>\"'()|example.com/s") + ConfigDB.to_elixir_types("~r/https://[]{}<>\"'()|example.com/s") end end test "2 child tuple" do - binary = ConfigDB.transform(%{"tuple" => ["v1", ":v2"]}) - assert binary == :erlang.term_to_binary({"v1", :v2}) - assert ConfigDB.from_binary(binary) == {"v1", :v2} + assert ConfigDB.to_elixir_types(%{"tuple" => ["v1", ":v2"]}) == {"v1", :v2} end test "proxy tuple with localhost" do - binary = - ConfigDB.transform(%{ - "tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}] - }) - - assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, :localhost, 1234}}) - assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, :localhost, 1234}} + assert ConfigDB.to_elixir_types(%{ + "tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}] + }) == {:proxy_url, {:socks5, :localhost, 1234}} end test "proxy tuple with domain" do - binary = - ConfigDB.transform(%{ - "tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}] - }) - - assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, 'domain.com', 1234}}) - assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, 'domain.com', 1234}} + assert ConfigDB.to_elixir_types(%{ + "tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}] + }) == {:proxy_url, {:socks5, 'domain.com', 1234}} end test "proxy tuple with ip" do - binary = - ConfigDB.transform(%{ - "tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}] - }) - - assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}}) - assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}} + assert ConfigDB.to_elixir_types(%{ + "tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}] + }) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}} end test "tuple with n childs" do - binary = - ConfigDB.transform(%{ - "tuple" => [ - "v1", - ":v2", - "Pleroma.Bookmark", - 150, - false, - "Phoenix.Socket.V1.JSONSerializer" - ] - }) - - assert binary == - :erlang.term_to_binary( - {"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer} - ) - - assert ConfigDB.from_binary(binary) == - {"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer} + assert ConfigDB.to_elixir_types(%{ + "tuple" => [ + "v1", + ":v2", + "Pleroma.Bookmark", + 150, + false, + "Phoenix.Socket.V1.JSONSerializer" + ] + }) == {"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer} end test "map with string key" do - binary = ConfigDB.transform(%{"key" => "value"}) - assert binary == :erlang.term_to_binary(%{"key" => "value"}) - assert ConfigDB.from_binary(binary) == %{"key" => "value"} + assert ConfigDB.to_elixir_types(%{"key" => "value"}) == %{"key" => "value"} end test "map with atom key" do - binary = ConfigDB.transform(%{":key" => "value"}) - assert binary == :erlang.term_to_binary(%{key: "value"}) - assert ConfigDB.from_binary(binary) == %{key: "value"} + assert ConfigDB.to_elixir_types(%{":key" => "value"}) == %{key: "value"} end test "list of strings" do - binary = ConfigDB.transform(["v1", "v2", "v3"]) - assert binary == :erlang.term_to_binary(["v1", "v2", "v3"]) - assert ConfigDB.from_binary(binary) == ["v1", "v2", "v3"] + assert 
ConfigDB.to_elixir_types(["v1", "v2", "v3"]) == ["v1", "v2", "v3"] end test "list of modules" do - binary = ConfigDB.transform(["Pleroma.Repo", "Pleroma.Activity"]) - assert binary == :erlang.term_to_binary([Pleroma.Repo, Pleroma.Activity]) - assert ConfigDB.from_binary(binary) == [Pleroma.Repo, Pleroma.Activity] + assert ConfigDB.to_elixir_types(["Pleroma.Repo", "Pleroma.Activity"]) == [ + Pleroma.Repo, + Pleroma.Activity + ] end test "list of atoms" do - binary = ConfigDB.transform([":v1", ":v2", ":v3"]) - assert binary == :erlang.term_to_binary([:v1, :v2, :v3]) - assert ConfigDB.from_binary(binary) == [:v1, :v2, :v3] + assert ConfigDB.to_elixir_types([":v1", ":v2", ":v3"]) == [:v1, :v2, :v3] end test "list of mixed values" do - binary = - ConfigDB.transform([ - "v1", - ":v2", - "Pleroma.Repo", - "Phoenix.Socket.V1.JSONSerializer", - 15, - false - ]) - - assert binary == - :erlang.term_to_binary([ - "v1", - :v2, - Pleroma.Repo, - Phoenix.Socket.V1.JSONSerializer, - 15, - false - ]) - - assert ConfigDB.from_binary(binary) == [ + assert ConfigDB.to_elixir_types([ + "v1", + ":v2", + "Pleroma.Repo", + "Phoenix.Socket.V1.JSONSerializer", + 15, + false + ]) == [ "v1", :v2, Pleroma.Repo, @@ -470,40 +379,23 @@ test "list of mixed values" do end test "simple keyword" do - binary = ConfigDB.transform([%{"tuple" => [":key", "value"]}]) - assert binary == :erlang.term_to_binary([{:key, "value"}]) - assert ConfigDB.from_binary(binary) == [{:key, "value"}] - assert ConfigDB.from_binary(binary) == [key: "value"] + assert ConfigDB.to_elixir_types([%{"tuple" => [":key", "value"]}]) == [key: "value"] end test "keyword with partial_chain key" do - binary = - ConfigDB.transform([%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}]) - - assert binary == :erlang.term_to_binary(partial_chain: &:hackney_connect.partial_chain/1) - assert ConfigDB.from_binary(binary) == [partial_chain: &:hackney_connect.partial_chain/1] + assert ConfigDB.to_elixir_types([ + %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]} + ]) == [partial_chain: &:hackney_connect.partial_chain/1] end test "keyword" do - binary = - ConfigDB.transform([ - %{"tuple" => [":types", "Pleroma.PostgresTypes"]}, - %{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]}, - %{"tuple" => [":migration_lock", nil]}, - %{"tuple" => [":key1", 150]}, - %{"tuple" => [":key2", "string"]} - ]) - - assert binary == - :erlang.term_to_binary( - types: Pleroma.PostgresTypes, - telemetry_event: [Pleroma.Repo.Instrumenter], - migration_lock: nil, - key1: 150, - key2: "string" - ) - - assert ConfigDB.from_binary(binary) == [ + assert ConfigDB.to_elixir_types([ + %{"tuple" => [":types", "Pleroma.PostgresTypes"]}, + %{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]}, + %{"tuple" => [":migration_lock", nil]}, + %{"tuple" => [":key1", 150]}, + %{"tuple" => [":key2", "string"]} + ]) == [ types: Pleroma.PostgresTypes, telemetry_event: [Pleroma.Repo.Instrumenter], migration_lock: nil, @@ -513,85 +405,55 @@ test "keyword" do end test "complex keyword with nested mixed childs" do - binary = - ConfigDB.transform([ - %{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]}, - %{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]}, - %{"tuple" => [":link_name", true]}, - %{"tuple" => [":proxy_remote", false]}, - %{"tuple" => [":common_map", %{":key" => "value"}]}, - %{ - "tuple" => [ - ":proxy_opts", - [ - %{"tuple" => [":redirect_on_failure", false]}, - %{"tuple" => [":max_body_length", 1_048_576]}, - %{ - "tuple" => 
[ - ":http", - [%{"tuple" => [":follow_redirect", true]}, %{"tuple" => [":pool", ":upload"]}] - ] - } - ] - ] - } - ]) - - assert binary == - :erlang.term_to_binary( - uploader: Pleroma.Uploaders.Local, - filters: [Pleroma.Upload.Filter.Dedupe], - link_name: true, - proxy_remote: false, - common_map: %{key: "value"}, - proxy_opts: [ - redirect_on_failure: false, - max_body_length: 1_048_576, - http: [ - follow_redirect: true, - pool: :upload + assert ConfigDB.to_elixir_types([ + %{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]}, + %{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]}, + %{"tuple" => [":link_name", true]}, + %{"tuple" => [":proxy_remote", false]}, + %{"tuple" => [":common_map", %{":key" => "value"}]}, + %{ + "tuple" => [ + ":proxy_opts", + [ + %{"tuple" => [":redirect_on_failure", false]}, + %{"tuple" => [":max_body_length", 1_048_576]}, + %{ + "tuple" => [ + ":http", + [ + %{"tuple" => [":follow_redirect", true]}, + %{"tuple" => [":pool", ":upload"]} + ] + ] + } ] ] - ) - - assert ConfigDB.from_binary(binary) == - [ - uploader: Pleroma.Uploaders.Local, - filters: [Pleroma.Upload.Filter.Dedupe], - link_name: true, - proxy_remote: false, - common_map: %{key: "value"}, - proxy_opts: [ - redirect_on_failure: false, - max_body_length: 1_048_576, - http: [ - follow_redirect: true, - pool: :upload - ] + } + ]) == [ + uploader: Pleroma.Uploaders.Local, + filters: [Pleroma.Upload.Filter.Dedupe], + link_name: true, + proxy_remote: false, + common_map: %{key: "value"}, + proxy_opts: [ + redirect_on_failure: false, + max_body_length: 1_048_576, + http: [ + follow_redirect: true, + pool: :upload ] ] + ] end test "common keyword" do - binary = - ConfigDB.transform([ - %{"tuple" => [":level", ":warn"]}, - %{"tuple" => [":meta", [":all"]]}, - %{"tuple" => [":path", ""]}, - %{"tuple" => [":val", nil]}, - %{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]} - ]) - - assert binary == - :erlang.term_to_binary( - level: :warn, - meta: [:all], - path: "", - val: nil, - webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE" - ) - - assert ConfigDB.from_binary(binary) == [ + assert ConfigDB.to_elixir_types([ + %{"tuple" => [":level", ":warn"]}, + %{"tuple" => [":meta", [":all"]]}, + %{"tuple" => [":path", ""]}, + %{"tuple" => [":val", nil]}, + %{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]} + ]) == [ level: :warn, meta: [:all], path: "", @@ -601,98 +463,73 @@ test "common keyword" do end test "complex keyword with sigil" do - binary = - ConfigDB.transform([ - %{"tuple" => [":federated_timeline_removal", []]}, - %{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]}, - %{"tuple" => [":replace", []]} - ]) - - assert binary == - :erlang.term_to_binary( - federated_timeline_removal: [], - reject: [~r/comp[lL][aA][iI][nN]er/], - replace: [] - ) - - assert ConfigDB.from_binary(binary) == - [federated_timeline_removal: [], reject: [~r/comp[lL][aA][iI][nN]er/], replace: []] + assert ConfigDB.to_elixir_types([ + %{"tuple" => [":federated_timeline_removal", []]}, + %{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]}, + %{"tuple" => [":replace", []]} + ]) == [ + federated_timeline_removal: [], + reject: [~r/comp[lL][aA][iI][nN]er/], + replace: [] + ] end test "complex keyword with tuples with more than 2 values" do - binary = - ConfigDB.transform([ - %{ - "tuple" => [ - ":http", - [ - %{ - "tuple" => [ - ":key1", - [ - %{ - "tuple" => [ - ":_", - [ - %{ - "tuple" => [ - "/api/v1/streaming", - 
"Pleroma.Web.MastodonAPI.WebsocketHandler", - [] - ] - }, - %{ - "tuple" => [ - "/websocket", - "Phoenix.Endpoint.CowboyWebSocket", - %{ - "tuple" => [ - "Phoenix.Transports.WebSocket", - %{ - "tuple" => [ - "Pleroma.Web.Endpoint", - "Pleroma.Web.UserSocket", - [] - ] - } - ] - } - ] - }, - %{ - "tuple" => [ - ":_", - "Phoenix.Endpoint.Cowboy2Handler", - %{"tuple" => ["Pleroma.Web.Endpoint", []]} - ] - } - ] - ] - } - ] - ] - } - ] - ] - } - ]) - - assert binary == - :erlang.term_to_binary( - http: [ - key1: [ - _: [ - {"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []}, - {"/websocket", Phoenix.Endpoint.CowboyWebSocket, - {Phoenix.Transports.WebSocket, - {Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, []}}}, - {:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}} - ] + assert ConfigDB.to_elixir_types([ + %{ + "tuple" => [ + ":http", + [ + %{ + "tuple" => [ + ":key1", + [ + %{ + "tuple" => [ + ":_", + [ + %{ + "tuple" => [ + "/api/v1/streaming", + "Pleroma.Web.MastodonAPI.WebsocketHandler", + [] + ] + }, + %{ + "tuple" => [ + "/websocket", + "Phoenix.Endpoint.CowboyWebSocket", + %{ + "tuple" => [ + "Phoenix.Transports.WebSocket", + %{ + "tuple" => [ + "Pleroma.Web.Endpoint", + "Pleroma.Web.UserSocket", + [] + ] + } + ] + } + ] + }, + %{ + "tuple" => [ + ":_", + "Phoenix.Endpoint.Cowboy2Handler", + %{"tuple" => ["Pleroma.Web.Endpoint", []]} + ] + } + ] + ] + } + ] + ] + } ] ] - ) - - assert ConfigDB.from_binary(binary) == [ + } + ]) == [ http: [ key1: [ {:_, diff --git a/test/config/transfer_task_test.exs b/test/config/transfer_task_test.exs index 473899d1d..f53829e09 100644 --- a/test/config/transfer_task_test.exs +++ b/test/config/transfer_task_test.exs @@ -6,9 +6,9 @@ defmodule Pleroma.Config.TransferTaskTest do use Pleroma.DataCase import ExUnit.CaptureLog + import Pleroma.Factory alias Pleroma.Config.TransferTask - alias Pleroma.ConfigDB setup do: clear_config(:configurable_from_database, true) @@ -19,31 +19,11 @@ test "transfer config values from db to env" do refute Application.get_env(:postgrex, :test_key) initial = Application.get_env(:logger, :level) - ConfigDB.create(%{ - group: ":pleroma", - key: ":test_key", - value: [live: 2, com: 3] - }) - - ConfigDB.create(%{ - group: ":idna", - key: ":test_key", - value: [live: 15, com: 35] - }) - - ConfigDB.create(%{ - group: ":quack", - key: ":test_key", - value: [:test_value1, :test_value2] - }) - - ConfigDB.create(%{ - group: ":postgrex", - key: ":test_key", - value: :value - }) - - ConfigDB.create(%{group: ":logger", key: ":level", value: :debug}) + insert(:config, key: :test_key, value: [live: 2, com: 3]) + insert(:config, group: :idna, key: :test_key, value: [live: 15, com: 35]) + insert(:config, group: :quack, key: :test_key, value: [:test_value1, :test_value2]) + insert(:config, group: :postgrex, key: :test_key, value: :value) + insert(:config, group: :logger, key: :level, value: :debug) TransferTask.start_link([]) @@ -66,17 +46,8 @@ test "transfer config values for 1 group and some keys" do level = Application.get_env(:quack, :level) meta = Application.get_env(:quack, :meta) - ConfigDB.create(%{ - group: ":quack", - key: ":level", - value: :info - }) - - ConfigDB.create(%{ - group: ":quack", - key: ":meta", - value: [:none] - }) + insert(:config, group: :quack, key: :level, value: :info) + insert(:config, group: :quack, key: :meta, value: [:none]) TransferTask.start_link([]) @@ -95,17 +66,8 @@ test "transfer config values with full subkey update" do clear_config(:emoji) clear_config(:assets) - 
ConfigDB.create(%{ - group: ":pleroma", - key: ":emoji", - value: [groups: [a: 1, b: 2]] - }) - - ConfigDB.create(%{ - group: ":pleroma", - key: ":assets", - value: [mascots: [a: 1, b: 2]] - }) + insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]]) + insert(:config, key: :assets, value: [mascots: [a: 1, b: 2]]) TransferTask.start_link([]) @@ -122,12 +84,7 @@ test "transfer config values with full subkey update" do test "don't restart if no reboot time settings were changed" do clear_config(:emoji) - - ConfigDB.create(%{ - group: ":pleroma", - key: ":emoji", - value: [groups: [a: 1, b: 2]] - }) + insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]]) refute String.contains?( capture_log(fn -> TransferTask.start_link([]) end), @@ -137,25 +94,13 @@ test "don't restart if no reboot time settings were changed" do test "on reboot time key" do clear_config(:chat) - - ConfigDB.create(%{ - group: ":pleroma", - key: ":chat", - value: [enabled: false] - }) - + insert(:config, key: :chat, value: [enabled: false]) assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted" end test "on reboot time subkey" do clear_config(Pleroma.Captcha) - - ConfigDB.create(%{ - group: ":pleroma", - key: "Pleroma.Captcha", - value: [seconds_valid: 60] - }) - + insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60]) assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted" end @@ -163,17 +108,8 @@ test "don't restart pleroma on reboot time key and subkey if there is false flag clear_config(:chat) clear_config(Pleroma.Captcha) - ConfigDB.create(%{ - group: ":pleroma", - key: ":chat", - value: [enabled: false] - }) - - ConfigDB.create(%{ - group: ":pleroma", - key: "Pleroma.Captcha", - value: [seconds_valid: 60] - }) + insert(:config, key: :chat, value: [enabled: false]) + insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60]) refute String.contains?( capture_log(fn -> TransferTask.load_and_update_env([], false) end), diff --git a/test/support/factory.ex b/test/support/factory.ex index 6e3676aca..e517d5bc6 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -396,24 +396,17 @@ def registration_factory do } end - def config_factory do + def config_factory(attrs \\ %{}) do %Pleroma.ConfigDB{ - key: - sequence(:key, fn key -> - # Atom dynamic registration hack in tests - "some_key_#{key}" - |> String.to_atom() - |> inspect() - end), - group: ":pleroma", + key: sequence(:key, &String.to_atom("some_key_#{&1}")), + group: :pleroma, value: sequence( :value, - fn key -> - :erlang.term_to_binary(%{another_key: "#{key}somevalue", another: "#{key}somevalue"}) - end + &%{another_key: "#{&1}somevalue", another: "#{&1}somevalue"} ) } + |> merge_attributes(attrs) end def marker_factory do diff --git a/test/tasks/config_test.exs b/test/tasks/config_test.exs index 04bc947a9..e1bddfebf 100644 --- a/test/tasks/config_test.exs +++ b/test/tasks/config_test.exs @@ -5,6 +5,8 @@ defmodule Mix.Tasks.Pleroma.ConfigTest do use Pleroma.DataCase + import Pleroma.Factory + alias Pleroma.ConfigDB alias Pleroma.Repo @@ -49,24 +51,19 @@ test "filtered settings are migrated to db" do refute ConfigDB.get_by_params(%{group: ":pleroma", key: "Pleroma.Repo"}) refute ConfigDB.get_by_params(%{group: ":postgrex", key: ":json_library"}) - assert ConfigDB.from_binary(config1.value) == [key: "value", key2: [Repo]] - assert ConfigDB.from_binary(config2.value) == [key: "value2", key2: ["Activity"]] - assert ConfigDB.from_binary(config3.value) == :info + assert config1.value 
== [key: "value", key2: [Repo]] + assert config2.value == [key: "value2", key2: ["Activity"]] + assert config3.value == :info end test "config table is truncated before migration" do - ConfigDB.create(%{ - group: ":pleroma", - key: ":first_setting", - value: [key: "value", key2: ["Activity"]] - }) - + insert(:config, key: :first_setting, value: [key: "value", key2: ["Activity"]]) assert Repo.aggregate(ConfigDB, :count, :id) == 1 Mix.Tasks.Pleroma.Config.migrate_to_db("test/fixtures/config/temp.secret.exs") config = ConfigDB.get_by_params(%{group: ":pleroma", key: ":first_setting"}) - assert ConfigDB.from_binary(config.value) == [key: "value", key2: [Repo]] + assert config.value == [key: "value", key2: [Repo]] end end @@ -82,19 +79,9 @@ test "config table is truncated before migration" do end test "settings are migrated to file and deleted from db", %{temp_file: temp_file} do - ConfigDB.create(%{ - group: ":pleroma", - key: ":setting_first", - value: [key: "value", key2: ["Activity"]] - }) - - ConfigDB.create(%{ - group: ":pleroma", - key: ":setting_second", - value: [key: "value2", key2: [Repo]] - }) - - ConfigDB.create(%{group: ":quack", key: ":level", value: :info}) + insert(:config, key: :setting_first, value: [key: "value", key2: ["Activity"]]) + insert(:config, key: :setting_second, value: [key: "value2", key2: [Repo]]) + insert(:config, group: :quack, key: :level, value: :info) Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"]) @@ -107,9 +94,8 @@ test "settings are migrated to file and deleted from db", %{temp_file: temp_file end test "load a settings with large values and pass to file", %{temp_file: temp_file} do - ConfigDB.create(%{ - group: ":pleroma", - key: ":instance", + insert(:config, + key: :instance, value: [ name: "Pleroma", email: "example@example.com", @@ -163,7 +149,6 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil extended_nickname_format: true, multi_factor_authentication: [ totp: [ - # digits 6 or 8 digits: 6, period: 30 ], @@ -173,7 +158,7 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil ] ] ] - }) + ) Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"]) diff --git a/test/upload/filter/mogrify_test.exs b/test/upload/filter/mogrify_test.exs index b6a463e8c..62ca30487 100644 --- a/test/upload/filter/mogrify_test.exs +++ b/test/upload/filter/mogrify_test.exs @@ -6,21 +6,17 @@ defmodule Pleroma.Upload.Filter.MogrifyTest do use Pleroma.DataCase import Mock - alias Pleroma.Config - alias Pleroma.Upload alias Pleroma.Upload.Filter - setup do: clear_config([Filter.Mogrify, :args]) - test "apply mogrify filter" do - Config.put([Filter.Mogrify, :args], [{"tint", "40"}]) + clear_config(Filter.Mogrify, args: [{"tint", "40"}]) File.cp!( "test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg" ) - upload = %Upload{ + upload = %Pleroma.Upload{ name: "an… image.jpg", content_type: "image/jpg", path: Path.absname("test/fixtures/image_tmp.jpg"), diff --git a/test/web/admin_api/controllers/config_controller_test.exs b/test/web/admin_api/controllers/config_controller_test.exs index 780de8d18..064ef9bc7 100644 --- a/test/web/admin_api/controllers/config_controller_test.exs +++ b/test/web/admin_api/controllers/config_controller_test.exs @@ -57,12 +57,12 @@ test "with settings only in db", %{conn: conn} do ] } = json_response_and_validate_schema(conn, 200) - assert key1 == config1.key - assert key2 == config2.key + assert key1 == inspect(config1.key) + assert key2 == 
inspect(config2.key) end test "db is added to settings that are in db", %{conn: conn} do - _config = insert(:config, key: ":instance", value: ConfigDB.to_binary(name: "Some name")) + _config = insert(:config, key: ":instance", value: [name: "Some name"]) %{"configs" => configs} = conn @@ -83,7 +83,7 @@ test "merged default setting with db settings", %{conn: conn} do config3 = insert(:config, - value: ConfigDB.to_binary(k1: :v1, k2: :v2) + value: [k1: :v1, k2: :v2] ) %{"configs" => configs} = @@ -93,42 +93,45 @@ test "merged default setting with db settings", %{conn: conn} do assert length(configs) > 3 + saved_configs = [config1, config2, config3] + keys = Enum.map(saved_configs, &inspect(&1.key)) + received_configs = Enum.filter(configs, fn %{"group" => group, "key" => key} -> - group == ":pleroma" and key in [config1.key, config2.key, config3.key] + group == ":pleroma" and key in keys end) assert length(received_configs) == 3 db_keys = config3.value - |> ConfigDB.from_binary() |> Keyword.keys() - |> ConfigDB.convert() + |> ConfigDB.to_json_types() + + keys = Enum.map(saved_configs -- [config3], &inspect(&1.key)) + + values = Enum.map(saved_configs, &ConfigDB.to_json_types(&1.value)) + + mapset_keys = MapSet.new(keys ++ db_keys) Enum.each(received_configs, fn %{"value" => value, "db" => db} -> - assert db in [[config1.key], [config2.key], db_keys] + db = MapSet.new(db) + assert MapSet.subset?(db, mapset_keys) - assert value in [ - ConfigDB.from_binary_with_convert(config1.value), - ConfigDB.from_binary_with_convert(config2.value), - ConfigDB.from_binary_with_convert(config3.value) - ] + assert value in values end) end test "subkeys with full update right merge", %{conn: conn} do - config1 = - insert(:config, - key: ":emoji", - value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1]) - ) + insert(:config, + key: ":emoji", + value: [groups: [a: 1, b: 2], key: [a: 1]] + ) - config2 = - insert(:config, - key: ":assets", - value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1]) - ) + insert(:config, + key: ":assets", + value: [mascots: [a: 1, b: 2], key: [a: 1]] + ) %{"configs" => configs} = conn @@ -137,14 +140,14 @@ test "subkeys with full update right merge", %{conn: conn} do vals = Enum.filter(configs, fn %{"group" => group, "key" => key} -> - group == ":pleroma" and key in [config1.key, config2.key] + group == ":pleroma" and key in [":emoji", ":assets"] end) emoji = Enum.find(vals, fn %{"key" => key} -> key == ":emoji" end) assets = Enum.find(vals, fn %{"key" => key} -> key == ":assets" end) - emoji_val = ConfigDB.transform_with_out_binary(emoji["value"]) - assets_val = ConfigDB.transform_with_out_binary(assets["value"]) + emoji_val = ConfigDB.to_elixir_types(emoji["value"]) + assets_val = ConfigDB.to_elixir_types(assets["value"]) assert emoji_val[:groups] == [a: 1, b: 2] assert assets_val[:mascots] == [a: 1, b: 2] @@ -277,7 +280,8 @@ test "create new config setting in db", %{conn: conn} do "value" => %{"tuple" => ["string", "Pleroma.Captcha.NotReal", []]}, "db" => [":key5"] } - ] + ], + "need_reboot" => false } assert Application.get_env(:pleroma, :key1) == "value1" @@ -357,7 +361,8 @@ test "save configs setting without explicit key", %{conn: conn} do "value" => "https://hooks.slack.com/services/KEY", "db" => [":webhook_url"] } - ] + ], + "need_reboot" => false } assert Application.get_env(:quack, :level) == :info @@ -366,14 +371,14 @@ test "save configs setting without explicit key", %{conn: conn} do end test "saving config with partial update", %{conn: conn} do - config = 
insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: 2)) + insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: 2)) conn = conn |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/config", %{ configs: [ - %{group: config.group, key: config.key, value: [%{"tuple" => [":key3", 3]}]} + %{group: ":pleroma", key: ":key1", value: [%{"tuple" => [":key3", 3]}]} ] }) @@ -389,7 +394,8 @@ test "saving config with partial update", %{conn: conn} do ], "db" => [":key1", ":key2", ":key3"] } - ] + ], + "need_reboot" => false } end @@ -500,8 +506,7 @@ test "update setting which need reboot, don't change reboot flag until reboot", end test "saving config with nested merge", %{conn: conn} do - config = - insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: [k1: 1, k2: 2])) + insert(:config, key: :key1, value: [key1: 1, key2: [k1: 1, k2: 2]]) conn = conn @@ -509,8 +514,8 @@ test "saving config with nested merge", %{conn: conn} do |> post("/api/pleroma/admin/config", %{ configs: [ %{ - group: config.group, - key: config.key, + group: ":pleroma", + key: ":key1", value: [ %{"tuple" => [":key3", 3]}, %{ @@ -548,7 +553,8 @@ test "saving config with nested merge", %{conn: conn} do ], "db" => [":key1", ":key3", ":key2"] } - ] + ], + "need_reboot" => false } end @@ -588,7 +594,8 @@ test "saving special atoms", %{conn: conn} do ], "db" => [":ssl_options"] } - ] + ], + "need_reboot" => false } assert Application.get_env(:pleroma, :key1) == [ @@ -600,12 +607,11 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do backends = Application.get_env(:logger, :backends) on_exit(fn -> Application.put_env(:logger, :backends, backends) end) - config = - insert(:config, - group: ":logger", - key: ":backends", - value: :erlang.term_to_binary([]) - ) + insert(:config, + group: :logger, + key: :backends, + value: [] + ) Pleroma.Config.TransferTask.load_and_update_env([], false) @@ -617,8 +623,8 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do |> post("/api/pleroma/admin/config", %{ configs: [ %{ - group: config.group, - key: config.key, + group: ":logger", + key: ":backends", value: [":console"] } ] @@ -634,7 +640,8 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do ], "db" => [":backends"] } - ] + ], + "need_reboot" => false } assert Application.get_env(:logger, :backends) == [ @@ -643,19 +650,18 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do end test "saving full setting if value is not keyword", %{conn: conn} do - config = - insert(:config, - group: ":tesla", - key: ":adapter", - value: :erlang.term_to_binary(Tesla.Adapter.Hackey) - ) + insert(:config, + group: :tesla, + key: :adapter, + value: Tesla.Adapter.Hackey + ) conn = conn |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/config", %{ configs: [ - %{group: config.group, key: config.key, value: "Tesla.Adapter.Httpc"} + %{group: ":tesla", key: ":adapter", value: "Tesla.Adapter.Httpc"} ] }) @@ -667,7 +673,8 @@ test "saving full setting if value is not keyword", %{conn: conn} do "value" => "Tesla.Adapter.Httpc", "db" => [":adapter"] } - ] + ], + "need_reboot" => false } end @@ -677,13 +684,13 @@ test "update config setting & delete with fallback to default value", %{ token: token } do ueberauth = Application.get_env(:ueberauth, Ueberauth) - config1 = insert(:config, key: ":keyaa1") - config2 = insert(:config, key: 
":keyaa2") + insert(:config, key: :keyaa1) + insert(:config, key: :keyaa2) config3 = insert(:config, - group: ":ueberauth", - key: "Ueberauth" + group: :ueberauth, + key: Ueberauth ) conn = @@ -691,8 +698,8 @@ test "update config setting & delete with fallback to default value", %{ |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/config", %{ configs: [ - %{group: config1.group, key: config1.key, value: "another_value"}, - %{group: config2.group, key: config2.key, value: "another_value"} + %{group: ":pleroma", key: ":keyaa1", value: "another_value"}, + %{group: ":pleroma", key: ":keyaa2", value: "another_value"} ] }) @@ -700,22 +707,23 @@ test "update config setting & delete with fallback to default value", %{ "configs" => [ %{ "group" => ":pleroma", - "key" => config1.key, + "key" => ":keyaa1", "value" => "another_value", "db" => [":keyaa1"] }, %{ "group" => ":pleroma", - "key" => config2.key, + "key" => ":keyaa2", "value" => "another_value", "db" => [":keyaa2"] } - ] + ], + "need_reboot" => false } assert Application.get_env(:pleroma, :keyaa1) == "another_value" assert Application.get_env(:pleroma, :keyaa2) == "another_value" - assert Application.get_env(:ueberauth, Ueberauth) == ConfigDB.from_binary(config3.value) + assert Application.get_env(:ueberauth, Ueberauth) == config3.value conn = build_conn() @@ -724,7 +732,7 @@ test "update config setting & delete with fallback to default value", %{ |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/config", %{ configs: [ - %{group: config2.group, key: config2.key, delete: true}, + %{group: ":pleroma", key: ":keyaa2", delete: true}, %{ group: ":ueberauth", key: "Ueberauth", @@ -734,7 +742,8 @@ test "update config setting & delete with fallback to default value", %{ }) assert json_response_and_validate_schema(conn, 200) == %{ - "configs" => [] + "configs" => [], + "need_reboot" => false } assert Application.get_env(:ueberauth, Ueberauth) == ueberauth @@ -801,7 +810,8 @@ test "common config example", %{conn: conn} do ":name" ] } - ] + ], + "need_reboot" => false } end @@ -935,7 +945,8 @@ test "tuples with more than two values", %{conn: conn} do ], "db" => [":http"] } - ] + ], + "need_reboot" => false } end @@ -1000,7 +1011,8 @@ test "settings with nesting map", %{conn: conn} do ], "db" => [":key2", ":key3"] } - ] + ], + "need_reboot" => false } end @@ -1027,7 +1039,8 @@ test "value as map", %{conn: conn} do "value" => %{"key" => "some_val"}, "db" => [":key1"] } - ] + ], + "need_reboot" => false } end @@ -1077,16 +1090,16 @@ test "queues key as atom", %{conn: conn} do ":background" ] } - ] + ], + "need_reboot" => false } end test "delete part of settings by atom subkeys", %{conn: conn} do - config = - insert(:config, - key: ":keyaa1", - value: :erlang.term_to_binary(subkey1: "val1", subkey2: "val2", subkey3: "val3") - ) + insert(:config, + key: :keyaa1, + value: [subkey1: "val1", subkey2: "val2", subkey3: "val3"] + ) conn = conn @@ -1094,8 +1107,8 @@ test "delete part of settings by atom subkeys", %{conn: conn} do |> post("/api/pleroma/admin/config", %{ configs: [ %{ - group: config.group, - key: config.key, + group: ":pleroma", + key: ":keyaa1", subkeys: [":subkey1", ":subkey3"], delete: true } @@ -1110,7 +1123,8 @@ test "delete part of settings by atom subkeys", %{conn: conn} do "value" => [%{"tuple" => [":subkey2", "val2"]}], "db" => [":subkey2"] } - ] + ], + "need_reboot" => false } end @@ -1236,6 +1250,90 @@ test "doesn't set keys not in the whitelist", %{conn: conn} do assert 
Application.get_env(:pleroma, Pleroma.Captcha.NotReal) == "value5" assert Application.get_env(:not_real, :anything) == "value6" end + + test "args for Pleroma.Upload.Filter.Mogrify with custom tuples", %{conn: conn} do + clear_config(Pleroma.Upload.Filter.Mogrify) + + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/pleroma/admin/config", %{ + configs: [ + %{ + group: ":pleroma", + key: "Pleroma.Upload.Filter.Mogrify", + value: [ + %{"tuple" => [":args", ["auto-orient", "strip"]]} + ] + } + ] + }) + |> json_response_and_validate_schema(200) == %{ + "configs" => [ + %{ + "group" => ":pleroma", + "key" => "Pleroma.Upload.Filter.Mogrify", + "value" => [ + %{"tuple" => [":args", ["auto-orient", "strip"]]} + ], + "db" => [":args"] + } + ], + "need_reboot" => false + } + + assert Config.get(Pleroma.Upload.Filter.Mogrify) == [args: ["auto-orient", "strip"]] + + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/pleroma/admin/config", %{ + configs: [ + %{ + group: ":pleroma", + key: "Pleroma.Upload.Filter.Mogrify", + value: [ + %{ + "tuple" => [ + ":args", + [ + "auto-orient", + "strip", + "{\"implode\", \"1\"}", + "{\"resize\", \"3840x1080>\"}" + ] + ] + } + ] + } + ] + }) + |> json_response(200) == %{ + "configs" => [ + %{ + "group" => ":pleroma", + "key" => "Pleroma.Upload.Filter.Mogrify", + "value" => [ + %{ + "tuple" => [ + ":args", + [ + "auto-orient", + "strip", + "{\"implode\", \"1\"}", + "{\"resize\", \"3840x1080>\"}" + ] + ] + } + ], + "db" => [":args"] + } + ], + "need_reboot" => false + } + + assert Config.get(Pleroma.Upload.Filter.Mogrify) == [ + args: ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}] + ] + end end describe "GET /api/pleroma/admin/config/descriptions" do
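# Minimal illustration (inferred from the assertions above, e.g.
# `assert key1 == inspect(config1.key)`, not from ConfigDB internals): config
# groups and keys are now stored as Elixir atoms and rendered back to the
# admin API as strings via inspect/1.
to_api_key = fn key -> inspect(key) end

":keyaa1" = to_api_key.(:keyaa1)
"Ueberauth" = to_api_key.(Ueberauth)
"Pleroma.Upload.Filter.Mogrify" = to_api_key.(Pleroma.Upload.Filter.Mogrify)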
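# Hedged sketch of the "custom tuple" mapping the Mogrify test above exercises:
# the admin API sends args such as "{\"implode\", \"1\"}" as plain strings, and
# the stored config ends up with the tuple {"implode", "1"}. The helper below is
# an assumed stand-in for the real conversion (which lives in ConfigDB's type
# handling and is not shown here), included only to make the expected
# string-to-tuple mapping concrete.
defmodule MogrifyArgSketch do
  # "{\"implode\", \"1\"}" -> {"implode", "1"}; any other arg passes through as-is.
  def to_elixir_arg("{" <> _ = string) do
    [key, value] =
      string
      |> String.trim_leading("{")
      |> String.trim_trailing("}")
      |> String.split(",", parts: 2)
      |> Enum.map(fn part -> part |> String.trim() |> String.trim("\"") end)

    {key, value}
  end

  def to_elixir_arg(string), do: string
end

# MogrifyArgSketch.to_elixir_arg("{\"implode\", \"1\"}")        #=> {"implode", "1"}
# MogrifyArgSketch.to_elixir_arg("{\"resize\", \"3840x1080>\"}") #=> {"resize", "3840x1080>"}
# MogrifyArgSketch.to_elixir_arg("auto-orient")                  #=> "auto-orient"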