Merge branch 'fix/1787-mogrify-args' into 'develop'

Mogrify args in adminFE

Closes #1787

See merge request pleroma/pleroma!2616
lain 2020-06-16 13:17:52 +00:00
commit 4baf5ffe73
13 changed files with 624 additions and 784 deletions


@@ -72,8 +72,7 @@ defp create(group, settings) do
     group
     |> Pleroma.Config.Loader.filter_group(settings)
     |> Enum.each(fn {key, value} ->
-      key = inspect(key)
-      {:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value})
+      {:ok, _} = ConfigDB.update_or_create(%{group: group, key: key, value: value})

       shell_info("Settings for key #{key} migrated.")
     end)

@@ -131,12 +130,9 @@ defp write_and_delete(config, file, delete?) do
   end

   defp write(config, file) do
-    value =
-      config.value
-      |> ConfigDB.from_binary()
-      |> inspect(limit: :infinity)
+    value = inspect(config.value, limit: :infinity)

-    IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n")
+    IO.write(file, "config #{inspect(config.group)}, #{inspect(config.key)}, #{value}\r\n\r\n")

     config
   end
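With groups and keys now stored as atoms, write/2 relies on inspect/1 to emit valid config lines. An illustrative sketch (not part of the diff) for a hypothetical %ConfigDB{group: :pleroma, key: :instance, value: [name: "Pleroma"]} record, which would be written to the target file as:

    config :pleroma, :instance, [name: "Pleroma"]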


@@ -6,7 +6,7 @@ defmodule Pleroma.ConfigDB do
   use Ecto.Schema

   import Ecto.Changeset
-  import Ecto.Query
+  import Ecto.Query, only: [select: 3]
   import Pleroma.Web.Gettext

   alias __MODULE__

@@ -14,16 +14,6 @@ defmodule Pleroma.ConfigDB do
   @type t :: %__MODULE__{}

-  @full_key_update [
-    {:pleroma, :ecto_repos},
-    {:quack, :meta},
-    {:mime, :types},
-    {:cors_plug, [:max_age, :methods, :expose, :headers]},
-    {:auto_linker, :opts},
-    {:swarm, :node_blacklist},
-    {:logger, :backends}
-  ]
-
   @full_subkey_update [
     {:pleroma, :assets, :mascots},
     {:pleroma, :emoji, :groups},

@@ -32,14 +22,10 @@ defmodule Pleroma.ConfigDB do
     {:pleroma, :mrf_keyword, :replace}
   ]

-  @regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
-
-  @delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
-
   schema "config" do
-    field(:key, :string)
-    field(:group, :string)
-    field(:value, :binary)
+    field(:key, Pleroma.Config.Type.Atom)
+    field(:group, Pleroma.Config.Type.Atom)
+    field(:value, Pleroma.Config.Type.BinaryValue)
     field(:db, {:array, :string}, virtual: true, default: [])

     timestamps()

@@ -51,10 +37,6 @@ def get_all_as_keyword do
     |> select([c], {c.group, c.key, c.value})
     |> Repo.all()
     |> Enum.reduce([], fn {group, key, value}, acc ->
-      group = ConfigDB.from_string(group)
-      key = ConfigDB.from_string(key)
-      value = from_binary(value)
-
       Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
     end)
   end
@@ -64,50 +46,41 @@ def get_by_params(params), do: Repo.get_by(ConfigDB, params)

   @spec changeset(ConfigDB.t(), map()) :: Changeset.t()
   def changeset(config, params \\ %{}) do
-    params = Map.put(params, :value, transform(params[:value]))
-
     config
     |> cast(params, [:key, :group, :value])
     |> validate_required([:key, :group, :value])
     |> unique_constraint(:key, name: :config_group_key_index)
   end

-  @spec create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
-  def create(params) do
+  defp create(params) do
     %ConfigDB{}
     |> changeset(params)
     |> Repo.insert()
   end

-  @spec update(ConfigDB.t(), map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
-  def update(%ConfigDB{} = config, %{value: value}) do
+  defp update(%ConfigDB{} = config, %{value: value}) do
     config
     |> changeset(%{value: value})
     |> Repo.update()
   end

-  @spec get_db_keys(ConfigDB.t()) :: [String.t()]
-  def get_db_keys(%ConfigDB{} = config) do
-    config.value
-    |> ConfigDB.from_binary()
-    |> get_db_keys(config.key)
-  end
-
   @spec get_db_keys(keyword(), any()) :: [String.t()]
   def get_db_keys(value, key) do
-    if Keyword.keyword?(value) do
-      value |> Keyword.keys() |> Enum.map(&convert(&1))
-    else
-      [convert(key)]
-    end
+    keys =
+      if Keyword.keyword?(value) do
+        Keyword.keys(value)
+      else
+        [key]
+      end
+
+    Enum.map(keys, &to_json_types(&1))
   end

   @spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
   def merge_group(group, key, old_value, new_value) do
-    new_keys = to_map_set(new_value)
-
-    intersect_keys =
-      old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list()
+    new_keys = to_mapset(new_value)
+    intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list()

     merged_value = ConfigDB.merge(old_value, new_value)

@@ -120,12 +93,10 @@ def merge_group(group, key, old_value, new_value) do
         []
       end)
       |> List.flatten()
-      |> Enum.reduce(merged_value, fn subkey, acc ->
-        Keyword.put(acc, subkey, new_value[subkey])
-      end)
+      |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1]))
   end

-  defp to_map_set(keyword) do
+  defp to_mapset(keyword) do
     keyword
     |> Keyword.keys()
     |> MapSet.new()
@ -159,43 +130,40 @@ defp deep_merge(_key, value1, value2) do
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
def update_or_create(params) do def update_or_create(params) do
params = Map.put(params, :value, to_elixir_types(params[:value]))
search_opts = Map.take(params, [:group, :key]) search_opts = Map.take(params, [:group, :key])
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
{:partial_update, true, config} <- {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config},
{:partial_update, can_be_partially_updated?(config), config}, {_, true, config} <-
old_value <- from_binary(config.value), {:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do
transformed_value <- do_transform(params[:value]), new_value = merge_group(config.group, config.key, config.value, params[:value])
{:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config}, update(config, %{value: new_value})
new_value <-
merge_group(
ConfigDB.from_string(config.group),
ConfigDB.from_string(config.key),
old_value,
transformed_value
) do
ConfigDB.update(config, %{value: new_value})
else else
{reason, false, config} when reason in [:partial_update, :can_be_merged] -> {reason, false, config} when reason in [:partial_update, :can_be_merged] ->
ConfigDB.update(config, params) update(config, params)
nil -> nil ->
ConfigDB.create(params) create(params)
end end
end end
defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config) defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
defp only_full_update?(%ConfigDB{} = config) do defp only_full_update?(%ConfigDB{group: group, key: key}) do
config_group = ConfigDB.from_string(config.group) full_key_update = [
config_key = ConfigDB.from_string(config.key) {:pleroma, :ecto_repos},
{:quack, :meta},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:auto_linker, :opts},
{:swarm, :node_blacklist},
{:logger, :backends}
]
Enum.any?(@full_key_update, fn Enum.any?(full_key_update, fn
{group, key} when is_list(key) -> {s_group, s_key} ->
config_group == group and config_key in key group == s_group and ((is_list(s_key) and key in s_key) or key == s_key)
{group, key} ->
config_group == group and config_key == key
end) end)
end end
@ -205,11 +173,10 @@ def delete(params) do
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts), with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
{config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]}, {config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
old_value <- from_binary(config.value), keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)),
keys <- Enum.map(sub_keys, &do_transform_string(&1)), {_, config, new_value} when new_value != [] <-
{:partial_remove, config, new_value} when new_value != [] <- {:partial_remove, config, Keyword.drop(config.value, keys)} do
{:partial_remove, config, Keyword.drop(old_value, keys)} do update(config, %{value: new_value})
ConfigDB.update(config, %{value: new_value})
else else
{:partial_remove, config, []} -> {:partial_remove, config, []} ->
Repo.delete(config) Repo.delete(config)
@ -225,37 +192,32 @@ def delete(params) do
end end
end end
@spec from_binary(binary()) :: term() @spec to_json_types(term()) :: map() | list() | boolean() | String.t()
def from_binary(binary), do: :erlang.binary_to_term(binary) def to_json_types(entity) when is_list(entity) do
Enum.map(entity, &to_json_types/1)
@spec from_binary_with_convert(binary()) :: any()
def from_binary_with_convert(binary) do
binary
|> from_binary()
|> do_convert()
end end
@spec from_string(String.t()) :: atom() | no_return() def to_json_types(%Regex{} = entity), do: inspect(entity)
def from_string(string), do: do_transform_string(string)
@spec convert(any()) :: any() def to_json_types(entity) when is_map(entity) do
def convert(entity), do: do_convert(entity) Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end)
defp do_convert(entity) when is_list(entity) do
for v <- entity, into: [], do: do_convert(v)
end end
defp do_convert(%Regex{} = entity), do: inspect(entity) def to_json_types({:args, args}) when is_list(args) do
arguments =
Enum.map(args, fn
arg when is_tuple(arg) -> inspect(arg)
arg -> to_json_types(arg)
end)
defp do_convert(entity) when is_map(entity) do %{"tuple" => [":args", arguments]}
for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
end end
defp do_convert({:proxy_url, {type, :localhost, port}}) do def to_json_types({:proxy_url, {type, :localhost, port}}) do
%{"tuple" => [":proxy_url", %{"tuple" => [do_convert(type), "localhost", port]}]} %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]}
end end
defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do
ip = ip =
host host
|> :inet_parse.ntoa() |> :inet_parse.ntoa()
@ -264,66 +226,64 @@ defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
%{ %{
"tuple" => [ "tuple" => [
":proxy_url", ":proxy_url",
%{"tuple" => [do_convert(type), ip, port]} %{"tuple" => [to_json_types(type), ip, port]}
] ]
} }
end end
defp do_convert({:proxy_url, {type, host, port}}) do def to_json_types({:proxy_url, {type, host, port}}) do
%{ %{
"tuple" => [ "tuple" => [
":proxy_url", ":proxy_url",
%{"tuple" => [do_convert(type), to_string(host), port]} %{"tuple" => [to_json_types(type), to_string(host), port]}
] ]
} }
end end
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]} def to_json_types({:partial_chain, entity}),
do: %{"tuple" => [":partial_chain", inspect(entity)]}
defp do_convert(entity) when is_tuple(entity) do def to_json_types(entity) when is_tuple(entity) do
value = value =
entity entity
|> Tuple.to_list() |> Tuple.to_list()
|> do_convert() |> to_json_types()
%{"tuple" => value} %{"tuple" => value}
end end
defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do def to_json_types(entity) when is_binary(entity), do: entity
def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
entity entity
end end
defp do_convert(entity) def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
":#{entity}" ":#{entity}"
end end
defp do_convert(entity) when is_atom(entity), do: inspect(entity) def to_json_types(entity) when is_atom(entity), do: inspect(entity)
defp do_convert(entity) when is_binary(entity), do: entity @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term()
def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do
arguments =
Enum.map(args, fn arg ->
if String.contains?(arg, ["{", "}"]) do
{elem, []} = Code.eval_string(arg)
elem
else
to_elixir_types(arg)
end
end)
@spec transform(any()) :: binary() | no_return() {:args, arguments}
def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
entity
|> do_transform()
|> to_binary()
end end
def transform(entity), do: to_binary(entity) def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
{:proxy_url, {string_to_elixir_types(type), parse_host(host), port}}
@spec transform_with_out_binary(any()) :: any()
def transform_with_out_binary(entity), do: do_transform(entity)
@spec to_binary(any()) :: binary()
def to_binary(entity), do: :erlang.term_to_binary(entity)
defp do_transform(%Regex{} = entity), do: entity
defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
end end
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do
{partial_chain, []} = {partial_chain, []} =
entity entity
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "") |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
@ -332,25 +292,51 @@ defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
{:partial_chain, partial_chain} {:partial_chain, partial_chain}
end end
defp do_transform(%{"tuple" => entity}) do def to_elixir_types(%{"tuple" => entity}) do
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end) Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1)))
end end
defp do_transform(entity) when is_map(entity) do def to_elixir_types(entity) when is_map(entity) do
for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)} Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end)
end end
defp do_transform(entity) when is_list(entity) do def to_elixir_types(entity) when is_list(entity) do
for v <- entity, into: [], do: do_transform(v) Enum.map(entity, &to_elixir_types/1)
end end
defp do_transform(entity) when is_binary(entity) do def to_elixir_types(entity) when is_binary(entity) do
entity entity
|> String.trim() |> String.trim()
|> do_transform_string() |> string_to_elixir_types()
end end
defp do_transform(entity), do: entity def to_elixir_types(entity), do: entity
@spec string_to_elixir_types(String.t()) ::
atom() | Regex.t() | module() | String.t() | no_return()
def string_to_elixir_types("~r" <> _pattern = regex) do
pattern =
~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
Regex.named_captures(pattern, regex),
{:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter),
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
result
end
end
def string_to_elixir_types(":" <> atom), do: String.to_atom(atom)
def string_to_elixir_types(value) do
if module_name?(value) do
String.to_existing_atom("Elixir." <> value)
else
value
end
end
defp parse_host("localhost"), do: :localhost defp parse_host("localhost"), do: :localhost
@ -387,27 +373,8 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
end end
end end
defp do_transform_string("~r" <> _pattern = regex) do @spec module_name?(String.t()) :: boolean()
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <- def module_name?(string) do
Regex.named_captures(@regex, regex),
{:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter),
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
result
end
end
defp do_transform_string(":" <> atom), do: String.to_atom(atom)
defp do_transform_string(value) do
if is_module_name?(value) do
String.to_existing_atom("Elixir." <> value)
else
value
end
end
@spec is_module_name?(String.t()) :: boolean()
def is_module_name?(string) do
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
string in ["Oban", "Ueberauth", "ExSyslogger"] string in ["Oban", "Ueberauth", "ExSyslogger"]
end end
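The new :args clauses in to_json_types/1 and to_elixir_types/1 above are what let mogrify arguments survive the JSON round trip from admin-fe. An illustrative sketch, using the argument value that appears in the Mogrify test below:

    ConfigDB.to_json_types({:args, [{"tint", "40"}]})
    #=> %{"tuple" => [":args", ["{\"tint\", \"40\"}"]]}

    ConfigDB.to_elixir_types(%{"tuple" => [":args", ["{\"tint\", \"40\"}"]]})
    #=> {:args, [{"tint", "40"}]}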


@@ -28,10 +28,6 @@ defmodule Pleroma.Config.TransferTask do
     {:pleroma, Pleroma.Captcha, [:seconds_valid]},
     {:pleroma, Pleroma.Upload, [:proxy_remote]},
     {:pleroma, :instance, [:upload_limit]},
-    {:pleroma, :email_notifications, [:digest]},
-    {:pleroma, :oauth2, [:clean_expired_tokens]},
-    {:pleroma, Pleroma.ActivityExpiration, [:enabled]},
-    {:pleroma, Pleroma.ScheduledActivity, [:enabled]},
     {:pleroma, :gopher, [:enabled]}
   ]

@@ -48,7 +44,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
     {logger, other} =
       (Repo.all(ConfigDB) ++ deleted_settings)
-      |> Enum.map(&transform_and_merge/1)
+      |> Enum.map(&merge_with_default/1)
      |> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)

     logger

@@ -92,11 +88,7 @@ defp maybe_set_pleroma_last(apps) do
     end
   end

-  defp transform_and_merge(%{group: group, key: key, value: value} = setting) do
-    group = ConfigDB.from_string(group)
-    key = ConfigDB.from_string(key)
-    value = ConfigDB.from_binary(value)
-
+  defp merge_with_default(%{group: group, key: key, value: value} = setting) do
     default = Config.Holder.default_config(group, key)

     merged =


@@ -0,0 +1,22 @@
defmodule Pleroma.Config.Type.Atom do
  use Ecto.Type

  def type, do: :atom

  def cast(key) when is_atom(key) do
    {:ok, key}
  end

  def cast(key) when is_binary(key) do
    {:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
  end

  def cast(_), do: :error

  def load(key) do
    {:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
  end

  def dump(key) when is_atom(key), do: {:ok, inspect(key)}
  def dump(_), do: :error
end
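A minimal sketch of how this new type casts, loads, and dumps keys (illustrative, assuming the string_to_elixir_types/1 behaviour shown above):

    {:ok, :instance} = Pleroma.Config.Type.Atom.cast(":instance")
    {:ok, Pleroma.Captcha} = Pleroma.Config.Type.Atom.cast("Pleroma.Captcha")
    {:ok, ":instance"} = Pleroma.Config.Type.Atom.dump(:instance)
    {:ok, :instance} = Pleroma.Config.Type.Atom.load(":instance")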


@@ -0,0 +1,23 @@
defmodule Pleroma.Config.Type.BinaryValue do
  use Ecto.Type

  def type, do: :term

  def cast(value) when is_binary(value) do
    if String.valid?(value) do
      {:ok, value}
    else
      {:ok, :erlang.binary_to_term(value)}
    end
  end

  def cast(value), do: {:ok, value}

  def load(value) when is_binary(value) do
    {:ok, :erlang.binary_to_term(value)}
  end

  def dump(value) do
    {:ok, :erlang.term_to_binary(value)}
  end
end
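Similarly, a sketch of the value round trip through this type (illustrative):

    {:ok, bin} = Pleroma.Config.Type.BinaryValue.dump(name: "Pleroma")
    {:ok, [name: "Pleroma"]} = Pleroma.Config.Type.BinaryValue.load(bin)
    # Plain UTF-8 strings pass cast/1 unchanged:
    {:ok, "some_value"} = Pleroma.Config.Type.BinaryValue.cast("some_value")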


@@ -33,7 +33,11 @@ def descriptions(conn, _params) do
   def show(conn, %{only_db: true}) do
     with :ok <- configurable_from_database() do
       configs = Pleroma.Repo.all(ConfigDB)
-      render(conn, "index.json", %{configs: configs})
+
+      render(conn, "index.json", %{
+        configs: configs,
+        need_reboot: Restarter.Pleroma.need_reboot?()
+      })
     end
   end

@@ -61,17 +65,20 @@ def show(conn, _params) do
             value
           end

-        %{
-          group: ConfigDB.convert(group),
-          key: ConfigDB.convert(key),
-          value: ConfigDB.convert(merged_value)
+        %ConfigDB{
+          group: group,
+          key: key,
+          value: merged_value
         }
         |> Pleroma.Maps.put_if_present(:db, db)
       end)
     end)
     |> List.flatten()

-    json(conn, %{configs: merged, need_reboot: Restarter.Pleroma.need_reboot?()})
+    render(conn, "index.json", %{
+      configs: merged,
+      need_reboot: Restarter.Pleroma.need_reboot?()
+    })
   end
 end

@@ -91,24 +98,17 @@ def update(%{body_params: %{configs: configs}} = conn, _) do
     {deleted, updated} =
       results
-      |> Enum.map(fn {:ok, config} ->
-        Map.put(config, :db, ConfigDB.get_db_keys(config))
-      end)
-      |> Enum.split_with(fn config ->
-        Ecto.get_meta(config, :state) == :deleted
+      |> Enum.map(fn {:ok, %{key: key, value: value} = config} ->
+        Map.put(config, :db, ConfigDB.get_db_keys(value, key))
       end)
+      |> Enum.split_with(&(Ecto.get_meta(&1, :state) == :deleted))

     Config.TransferTask.load_and_update_env(deleted, false)

     if not Restarter.Pleroma.need_reboot?() do
       changed_reboot_settings? =
         (updated ++ deleted)
-        |> Enum.any?(fn config ->
-          group = ConfigDB.from_string(config.group)
-          key = ConfigDB.from_string(config.key)
-          value = ConfigDB.from_binary(config.value)
-
-          Config.TransferTask.pleroma_need_restart?(group, key, value)
-        end)
+        |> Enum.any?(&Config.TransferTask.pleroma_need_restart?(&1.group, &1.key, &1.value))

       if changed_reboot_settings?, do: Restarter.Pleroma.need_reboot()
     end


@@ -5,23 +5,20 @@
 defmodule Pleroma.Web.AdminAPI.ConfigView do
   use Pleroma.Web, :view

+  alias Pleroma.ConfigDB
+
   def render("index.json", %{configs: configs} = params) do
-    map = %{
-      configs: render_many(configs, __MODULE__, "show.json", as: :config)
-    }
-
-    if params[:need_reboot] do
-      Map.put(map, :need_reboot, true)
-    else
-      map
-    end
+    %{
+      configs: render_many(configs, __MODULE__, "show.json", as: :config),
+      need_reboot: params[:need_reboot]
+    }
   end

   def render("show.json", %{config: config}) do
     map = %{
-      key: config.key,
-      group: config.group,
-      value: Pleroma.ConfigDB.from_binary_with_convert(config.value)
+      key: ConfigDB.to_json_types(config.key),
+      group: ConfigDB.to_json_types(config.group),
+      value: ConfigDB.to_json_types(config.value)
     }

     if config.db != [] do

@ -7,40 +7,28 @@ defmodule Pleroma.ConfigDBTest do
import Pleroma.Factory import Pleroma.Factory
alias Pleroma.ConfigDB alias Pleroma.ConfigDB
test "get_by_key/1" do test "get_by_params/1" do
config = insert(:config) config = insert(:config)
insert(:config) insert(:config)
assert config == ConfigDB.get_by_params(%{group: config.group, key: config.key}) assert config == ConfigDB.get_by_params(%{group: config.group, key: config.key})
end end
test "create/1" do
{:ok, config} = ConfigDB.create(%{group: ":pleroma", key: ":some_key", value: "some_value"})
assert config == ConfigDB.get_by_params(%{group: ":pleroma", key: ":some_key"})
end
test "update/1" do
config = insert(:config)
{:ok, updated} = ConfigDB.update(config, %{value: "some_value"})
loaded = ConfigDB.get_by_params(%{group: config.group, key: config.key})
assert loaded == updated
end
test "get_all_as_keyword/0" do test "get_all_as_keyword/0" do
saved = insert(:config) saved = insert(:config)
insert(:config, group: ":quack", key: ":level", value: ConfigDB.to_binary(:info)) insert(:config, group: ":quack", key: ":level", value: :info)
insert(:config, group: ":quack", key: ":meta", value: ConfigDB.to_binary([:none])) insert(:config, group: ":quack", key: ":meta", value: [:none])
insert(:config, insert(:config,
group: ":quack", group: ":quack",
key: ":webhook_url", key: ":webhook_url",
value: ConfigDB.to_binary("https://hooks.slack.com/services/KEY/some_val") value: "https://hooks.slack.com/services/KEY/some_val"
) )
config = ConfigDB.get_all_as_keyword() config = ConfigDB.get_all_as_keyword()
assert config[:pleroma] == [ assert config[:pleroma] == [
{ConfigDB.from_string(saved.key), ConfigDB.from_binary(saved.value)} {saved.key, saved.value}
] ]
assert config[:quack][:level] == :info assert config[:quack][:level] == :info
@ -51,11 +39,11 @@ test "get_all_as_keyword/0" do
describe "update_or_create/1" do describe "update_or_create/1" do
test "common" do test "common" do
config = insert(:config) config = insert(:config)
key2 = "another_key" key2 = :another_key
params = [ params = [
%{group: "pleroma", key: key2, value: "another_value"}, %{group: :pleroma, key: key2, value: "another_value"},
%{group: config.group, key: config.key, value: "new_value"} %{group: :pleroma, key: config.key, value: [a: 1, b: 2, c: "new_value"]}
] ]
assert Repo.all(ConfigDB) |> length() == 1 assert Repo.all(ConfigDB) |> length() == 1
@ -65,16 +53,16 @@ test "common" do
assert Repo.all(ConfigDB) |> length() == 2 assert Repo.all(ConfigDB) |> length() == 2
config1 = ConfigDB.get_by_params(%{group: config.group, key: config.key}) config1 = ConfigDB.get_by_params(%{group: config.group, key: config.key})
config2 = ConfigDB.get_by_params(%{group: "pleroma", key: key2}) config2 = ConfigDB.get_by_params(%{group: :pleroma, key: key2})
assert config1.value == ConfigDB.transform("new_value") assert config1.value == [a: 1, b: 2, c: "new_value"]
assert config2.value == ConfigDB.transform("another_value") assert config2.value == "another_value"
end end
test "partial update" do test "partial update" do
config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: :val2)) config = insert(:config, value: [key1: "val1", key2: :val2])
{:ok, _config} = {:ok, config} =
ConfigDB.update_or_create(%{ ConfigDB.update_or_create(%{
group: config.group, group: config.group,
key: config.key, key: config.key,
@ -83,15 +71,14 @@ test "partial update" do
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
value = ConfigDB.from_binary(updated.value) assert config.value == updated.value
assert length(value) == 3 assert updated.value[:key1] == :val1
assert value[:key1] == :val1 assert updated.value[:key2] == :val2
assert value[:key2] == :val2 assert updated.value[:key3] == :val3
assert value[:key3] == :val3
end end
test "deep merge" do test "deep merge" do
config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: [k1: :v1, k2: "v2"])) config = insert(:config, value: [key1: "val1", key2: [k1: :v1, k2: "v2"]])
{:ok, config} = {:ok, config} =
ConfigDB.update_or_create(%{ ConfigDB.update_or_create(%{
@ -103,18 +90,15 @@ test "deep merge" do
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
assert config.value == updated.value assert config.value == updated.value
assert updated.value[:key1] == :val1
value = ConfigDB.from_binary(updated.value) assert updated.value[:key2] == [k1: :v1, k2: :v2, k3: :v3]
assert value[:key1] == :val1 assert updated.value[:key3] == :val3
assert value[:key2] == [k1: :v1, k2: :v2, k3: :v3]
assert value[:key3] == :val3
end end
test "only full update for some keys" do test "only full update for some keys" do
config1 = insert(:config, key: ":ecto_repos", value: ConfigDB.to_binary(repo: Pleroma.Repo)) config1 = insert(:config, key: :ecto_repos, value: [repo: Pleroma.Repo])
config2 = config2 = insert(:config, group: :cors_plug, key: :max_age, value: 18)
insert(:config, group: ":cors_plug", key: ":max_age", value: ConfigDB.to_binary(18))
{:ok, _config} = {:ok, _config} =
ConfigDB.update_or_create(%{ ConfigDB.update_or_create(%{
@ -133,8 +117,8 @@ test "only full update for some keys" do
updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key}) updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key})
updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key}) updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key})
assert ConfigDB.from_binary(updated1.value) == [another_repo: [Pleroma.Repo]] assert updated1.value == [another_repo: [Pleroma.Repo]]
assert ConfigDB.from_binary(updated2.value) == 777 assert updated2.value == 777
end end
test "full update if value is not keyword" do test "full update if value is not keyword" do
@ -142,7 +126,7 @@ test "full update if value is not keyword" do
insert(:config, insert(:config,
group: ":tesla", group: ":tesla",
key: ":adapter", key: ":adapter",
value: ConfigDB.to_binary(Tesla.Adapter.Hackney) value: Tesla.Adapter.Hackney
) )
{:ok, _config} = {:ok, _config} =
@ -154,20 +138,20 @@ test "full update if value is not keyword" do
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
assert ConfigDB.from_binary(updated.value) == Tesla.Adapter.Httpc assert updated.value == Tesla.Adapter.Httpc
end end
test "only full update for some subkeys" do test "only full update for some subkeys" do
config1 = config1 =
insert(:config, insert(:config,
key: ":emoji", key: ":emoji",
value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1]) value: [groups: [a: 1, b: 2], key: [a: 1]]
) )
config2 = config2 =
insert(:config, insert(:config,
key: ":assets", key: ":assets",
value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1]) value: [mascots: [a: 1, b: 2], key: [a: 1]]
) )
{:ok, _config} = {:ok, _config} =
@ -187,8 +171,8 @@ test "only full update for some subkeys" do
updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key}) updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key})
updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key}) updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key})
assert ConfigDB.from_binary(updated1.value) == [groups: [c: 3, d: 4], key: [a: 1, b: 2]] assert updated1.value == [groups: [c: 3, d: 4], key: [a: 1, b: 2]]
assert ConfigDB.from_binary(updated2.value) == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]] assert updated2.value == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]]
end end
end end
@ -206,14 +190,14 @@ test "full delete" do
end end
test "partial subkeys delete" do test "partial subkeys delete" do
config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1])) config = insert(:config, value: [groups: [a: 1, b: 2], key: [a: 1]])
{:ok, deleted} = {:ok, deleted} =
ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]}) ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]})
assert Ecto.get_meta(deleted, :state) == :loaded assert Ecto.get_meta(deleted, :state) == :loaded
assert deleted.value == ConfigDB.to_binary(key: [a: 1]) assert deleted.value == [key: [a: 1]]
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
@ -221,7 +205,7 @@ test "partial subkeys delete" do
end end
test "full delete if remaining value after subkeys deletion is empty list" do test "full delete if remaining value after subkeys deletion is empty list" do
config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2])) config = insert(:config, value: [groups: [a: 1, b: 2]])
{:ok, deleted} = {:ok, deleted} =
ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]}) ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]})
@ -232,234 +216,159 @@ test "full delete if remaining value after subkeys deletion is empty list" do
end end
end end
describe "transform/1" do describe "to_elixir_types/1" do
test "string" do test "string" do
binary = ConfigDB.transform("value as string") assert ConfigDB.to_elixir_types("value as string") == "value as string"
assert binary == :erlang.term_to_binary("value as string")
assert ConfigDB.from_binary(binary) == "value as string"
end end
test "boolean" do test "boolean" do
binary = ConfigDB.transform(false) assert ConfigDB.to_elixir_types(false) == false
assert binary == :erlang.term_to_binary(false)
assert ConfigDB.from_binary(binary) == false
end end
test "nil" do test "nil" do
binary = ConfigDB.transform(nil) assert ConfigDB.to_elixir_types(nil) == nil
assert binary == :erlang.term_to_binary(nil)
assert ConfigDB.from_binary(binary) == nil
end end
test "integer" do test "integer" do
binary = ConfigDB.transform(150) assert ConfigDB.to_elixir_types(150) == 150
assert binary == :erlang.term_to_binary(150)
assert ConfigDB.from_binary(binary) == 150
end end
test "atom" do test "atom" do
binary = ConfigDB.transform(":atom") assert ConfigDB.to_elixir_types(":atom") == :atom
assert binary == :erlang.term_to_binary(:atom)
assert ConfigDB.from_binary(binary) == :atom
end end
test "ssl options" do test "ssl options" do
binary = ConfigDB.transform([":tlsv1", ":tlsv1.1", ":tlsv1.2"]) assert ConfigDB.to_elixir_types([":tlsv1", ":tlsv1.1", ":tlsv1.2"]) == [
assert binary == :erlang.term_to_binary([:tlsv1, :"tlsv1.1", :"tlsv1.2"]) :tlsv1,
assert ConfigDB.from_binary(binary) == [:tlsv1, :"tlsv1.1", :"tlsv1.2"] :"tlsv1.1",
:"tlsv1.2"
]
end end
test "pleroma module" do test "pleroma module" do
binary = ConfigDB.transform("Pleroma.Bookmark") assert ConfigDB.to_elixir_types("Pleroma.Bookmark") == Pleroma.Bookmark
assert binary == :erlang.term_to_binary(Pleroma.Bookmark)
assert ConfigDB.from_binary(binary) == Pleroma.Bookmark
end end
test "pleroma string" do test "pleroma string" do
binary = ConfigDB.transform("Pleroma") assert ConfigDB.to_elixir_types("Pleroma") == "Pleroma"
assert binary == :erlang.term_to_binary("Pleroma")
assert ConfigDB.from_binary(binary) == "Pleroma"
end end
test "phoenix module" do test "phoenix module" do
binary = ConfigDB.transform("Phoenix.Socket.V1.JSONSerializer") assert ConfigDB.to_elixir_types("Phoenix.Socket.V1.JSONSerializer") ==
assert binary == :erlang.term_to_binary(Phoenix.Socket.V1.JSONSerializer) Phoenix.Socket.V1.JSONSerializer
assert ConfigDB.from_binary(binary) == Phoenix.Socket.V1.JSONSerializer
end end
test "tesla module" do test "tesla module" do
binary = ConfigDB.transform("Tesla.Adapter.Hackney") assert ConfigDB.to_elixir_types("Tesla.Adapter.Hackney") == Tesla.Adapter.Hackney
assert binary == :erlang.term_to_binary(Tesla.Adapter.Hackney)
assert ConfigDB.from_binary(binary) == Tesla.Adapter.Hackney
end end
test "ExSyslogger module" do test "ExSyslogger module" do
binary = ConfigDB.transform("ExSyslogger") assert ConfigDB.to_elixir_types("ExSyslogger") == ExSyslogger
assert binary == :erlang.term_to_binary(ExSyslogger)
assert ConfigDB.from_binary(binary) == ExSyslogger
end end
test "Quack.Logger module" do test "Quack.Logger module" do
binary = ConfigDB.transform("Quack.Logger") assert ConfigDB.to_elixir_types("Quack.Logger") == Quack.Logger
assert binary == :erlang.term_to_binary(Quack.Logger)
assert ConfigDB.from_binary(binary) == Quack.Logger
end end
test "Swoosh.Adapters modules" do test "Swoosh.Adapters modules" do
binary = ConfigDB.transform("Swoosh.Adapters.SMTP") assert ConfigDB.to_elixir_types("Swoosh.Adapters.SMTP") == Swoosh.Adapters.SMTP
assert binary == :erlang.term_to_binary(Swoosh.Adapters.SMTP) assert ConfigDB.to_elixir_types("Swoosh.Adapters.AmazonSES") == Swoosh.Adapters.AmazonSES
assert ConfigDB.from_binary(binary) == Swoosh.Adapters.SMTP
binary = ConfigDB.transform("Swoosh.Adapters.AmazonSES")
assert binary == :erlang.term_to_binary(Swoosh.Adapters.AmazonSES)
assert ConfigDB.from_binary(binary) == Swoosh.Adapters.AmazonSES
end end
test "sigil" do test "sigil" do
binary = ConfigDB.transform("~r[comp[lL][aA][iI][nN]er]") assert ConfigDB.to_elixir_types("~r[comp[lL][aA][iI][nN]er]") == ~r/comp[lL][aA][iI][nN]er/
assert binary == :erlang.term_to_binary(~r/comp[lL][aA][iI][nN]er/)
assert ConfigDB.from_binary(binary) == ~r/comp[lL][aA][iI][nN]er/
end end
test "link sigil" do test "link sigil" do
binary = ConfigDB.transform("~r/https:\/\/example.com/") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/") == ~r/https:\/\/example.com/
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/
end end
test "link sigil with um modifiers" do test "link sigil with um modifiers" do
binary = ConfigDB.transform("~r/https:\/\/example.com/um") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/um") ==
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/um) ~r/https:\/\/example.com/um
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/um
end end
test "link sigil with i modifier" do test "link sigil with i modifier" do
binary = ConfigDB.transform("~r/https:\/\/example.com/i") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/i") == ~r/https:\/\/example.com/i
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/i)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/i
end end
test "link sigil with s modifier" do test "link sigil with s modifier" do
binary = ConfigDB.transform("~r/https:\/\/example.com/s") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/s") == ~r/https:\/\/example.com/s
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/s)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/s
end end
test "raise if valid delimiter not found" do test "raise if valid delimiter not found" do
assert_raise ArgumentError, "valid delimiter for Regex expression not found", fn -> assert_raise ArgumentError, "valid delimiter for Regex expression not found", fn ->
ConfigDB.transform("~r/https://[]{}<>\"'()|example.com/s") ConfigDB.to_elixir_types("~r/https://[]{}<>\"'()|example.com/s")
end end
end end
test "2 child tuple" do test "2 child tuple" do
binary = ConfigDB.transform(%{"tuple" => ["v1", ":v2"]}) assert ConfigDB.to_elixir_types(%{"tuple" => ["v1", ":v2"]}) == {"v1", :v2}
assert binary == :erlang.term_to_binary({"v1", :v2})
assert ConfigDB.from_binary(binary) == {"v1", :v2}
end end
test "proxy tuple with localhost" do test "proxy tuple with localhost" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{ "tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}] }) == {:proxy_url, {:socks5, :localhost, 1234}}
})
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, :localhost, 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, :localhost, 1234}}
end end
test "proxy tuple with domain" do test "proxy tuple with domain" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{ "tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}]
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}] }) == {:proxy_url, {:socks5, 'domain.com', 1234}}
})
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, 'domain.com', 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, 'domain.com', 1234}}
end end
test "proxy tuple with ip" do test "proxy tuple with ip" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{ "tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}]
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}] }) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}}
})
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}}
end end
test "tuple with n childs" do test "tuple with n childs" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{ "tuple" => [
"tuple" => [ "v1",
"v1", ":v2",
":v2", "Pleroma.Bookmark",
"Pleroma.Bookmark", 150,
150, false,
false, "Phoenix.Socket.V1.JSONSerializer"
"Phoenix.Socket.V1.JSONSerializer" ]
] }) == {"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
})
assert binary ==
:erlang.term_to_binary(
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
)
assert ConfigDB.from_binary(binary) ==
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
end end
test "map with string key" do test "map with string key" do
binary = ConfigDB.transform(%{"key" => "value"}) assert ConfigDB.to_elixir_types(%{"key" => "value"}) == %{"key" => "value"}
assert binary == :erlang.term_to_binary(%{"key" => "value"})
assert ConfigDB.from_binary(binary) == %{"key" => "value"}
end end
test "map with atom key" do test "map with atom key" do
binary = ConfigDB.transform(%{":key" => "value"}) assert ConfigDB.to_elixir_types(%{":key" => "value"}) == %{key: "value"}
assert binary == :erlang.term_to_binary(%{key: "value"})
assert ConfigDB.from_binary(binary) == %{key: "value"}
end end
test "list of strings" do test "list of strings" do
binary = ConfigDB.transform(["v1", "v2", "v3"]) assert ConfigDB.to_elixir_types(["v1", "v2", "v3"]) == ["v1", "v2", "v3"]
assert binary == :erlang.term_to_binary(["v1", "v2", "v3"])
assert ConfigDB.from_binary(binary) == ["v1", "v2", "v3"]
end end
test "list of modules" do test "list of modules" do
binary = ConfigDB.transform(["Pleroma.Repo", "Pleroma.Activity"]) assert ConfigDB.to_elixir_types(["Pleroma.Repo", "Pleroma.Activity"]) == [
assert binary == :erlang.term_to_binary([Pleroma.Repo, Pleroma.Activity]) Pleroma.Repo,
assert ConfigDB.from_binary(binary) == [Pleroma.Repo, Pleroma.Activity] Pleroma.Activity
]
end end
test "list of atoms" do test "list of atoms" do
binary = ConfigDB.transform([":v1", ":v2", ":v3"]) assert ConfigDB.to_elixir_types([":v1", ":v2", ":v3"]) == [:v1, :v2, :v3]
assert binary == :erlang.term_to_binary([:v1, :v2, :v3])
assert ConfigDB.from_binary(binary) == [:v1, :v2, :v3]
end end
test "list of mixed values" do test "list of mixed values" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([ "v1",
"v1", ":v2",
":v2", "Pleroma.Repo",
"Pleroma.Repo", "Phoenix.Socket.V1.JSONSerializer",
"Phoenix.Socket.V1.JSONSerializer", 15,
15, false
false ]) == [
])
assert binary ==
:erlang.term_to_binary([
"v1",
:v2,
Pleroma.Repo,
Phoenix.Socket.V1.JSONSerializer,
15,
false
])
assert ConfigDB.from_binary(binary) == [
"v1", "v1",
:v2, :v2,
Pleroma.Repo, Pleroma.Repo,
@ -470,40 +379,17 @@ test "list of mixed values" do
end end
test "simple keyword" do test "simple keyword" do
binary = ConfigDB.transform([%{"tuple" => [":key", "value"]}]) assert ConfigDB.to_elixir_types([%{"tuple" => [":key", "value"]}]) == [key: "value"]
assert binary == :erlang.term_to_binary([{:key, "value"}])
assert ConfigDB.from_binary(binary) == [{:key, "value"}]
assert ConfigDB.from_binary(binary) == [key: "value"]
end
test "keyword with partial_chain key" do
binary =
ConfigDB.transform([%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}])
assert binary == :erlang.term_to_binary(partial_chain: &:hackney_connect.partial_chain/1)
assert ConfigDB.from_binary(binary) == [partial_chain: &:hackney_connect.partial_chain/1]
end end
test "keyword" do test "keyword" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([ %{"tuple" => [":types", "Pleroma.PostgresTypes"]},
%{"tuple" => [":types", "Pleroma.PostgresTypes"]}, %{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]},
%{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]}, %{"tuple" => [":migration_lock", nil]},
%{"tuple" => [":migration_lock", nil]}, %{"tuple" => [":key1", 150]},
%{"tuple" => [":key1", 150]}, %{"tuple" => [":key2", "string"]}
%{"tuple" => [":key2", "string"]} ]) == [
])
assert binary ==
:erlang.term_to_binary(
types: Pleroma.PostgresTypes,
telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil,
key1: 150,
key2: "string"
)
assert ConfigDB.from_binary(binary) == [
types: Pleroma.PostgresTypes, types: Pleroma.PostgresTypes,
telemetry_event: [Pleroma.Repo.Instrumenter], telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil, migration_lock: nil,
@ -512,86 +398,60 @@ test "keyword" do
] ]
end end
test "complex keyword with nested mixed childs" do test "trandformed keyword" do
binary = assert ConfigDB.to_elixir_types(a: 1, b: 2, c: "string") == [a: 1, b: 2, c: "string"]
ConfigDB.transform([ end
%{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]},
%{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
%{"tuple" => [":link_name", true]},
%{"tuple" => [":proxy_remote", false]},
%{"tuple" => [":common_map", %{":key" => "value"}]},
%{
"tuple" => [
":proxy_opts",
[
%{"tuple" => [":redirect_on_failure", false]},
%{"tuple" => [":max_body_length", 1_048_576]},
%{
"tuple" => [
":http",
[%{"tuple" => [":follow_redirect", true]}, %{"tuple" => [":pool", ":upload"]}]
]
}
]
]
}
])
assert binary == test "complex keyword with nested mixed childs" do
:erlang.term_to_binary( assert ConfigDB.to_elixir_types([
uploader: Pleroma.Uploaders.Local, %{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]},
filters: [Pleroma.Upload.Filter.Dedupe], %{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
link_name: true, %{"tuple" => [":link_name", true]},
proxy_remote: false, %{"tuple" => [":proxy_remote", false]},
common_map: %{key: "value"}, %{"tuple" => [":common_map", %{":key" => "value"}]},
proxy_opts: [ %{
redirect_on_failure: false, "tuple" => [
max_body_length: 1_048_576, ":proxy_opts",
http: [ [
follow_redirect: true, %{"tuple" => [":redirect_on_failure", false]},
pool: :upload %{"tuple" => [":max_body_length", 1_048_576]},
%{
"tuple" => [
":http",
[
%{"tuple" => [":follow_redirect", true]},
%{"tuple" => [":pool", ":upload"]}
]
]
}
] ]
] ]
) }
]) == [
assert ConfigDB.from_binary(binary) == uploader: Pleroma.Uploaders.Local,
[ filters: [Pleroma.Upload.Filter.Dedupe],
uploader: Pleroma.Uploaders.Local, link_name: true,
filters: [Pleroma.Upload.Filter.Dedupe], proxy_remote: false,
link_name: true, common_map: %{key: "value"},
proxy_remote: false, proxy_opts: [
common_map: %{key: "value"}, redirect_on_failure: false,
proxy_opts: [ max_body_length: 1_048_576,
redirect_on_failure: false, http: [
max_body_length: 1_048_576, follow_redirect: true,
http: [ pool: :upload
follow_redirect: true,
pool: :upload
]
] ]
] ]
]
end end
test "common keyword" do test "common keyword" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([ %{"tuple" => [":level", ":warn"]},
%{"tuple" => [":level", ":warn"]}, %{"tuple" => [":meta", [":all"]]},
%{"tuple" => [":meta", [":all"]]}, %{"tuple" => [":path", ""]},
%{"tuple" => [":path", ""]}, %{"tuple" => [":val", nil]},
%{"tuple" => [":val", nil]}, %{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]}
%{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]} ]) == [
])
assert binary ==
:erlang.term_to_binary(
level: :warn,
meta: [:all],
path: "",
val: nil,
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
)
assert ConfigDB.from_binary(binary) == [
level: :warn, level: :warn,
meta: [:all], meta: [:all],
path: "", path: "",
@ -601,98 +461,73 @@ test "common keyword" do
end end
test "complex keyword with sigil" do test "complex keyword with sigil" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([ %{"tuple" => [":federated_timeline_removal", []]},
%{"tuple" => [":federated_timeline_removal", []]}, %{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]},
%{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]}, %{"tuple" => [":replace", []]}
%{"tuple" => [":replace", []]} ]) == [
]) federated_timeline_removal: [],
reject: [~r/comp[lL][aA][iI][nN]er/],
assert binary == replace: []
:erlang.term_to_binary( ]
federated_timeline_removal: [],
reject: [~r/comp[lL][aA][iI][nN]er/],
replace: []
)
assert ConfigDB.from_binary(binary) ==
[federated_timeline_removal: [], reject: [~r/comp[lL][aA][iI][nN]er/], replace: []]
end end
test "complex keyword with tuples with more than 2 values" do test "complex keyword with tuples with more than 2 values" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([ %{
%{ "tuple" => [
"tuple" => [ ":http",
":http", [
[ %{
%{ "tuple" => [
"tuple" => [ ":key1",
":key1", [
[ %{
%{ "tuple" => [
"tuple" => [ ":_",
":_", [
[ %{
%{ "tuple" => [
"tuple" => [ "/api/v1/streaming",
"/api/v1/streaming", "Pleroma.Web.MastodonAPI.WebsocketHandler",
"Pleroma.Web.MastodonAPI.WebsocketHandler", []
[] ]
] },
}, %{
%{ "tuple" => [
"tuple" => [ "/websocket",
"/websocket", "Phoenix.Endpoint.CowboyWebSocket",
"Phoenix.Endpoint.CowboyWebSocket", %{
%{ "tuple" => [
"tuple" => [ "Phoenix.Transports.WebSocket",
"Phoenix.Transports.WebSocket", %{
%{ "tuple" => [
"tuple" => [ "Pleroma.Web.Endpoint",
"Pleroma.Web.Endpoint", "Pleroma.Web.UserSocket",
"Pleroma.Web.UserSocket", []
[] ]
] }
} ]
] }
} ]
] },
}, %{
%{ "tuple" => [
"tuple" => [ ":_",
":_", "Phoenix.Endpoint.Cowboy2Handler",
"Phoenix.Endpoint.Cowboy2Handler", %{"tuple" => ["Pleroma.Web.Endpoint", []]}
%{"tuple" => ["Pleroma.Web.Endpoint", []]} ]
] }
} ]
] ]
] }
} ]
] ]
] }
}
]
]
}
])
assert binary ==
:erlang.term_to_binary(
http: [
key1: [
_: [
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
{Phoenix.Transports.WebSocket,
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, []}}},
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
]
] ]
] ]
) }
]) == [
assert ConfigDB.from_binary(binary) == [
http: [ http: [
key1: [ key1: [
{:_, {:_,


@ -6,9 +6,9 @@ defmodule Pleroma.Config.TransferTaskTest do
use Pleroma.DataCase use Pleroma.DataCase
import ExUnit.CaptureLog import ExUnit.CaptureLog
import Pleroma.Factory
alias Pleroma.Config.TransferTask alias Pleroma.Config.TransferTask
alias Pleroma.ConfigDB
setup do: clear_config(:configurable_from_database, true) setup do: clear_config(:configurable_from_database, true)
@ -19,31 +19,11 @@ test "transfer config values from db to env" do
refute Application.get_env(:postgrex, :test_key) refute Application.get_env(:postgrex, :test_key)
initial = Application.get_env(:logger, :level) initial = Application.get_env(:logger, :level)
ConfigDB.create(%{ insert(:config, key: :test_key, value: [live: 2, com: 3])
group: ":pleroma", insert(:config, group: :idna, key: :test_key, value: [live: 15, com: 35])
key: ":test_key", insert(:config, group: :quack, key: :test_key, value: [:test_value1, :test_value2])
value: [live: 2, com: 3] insert(:config, group: :postgrex, key: :test_key, value: :value)
}) insert(:config, group: :logger, key: :level, value: :debug)
ConfigDB.create(%{
group: ":idna",
key: ":test_key",
value: [live: 15, com: 35]
})
ConfigDB.create(%{
group: ":quack",
key: ":test_key",
value: [:test_value1, :test_value2]
})
ConfigDB.create(%{
group: ":postgrex",
key: ":test_key",
value: :value
})
ConfigDB.create(%{group: ":logger", key: ":level", value: :debug})
TransferTask.start_link([]) TransferTask.start_link([])
@ -66,17 +46,8 @@ test "transfer config values for 1 group and some keys" do
level = Application.get_env(:quack, :level) level = Application.get_env(:quack, :level)
meta = Application.get_env(:quack, :meta) meta = Application.get_env(:quack, :meta)
ConfigDB.create(%{ insert(:config, group: :quack, key: :level, value: :info)
group: ":quack", insert(:config, group: :quack, key: :meta, value: [:none])
key: ":level",
value: :info
})
ConfigDB.create(%{
group: ":quack",
key: ":meta",
value: [:none]
})
TransferTask.start_link([]) TransferTask.start_link([])
@ -95,17 +66,8 @@ test "transfer config values with full subkey update" do
clear_config(:emoji) clear_config(:emoji)
clear_config(:assets) clear_config(:assets)
ConfigDB.create(%{ insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]])
group: ":pleroma", insert(:config, key: :assets, value: [mascots: [a: 1, b: 2]])
key: ":emoji",
value: [groups: [a: 1, b: 2]]
})
ConfigDB.create(%{
group: ":pleroma",
key: ":assets",
value: [mascots: [a: 1, b: 2]]
})
TransferTask.start_link([]) TransferTask.start_link([])
@ -122,12 +84,7 @@ test "transfer config values with full subkey update" do
test "don't restart if no reboot time settings were changed" do test "don't restart if no reboot time settings were changed" do
clear_config(:emoji) clear_config(:emoji)
insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]])
ConfigDB.create(%{
group: ":pleroma",
key: ":emoji",
value: [groups: [a: 1, b: 2]]
})
refute String.contains?( refute String.contains?(
capture_log(fn -> TransferTask.start_link([]) end), capture_log(fn -> TransferTask.start_link([]) end),
@ -137,25 +94,13 @@ test "don't restart if no reboot time settings were changed" do
test "on reboot time key" do test "on reboot time key" do
clear_config(:chat) clear_config(:chat)
insert(:config, key: :chat, value: [enabled: false])
ConfigDB.create(%{
group: ":pleroma",
key: ":chat",
value: [enabled: false]
})
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted" assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
end end
test "on reboot time subkey" do test "on reboot time subkey" do
clear_config(Pleroma.Captcha) clear_config(Pleroma.Captcha)
insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
ConfigDB.create(%{
group: ":pleroma",
key: "Pleroma.Captcha",
value: [seconds_valid: 60]
})
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted" assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
end end
@ -163,17 +108,8 @@ test "don't restart pleroma on reboot time key and subkey if there is false flag
clear_config(:chat) clear_config(:chat)
clear_config(Pleroma.Captcha) clear_config(Pleroma.Captcha)
ConfigDB.create(%{ insert(:config, key: :chat, value: [enabled: false])
group: ":pleroma", insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
key: ":chat",
value: [enabled: false]
})
ConfigDB.create(%{
group: ":pleroma",
key: "Pleroma.Captcha",
value: [seconds_valid: 60]
})
refute String.contains?( refute String.contains?(
capture_log(fn -> TransferTask.load_and_update_env([], false) end), capture_log(fn -> TransferTask.load_and_update_env([], false) end),


@@ -396,24 +396,17 @@ def registration_factory do
     }
   end

-  def config_factory do
+  def config_factory(attrs \\ %{}) do
     %Pleroma.ConfigDB{
-      key:
-        sequence(:key, fn key ->
-          # Atom dynamic registration hack in tests
-          "some_key_#{key}"
-          |> String.to_atom()
-          |> inspect()
-        end),
-      group: ":pleroma",
+      key: sequence(:key, &String.to_atom("some_key_#{&1}")),
+      group: :pleroma,
       value:
         sequence(
           :value,
-          fn key ->
-            :erlang.term_to_binary(%{another_key: "#{key}somevalue", another: "#{key}somevalue"})
-          end
+          &%{another_key: "#{&1}somevalue", another: "#{&1}somevalue"}
        )
     }
+    |> merge_attributes(attrs)
   end

   def marker_factory do
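With attrs merged in, the test suites elsewhere in this merge request can seed configuration with direct inserts, for example:

    insert(:config, group: :quack, key: :level, value: :info)
    insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])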


@ -5,6 +5,8 @@
defmodule Mix.Tasks.Pleroma.ConfigTest do defmodule Mix.Tasks.Pleroma.ConfigTest do
use Pleroma.DataCase use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.ConfigDB alias Pleroma.ConfigDB
alias Pleroma.Repo alias Pleroma.Repo
@ -49,24 +51,19 @@ test "filtered settings are migrated to db" do
refute ConfigDB.get_by_params(%{group: ":pleroma", key: "Pleroma.Repo"}) refute ConfigDB.get_by_params(%{group: ":pleroma", key: "Pleroma.Repo"})
refute ConfigDB.get_by_params(%{group: ":postgrex", key: ":json_library"}) refute ConfigDB.get_by_params(%{group: ":postgrex", key: ":json_library"})
assert ConfigDB.from_binary(config1.value) == [key: "value", key2: [Repo]] assert config1.value == [key: "value", key2: [Repo]]
assert ConfigDB.from_binary(config2.value) == [key: "value2", key2: ["Activity"]] assert config2.value == [key: "value2", key2: ["Activity"]]
assert ConfigDB.from_binary(config3.value) == :info assert config3.value == :info
end end
test "config table is truncated before migration" do test "config table is truncated before migration" do
ConfigDB.create(%{ insert(:config, key: :first_setting, value: [key: "value", key2: ["Activity"]])
group: ":pleroma",
key: ":first_setting",
value: [key: "value", key2: ["Activity"]]
})
assert Repo.aggregate(ConfigDB, :count, :id) == 1 assert Repo.aggregate(ConfigDB, :count, :id) == 1
Mix.Tasks.Pleroma.Config.migrate_to_db("test/fixtures/config/temp.secret.exs") Mix.Tasks.Pleroma.Config.migrate_to_db("test/fixtures/config/temp.secret.exs")
config = ConfigDB.get_by_params(%{group: ":pleroma", key: ":first_setting"}) config = ConfigDB.get_by_params(%{group: ":pleroma", key: ":first_setting"})
assert ConfigDB.from_binary(config.value) == [key: "value", key2: [Repo]] assert config.value == [key: "value", key2: [Repo]]
end end
end end
@ -82,19 +79,9 @@ test "config table is truncated before migration" do
end end
test "settings are migrated to file and deleted from db", %{temp_file: temp_file} do test "settings are migrated to file and deleted from db", %{temp_file: temp_file} do
ConfigDB.create(%{ insert(:config, key: :setting_first, value: [key: "value", key2: ["Activity"]])
group: ":pleroma", insert(:config, key: :setting_second, value: [key: "value2", key2: [Repo]])
key: ":setting_first", insert(:config, group: :quack, key: :level, value: :info)
value: [key: "value", key2: ["Activity"]]
})
ConfigDB.create(%{
group: ":pleroma",
key: ":setting_second",
value: [key: "value2", key2: [Repo]]
})
ConfigDB.create(%{group: ":quack", key: ":level", value: :info})
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"]) Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])
@ -107,9 +94,8 @@ test "settings are migrated to file and deleted from db", %{temp_file: temp_file
end end
test "load a settings with large values and pass to file", %{temp_file: temp_file} do test "load a settings with large values and pass to file", %{temp_file: temp_file} do
ConfigDB.create(%{ insert(:config,
group: ":pleroma", key: :instance,
key: ":instance",
value: [ value: [
name: "Pleroma", name: "Pleroma",
email: "example@example.com", email: "example@example.com",
@ -163,7 +149,6 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil
extended_nickname_format: true, extended_nickname_format: true,
multi_factor_authentication: [ multi_factor_authentication: [
totp: [ totp: [
# digits 6 or 8
digits: 6, digits: 6,
period: 30 period: 30
], ],
@ -173,7 +158,7 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil
] ]
] ]
] ]
}) )
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"]) Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])
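For reference, both migration directions exercised here correspond roughly to the Mix task invocations below (a sketch; the flags mirror the ones used in this test, and the task entry points are assumed from the module name):

    Mix.Tasks.Pleroma.Config.run(["migrate_to_db"])
    Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])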

View File

@ -6,21 +6,17 @@ defmodule Pleroma.Upload.Filter.MogrifyTest do
use Pleroma.DataCase use Pleroma.DataCase
import Mock import Mock
alias Pleroma.Config
alias Pleroma.Upload
alias Pleroma.Upload.Filter alias Pleroma.Upload.Filter
setup do: clear_config([Filter.Mogrify, :args])
test "apply mogrify filter" do test "apply mogrify filter" do
Config.put([Filter.Mogrify, :args], [{"tint", "40"}]) clear_config(Filter.Mogrify, args: [{"tint", "40"}])
File.cp!( File.cp!(
"test/fixtures/image.jpg", "test/fixtures/image.jpg",
"test/fixtures/image_tmp.jpg" "test/fixtures/image_tmp.jpg"
) )
upload = %Upload{ upload = %Pleroma.Upload{
name: "an… image.jpg", name: "an… image.jpg",
content_type: "image/jpg", content_type: "image/jpg",
path: Path.absname("test/fixtures/image_tmp.jpg"), path: Path.absname("test/fixtures/image_tmp.jpg"),

View File

@ -57,12 +57,12 @@ test "with settings only in db", %{conn: conn} do
] ]
} = json_response_and_validate_schema(conn, 200) } = json_response_and_validate_schema(conn, 200)
assert key1 == config1.key assert key1 == inspect(config1.key)
assert key2 == config2.key assert key2 == inspect(config2.key)
end end
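Keys are stored as atoms now, so the API-side comparison goes through inspect/1, which reproduces the old string representation:

    inspect(:instance)       #=> ":instance"
    inspect(Pleroma.Captcha) #=> "Pleroma.Captcha"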
test "db is added to settings that are in db", %{conn: conn} do test "db is added to settings that are in db", %{conn: conn} do
_config = insert(:config, key: ":instance", value: ConfigDB.to_binary(name: "Some name")) _config = insert(:config, key: ":instance", value: [name: "Some name"])
%{"configs" => configs} = %{"configs" => configs} =
conn conn
@ -83,7 +83,7 @@ test "merged default setting with db settings", %{conn: conn} do
config3 = config3 =
insert(:config, insert(:config,
value: ConfigDB.to_binary(k1: :v1, k2: :v2) value: [k1: :v1, k2: :v2]
) )
%{"configs" => configs} = %{"configs" => configs} =
@ -93,42 +93,45 @@ test "merged default setting with db settings", %{conn: conn} do
assert length(configs) > 3 assert length(configs) > 3
saved_configs = [config1, config2, config3]
keys = Enum.map(saved_configs, &inspect(&1.key))
received_configs = received_configs =
Enum.filter(configs, fn %{"group" => group, "key" => key} -> Enum.filter(configs, fn %{"group" => group, "key" => key} ->
group == ":pleroma" and key in [config1.key, config2.key, config3.key] group == ":pleroma" and key in keys
end) end)
assert length(received_configs) == 3 assert length(received_configs) == 3
db_keys = db_keys =
config3.value config3.value
|> ConfigDB.from_binary()
|> Keyword.keys() |> Keyword.keys()
|> ConfigDB.convert() |> ConfigDB.to_json_types()
keys = Enum.map(saved_configs -- [config3], &inspect(&1.key))
values = Enum.map(saved_configs, &ConfigDB.to_json_types(&1.value))
mapset_keys = MapSet.new(keys ++ db_keys)
Enum.each(received_configs, fn %{"value" => value, "db" => db} -> Enum.each(received_configs, fn %{"value" => value, "db" => db} ->
assert db in [[config1.key], [config2.key], db_keys] db = MapSet.new(db)
assert MapSet.subset?(db, mapset_keys)
assert value in [ assert value in values
ConfigDB.from_binary_with_convert(config1.value),
ConfigDB.from_binary_with_convert(config2.value),
ConfigDB.from_binary_with_convert(config3.value)
]
end) end)
end end
test "subkeys with full update right merge", %{conn: conn} do test "subkeys with full update right merge", %{conn: conn} do
config1 = insert(:config,
insert(:config, key: ":emoji",
key: ":emoji", value: [groups: [a: 1, b: 2], key: [a: 1]]
value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1]) )
)
config2 = insert(:config,
insert(:config, key: ":assets",
key: ":assets", value: [mascots: [a: 1, b: 2], key: [a: 1]]
value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1]) )
)
%{"configs" => configs} = %{"configs" => configs} =
conn conn
@ -137,14 +140,14 @@ test "subkeys with full update right merge", %{conn: conn} do
vals = vals =
Enum.filter(configs, fn %{"group" => group, "key" => key} -> Enum.filter(configs, fn %{"group" => group, "key" => key} ->
group == ":pleroma" and key in [config1.key, config2.key] group == ":pleroma" and key in [":emoji", ":assets"]
end) end)
emoji = Enum.find(vals, fn %{"key" => key} -> key == ":emoji" end) emoji = Enum.find(vals, fn %{"key" => key} -> key == ":emoji" end)
assets = Enum.find(vals, fn %{"key" => key} -> key == ":assets" end) assets = Enum.find(vals, fn %{"key" => key} -> key == ":assets" end)
emoji_val = ConfigDB.transform_with_out_binary(emoji["value"]) emoji_val = ConfigDB.to_elixir_types(emoji["value"])
assets_val = ConfigDB.transform_with_out_binary(assets["value"]) assets_val = ConfigDB.to_elixir_types(assets["value"])
assert emoji_val[:groups] == [a: 1, b: 2] assert emoji_val[:groups] == [a: 1, b: 2]
assert assets_val[:mascots] == [a: 1, b: 2] assert assets_val[:mascots] == [a: 1, b: 2]
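to_json_types/1 and to_elixir_types/1 replace the old *_binary helpers for converting between stored terms and the JSON transport shapes; a sketch of the expected round trip, with the shapes assumed from the payloads used in these tests:

    ConfigDB.to_json_types(key1: 1)                         #=> [%{"tuple" => [":key1", 1]}]
    ConfigDB.to_elixir_types([%{"tuple" => [":key1", 1]}])  #=> [key1: 1]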
@ -277,7 +280,8 @@ test "create new config setting in db", %{conn: conn} do
"value" => %{"tuple" => ["string", "Pleroma.Captcha.NotReal", []]}, "value" => %{"tuple" => ["string", "Pleroma.Captcha.NotReal", []]},
"db" => [":key5"] "db" => [":key5"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:pleroma, :key1) == "value1" assert Application.get_env(:pleroma, :key1) == "value1"
@ -357,7 +361,8 @@ test "save configs setting without explicit key", %{conn: conn} do
"value" => "https://hooks.slack.com/services/KEY", "value" => "https://hooks.slack.com/services/KEY",
"db" => [":webhook_url"] "db" => [":webhook_url"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:quack, :level) == :info assert Application.get_env(:quack, :level) == :info
@ -366,14 +371,14 @@ test "save configs setting without explicit key", %{conn: conn} do
end end
test "saving config with partial update", %{conn: conn} do test "saving config with partial update", %{conn: conn} do
config = insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: 2)) insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: 2))
conn = conn =
conn conn
|> put_req_header("content-type", "application/json") |> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{group: config.group, key: config.key, value: [%{"tuple" => [":key3", 3]}]} %{group: ":pleroma", key: ":key1", value: [%{"tuple" => [":key3", 3]}]}
] ]
}) })
@ -389,7 +394,8 @@ test "saving config with partial update", %{conn: conn} do
], ],
"db" => [":key1", ":key2", ":key3"] "db" => [":key1", ":key2", ":key3"]
} }
] ],
"need_reboot" => false
} }
end end
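Every config response now carries the need_reboot flag next to configs, which is why the expected maps throughout this file gain the extra entry; a sketch of how a test can read it back:

    %{"configs" => configs, "need_reboot" => false} = json_response_and_validate_schema(conn, 200)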
@ -500,8 +506,7 @@ test "update setting which need reboot, don't change reboot flag until reboot",
end end
test "saving config with nested merge", %{conn: conn} do test "saving config with nested merge", %{conn: conn} do
config = insert(:config, key: :key1, value: [key1: 1, key2: [k1: 1, k2: 2]])
insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: [k1: 1, k2: 2]))
conn = conn =
conn conn
@ -509,8 +514,8 @@ test "saving config with nested merge", %{conn: conn} do
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{ %{
group: config.group, group: ":pleroma",
key: config.key, key: ":key1",
value: [ value: [
%{"tuple" => [":key3", 3]}, %{"tuple" => [":key3", 3]},
%{ %{
@ -548,7 +553,8 @@ test "saving config with nested merge", %{conn: conn} do
], ],
"db" => [":key1", ":key3", ":key2"] "db" => [":key1", ":key3", ":key2"]
} }
] ],
"need_reboot" => false
} }
end end
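Nested keyword values are deep-merged rather than replaced, so subkeys of :key2 that the request does not mention survive the partial update; a sketched check, inferred from the inserted value above rather than taken from the test itself:

    # presumably still present after the merge
    assert Application.get_env(:pleroma, :key1)[:key2][:k1] == 1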
@ -588,7 +594,8 @@ test "saving special atoms", %{conn: conn} do
], ],
"db" => [":ssl_options"] "db" => [":ssl_options"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:pleroma, :key1) == [ assert Application.get_env(:pleroma, :key1) == [
@ -600,12 +607,11 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
backends = Application.get_env(:logger, :backends) backends = Application.get_env(:logger, :backends)
on_exit(fn -> Application.put_env(:logger, :backends, backends) end) on_exit(fn -> Application.put_env(:logger, :backends, backends) end)
config = insert(:config,
insert(:config, group: :logger,
group: ":logger", key: :backends,
key: ":backends", value: []
value: :erlang.term_to_binary([]) )
)
Pleroma.Config.TransferTask.load_and_update_env([], false) Pleroma.Config.TransferTask.load_and_update_env([], false)
@ -617,8 +623,8 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{ %{
group: config.group, group: ":logger",
key: config.key, key: ":backends",
value: [":console"] value: [":console"]
} }
] ]
@ -634,7 +640,8 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
], ],
"db" => [":backends"] "db" => [":backends"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:logger, :backends) == [ assert Application.get_env(:logger, :backends) == [
@ -643,19 +650,18 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
end end
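Keys on the full-update list, such as :backends under :logger, are replaced outright instead of merged, so the previously stored empty list does not linger; a sketched expectation (assuming ":console" is cast back to the :console atom):

    assert Application.get_env(:logger, :backends) == [:console]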
test "saving full setting if value is not keyword", %{conn: conn} do test "saving full setting if value is not keyword", %{conn: conn} do
config = insert(:config,
insert(:config, group: :tesla,
group: ":tesla", key: :adapter,
key: ":adapter", value: Tesla.Adapter.Hackey
value: :erlang.term_to_binary(Tesla.Adapter.Hackey) )
)
conn = conn =
conn conn
|> put_req_header("content-type", "application/json") |> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{group: config.group, key: config.key, value: "Tesla.Adapter.Httpc"} %{group: ":tesla", key: ":adapter", value: "Tesla.Adapter.Httpc"}
] ]
}) })
@ -667,7 +673,8 @@ test "saving full setting if value is not keyword", %{conn: conn} do
"value" => "Tesla.Adapter.Httpc", "value" => "Tesla.Adapter.Httpc",
"db" => [":adapter"] "db" => [":adapter"]
} }
] ],
"need_reboot" => false
} }
end end
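Non-keyword values such as module names travel as plain strings over the API and come back as modules in the runtime environment; a sketched expectation consistent with the response above (an assumption):

    assert Application.get_env(:tesla, :adapter) == Tesla.Adapter.Httpc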
@ -677,13 +684,13 @@ test "update config setting & delete with fallback to default value", %{
token: token token: token
} do } do
ueberauth = Application.get_env(:ueberauth, Ueberauth) ueberauth = Application.get_env(:ueberauth, Ueberauth)
config1 = insert(:config, key: ":keyaa1") insert(:config, key: :keyaa1)
config2 = insert(:config, key: ":keyaa2") insert(:config, key: :keyaa2)
config3 = config3 =
insert(:config, insert(:config,
group: ":ueberauth", group: :ueberauth,
key: "Ueberauth" key: Ueberauth
) )
conn = conn =
@ -691,8 +698,8 @@ test "update config setting & delete with fallback to default value", %{
|> put_req_header("content-type", "application/json") |> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{group: config1.group, key: config1.key, value: "another_value"}, %{group: ":pleroma", key: ":keyaa1", value: "another_value"},
%{group: config2.group, key: config2.key, value: "another_value"} %{group: ":pleroma", key: ":keyaa2", value: "another_value"}
] ]
}) })
@ -700,22 +707,23 @@ test "update config setting & delete with fallback to default value", %{
"configs" => [ "configs" => [
%{ %{
"group" => ":pleroma", "group" => ":pleroma",
"key" => config1.key, "key" => ":keyaa1",
"value" => "another_value", "value" => "another_value",
"db" => [":keyaa1"] "db" => [":keyaa1"]
}, },
%{ %{
"group" => ":pleroma", "group" => ":pleroma",
"key" => config2.key, "key" => ":keyaa2",
"value" => "another_value", "value" => "another_value",
"db" => [":keyaa2"] "db" => [":keyaa2"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:pleroma, :keyaa1) == "another_value" assert Application.get_env(:pleroma, :keyaa1) == "another_value"
assert Application.get_env(:pleroma, :keyaa2) == "another_value" assert Application.get_env(:pleroma, :keyaa2) == "another_value"
assert Application.get_env(:ueberauth, Ueberauth) == ConfigDB.from_binary(config3.value) assert Application.get_env(:ueberauth, Ueberauth) == config3.value
conn = conn =
build_conn() build_conn()
@ -724,7 +732,7 @@ test "update config setting & delete with fallback to default value", %{
|> put_req_header("content-type", "application/json") |> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{group: config2.group, key: config2.key, delete: true}, %{group: ":pleroma", key: ":keyaa2", delete: true},
%{ %{
group: ":ueberauth", group: ":ueberauth",
key: "Ueberauth", key: "Ueberauth",
@ -734,7 +742,8 @@ test "update config setting & delete with fallback to default value", %{
}) })
assert json_response_and_validate_schema(conn, 200) == %{ assert json_response_and_validate_schema(conn, 200) == %{
"configs" => [] "configs" => [],
"need_reboot" => false
} }
assert Application.get_env(:ueberauth, Ueberauth) == ueberauth assert Application.get_env(:ueberauth, Ueberauth) == ueberauth
@ -801,7 +810,8 @@ test "common config example", %{conn: conn} do
":name" ":name"
] ]
} }
] ],
"need_reboot" => false
} }
end end
@ -935,7 +945,8 @@ test "tuples with more than two values", %{conn: conn} do
], ],
"db" => [":http"] "db" => [":http"]
} }
] ],
"need_reboot" => false
} }
end end
@ -1000,7 +1011,8 @@ test "settings with nesting map", %{conn: conn} do
], ],
"db" => [":key2", ":key3"] "db" => [":key2", ":key3"]
} }
] ],
"need_reboot" => false
} }
end end
@ -1027,7 +1039,8 @@ test "value as map", %{conn: conn} do
"value" => %{"key" => "some_val"}, "value" => %{"key" => "some_val"},
"db" => [":key1"] "db" => [":key1"]
} }
] ],
"need_reboot" => false
} }
end end
@ -1077,16 +1090,16 @@ test "queues key as atom", %{conn: conn} do
":background" ":background"
] ]
} }
] ],
"need_reboot" => false
} }
end end
test "delete part of settings by atom subkeys", %{conn: conn} do test "delete part of settings by atom subkeys", %{conn: conn} do
config = insert(:config,
insert(:config, key: :keyaa1,
key: ":keyaa1", value: [subkey1: "val1", subkey2: "val2", subkey3: "val3"]
value: :erlang.term_to_binary(subkey1: "val1", subkey2: "val2", subkey3: "val3") )
)
conn = conn =
conn conn
@ -1094,8 +1107,8 @@ test "delete part of settings by atom subkeys", %{conn: conn} do
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{ %{
group: config.group, group: ":pleroma",
key: config.key, key: ":keyaa1",
subkeys: [":subkey1", ":subkey3"], subkeys: [":subkey1", ":subkey3"],
delete: true delete: true
} }
@ -1110,7 +1123,8 @@ test "delete part of settings by atom subkeys", %{conn: conn} do
"value" => [%{"tuple" => [":subkey2", "val2"]}], "value" => [%{"tuple" => [":subkey2", "val2"]}],
"db" => [":subkey2"] "db" => [":subkey2"]
} }
] ],
"need_reboot" => false
} }
end end
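With subkeys plus delete: true only the listed subkeys are removed, and the surviving entry matches the ":subkey2" reported in the response; a sketched check, inferred rather than copied from the test:

    assert Application.get_env(:pleroma, :keyaa1) == [subkey2: "val2"]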
@ -1236,6 +1250,90 @@ test "doesn't set keys not in the whitelist", %{conn: conn} do
assert Application.get_env(:pleroma, Pleroma.Captcha.NotReal) == "value5" assert Application.get_env(:pleroma, Pleroma.Captcha.NotReal) == "value5"
assert Application.get_env(:not_real, :anything) == "value6" assert Application.get_env(:not_real, :anything) == "value6"
end end
test "args for Pleroma.Upload.Filter.Mogrify with custom tuples", %{conn: conn} do
clear_config(Pleroma.Upload.Filter.Mogrify)
assert conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{
configs: [
%{
group: ":pleroma",
key: "Pleroma.Upload.Filter.Mogrify",
value: [
%{"tuple" => [":args", ["auto-orient", "strip"]]}
]
}
]
})
|> json_response_and_validate_schema(200) == %{
"configs" => [
%{
"group" => ":pleroma",
"key" => "Pleroma.Upload.Filter.Mogrify",
"value" => [
%{"tuple" => [":args", ["auto-orient", "strip"]]}
],
"db" => [":args"]
}
],
"need_reboot" => false
}
assert Config.get(Pleroma.Upload.Filter.Mogrify) == [args: ["auto-orient", "strip"]]
assert conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{
configs: [
%{
group: ":pleroma",
key: "Pleroma.Upload.Filter.Mogrify",
value: [
%{
"tuple" => [
":args",
[
"auto-orient",
"strip",
"{\"implode\", \"1\"}",
"{\"resize\", \"3840x1080>\"}"
]
]
}
]
}
]
})
|> json_response(200) == %{
"configs" => [
%{
"group" => ":pleroma",
"key" => "Pleroma.Upload.Filter.Mogrify",
"value" => [
%{
"tuple" => [
":args",
[
"auto-orient",
"strip",
"{\"implode\", \"1\"}",
"{\"resize\", \"3840x1080>\"}"
]
]
}
],
"db" => [":args"]
}
],
"need_reboot" => false
}
assert Config.get(Pleroma.Upload.Filter.Mogrify) == [
args: ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}]
]
end
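This last test covers the point of the merge request: adminFE sends mogrify arguments as plain strings, and strings shaped like "{\"flag\", \"value\"}" are converted back into tuples before being stored, so simple flags and tuple options can be mixed freely. A sketch of setting the same value directly in a test (assuming both forms behave identically at the filter level):

    clear_config(Pleroma.Upload.Filter.Mogrify,
      args: ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}]
    )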
end end
describe "GET /api/pleroma/admin/config/descriptions" do describe "GET /api/pleroma/admin/config/descriptions" do