Merge branch 'develop' into issue/1276

Maksim Pechnikov 2020-04-02 14:47:17 +03:00
commit dbcfac11b4
153 changed files with 4845 additions and 1658 deletions

View File

@@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
### Added
- NodeInfo: `pleroma:api/v1/notifications:include_types_filter` to the `features` list.
- Configuration: `:restrict_unauthenticated` setting, restrict access for unauthenticated users to timelines (public and federate), user profiles and statuses.
- New HTTP adapter [gun](https://github.com/ninenines/gun). The gun adapter requires OTP 22.2 or newer, otherwise Pleroma won't start; no OTP update is needed when staying on hackney (a one-line adapter config sketch follows this hunk).
<details>
<summary>API Changes</summary>
- Mastodon API: Support for `include_types` in `/api/v1/notifications`.
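For reference, the adapter switch noted above is a single Tesla setting; the gun default appears in the `config/config.exs` hunk later in this diff, and hackney remains available as a fallback (a sketch, not part of the changelog itself):

config :tesla, adapter: Tesla.Adapter.Gun
# or, on OTP releases older than 22.2, keep the previous default:
# config :tesla, adapter: Tesla.Adapter.Hackney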

View File

@@ -12,7 +12,7 @@ RUN apk add git gcc g++ musl-dev make &&\
mkdir release &&\
mix release --path release
FROM alpine:3.9
FROM alpine:3.11
ARG BUILD_DATE
ARG VCS_REF
@@ -33,7 +33,7 @@ ARG DATA=/var/lib/pleroma
RUN echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories &&\
apk update &&\
apk add ncurses postgresql-client &&\
apk add imagemagick ncurses postgresql-client &&\
adduser --system --shell /bin/false --home ${HOME} pleroma &&\
mkdir -p ${DATA}/uploads &&\
mkdir -p ${DATA}/static &&\

View File

@@ -0,0 +1,557 @@
defmodule Pleroma.LoadTesting.Activities do
@moduledoc """
Module for generating different activities.
"""
import Ecto.Query
import Pleroma.LoadTesting.Helper, only: [to_sec: 1]
alias Ecto.UUID
alias Pleroma.Constants
alias Pleroma.LoadTesting.Users
alias Pleroma.Repo
alias Pleroma.Web.CommonAPI
require Constants
@defaults [
iterations: 170,
friends_used: 20,
non_friends_used: 20
]
@max_concurrency 10
@visibility ~w(public private direct unlisted)
@types ~w(simple emoji mentions hell_thread attachment tag like reblog simple_thread remote)
@groups ~w(user friends non_friends)
@spec generate(User.t(), keyword()) :: :ok
def generate(user, opts \\ []) do
{:ok, _} =
Agent.start_link(fn -> %{} end,
name: :benchmark_state
)
opts = Keyword.merge(@defaults, opts)
friends =
user
|> Users.get_users(limit: opts[:friends_used], local: :local, friends?: true)
|> Enum.shuffle()
non_friends =
user
|> Users.get_users(limit: opts[:non_friends_used], local: :local, friends?: false)
|> Enum.shuffle()
task_data =
for visibility <- @visibility,
type <- @types,
group <- @groups,
do: {visibility, type, group}
IO.puts("Starting generating #{opts[:iterations]} iterations of activities...")
friends_thread = Enum.take(friends, 5)
non_friends_thread = Enum.take(non_friends, 5)
public_long_thread = fn ->
generate_long_thread("public", user, friends_thread, non_friends_thread, opts)
end
private_long_thread = fn ->
generate_long_thread("private", user, friends_thread, non_friends_thread, opts)
end
iterations = opts[:iterations]
{time, _} =
:timer.tc(fn ->
Enum.each(
1..iterations,
fn
i when i == iterations - 2 ->
spawn(public_long_thread)
spawn(private_long_thread)
generate_activities(user, friends, non_friends, Enum.shuffle(task_data), opts)
_ ->
generate_activities(user, friends, non_friends, Enum.shuffle(task_data), opts)
end
)
end)
IO.puts("Generating iterations of activities took #{to_sec(time)} sec.\n")
:ok
end
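# Typical invocation (a sketch; option names come from @defaults above, and `user`
# is expected to be the main local user produced by Pleroma.LoadTesting.Users.generate/1,
# as done by Mix.Tasks.Pleroma.LoadTesting further down in this diff):
#
#   Pleroma.LoadTesting.Activities.generate(user,
#     iterations: 170,
#     friends_used: 20,
#     non_friends_used: 20
#   )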
def generate_power_intervals(opts \\ []) do
count = Keyword.get(opts, :count, 20)
power = Keyword.get(opts, :power, 2)
IO.puts("Generating #{count} intervals for a power #{power} series...")
counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
sum = Enum.sum(counts)
densities =
Enum.map(counts, fn c ->
c / sum
end)
densities
|> Enum.reduce(0, fn density, acc ->
if acc == 0 do
[{0, density}]
else
[{_, lower} | _] = acc
[{lower, lower + density} | acc]
end
end)
|> Enum.reverse()
end
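# A worked example (hand-computed, not from the source): with count: 3 and power: 2
# the counts are [1.0, 4.0, 9.0] and their sum is 14.0, so the densities are roughly
# [0.071, 0.286, 0.643] and generate_power_intervals(count: 3, power: 2) returns
# [{0, 0.071}, {0.071, 0.357}, {0.357, 1.0}]. generate_tagged_activities/1 below maps
# a uniform random number onto these intervals, so tag_0 is used least and tag_2 most.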
def generate_tagged_activities(opts \\ []) do
tag_count = Keyword.get(opts, :tag_count, 20)
users = Keyword.get(opts, :users, Repo.all(Pleroma.User))
activity_count = Keyword.get(opts, :count, 200_000)
intervals = generate_power_intervals(count: tag_count)
IO.puts(
"Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
)
Enum.each(1..activity_count, fn _ ->
random = :rand.uniform()
i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
end)
end
defp generate_long_thread(visibility, user, friends, non_friends, _opts) do
group =
if visibility == "public",
do: "friends",
else: "user"
tasks = get_reply_tasks(visibility, group) |> Stream.cycle() |> Enum.take(50)
{:ok, activity} =
CommonAPI.post(user, %{
"status" => "Start of #{visibility} long thread",
"visibility" => visibility
})
Agent.update(:benchmark_state, fn state ->
key =
if visibility == "public",
do: :public_thread,
else: :private_thread
Map.put(state, key, activity)
end)
acc = {activity.id, ["@" <> user.nickname, "reply to long thread"]}
insert_replies_for_long_thread(tasks, visibility, user, friends, non_friends, acc)
IO.puts("Generating #{visibility} long thread ended\n")
end
defp insert_replies_for_long_thread(tasks, visibility, user, friends, non_friends, acc) do
Enum.reduce(tasks, acc, fn
"friend", {id, data} ->
friend = Enum.random(friends)
insert_reply(friend, List.delete(data, "@" <> friend.nickname), id, visibility)
"non_friend", {id, data} ->
non_friend = Enum.random(non_friends)
insert_reply(non_friend, List.delete(data, "@" <> non_friend.nickname), id, visibility)
"user", {id, data} ->
insert_reply(user, List.delete(data, "@" <> user.nickname), id, visibility)
end)
end
defp generate_activities(user, friends, non_friends, task_data, opts) do
Task.async_stream(
task_data,
fn {visibility, type, group} ->
insert_activity(type, visibility, group, user, friends, non_friends, opts)
end,
max_concurrency: @max_concurrency,
timeout: 30_000
)
|> Stream.run()
end
defp insert_activity("simple", visibility, group, user, friends, non_friends, _opts) do
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{"status" => "Simple status", "visibility" => visibility})
end
defp insert_activity("emoji", visibility, group, user, friends, non_friends, _opts) do
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{
"status" => "Simple status with emoji :firefox:",
"visibility" => visibility
})
end
defp insert_activity("mentions", visibility, group, user, friends, non_friends, _opts) do
user_mentions =
get_random_mentions(friends, Enum.random(0..3)) ++
get_random_mentions(non_friends, Enum.random(0..3))
user_mentions =
if Enum.random([true, false]),
do: ["@" <> user.nickname | user_mentions],
else: user_mentions
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{
"status" => Enum.join(user_mentions, ", ") <> " simple status with mentions",
"visibility" => visibility
})
end
defp insert_activity("hell_thread", visibility, group, user, friends, non_friends, _opts) do
mentions =
with {:ok, nil} <- Cachex.get(:user_cache, "hell_thread_mentions") do
cached =
([user | Enum.take(friends, 10)] ++ Enum.take(non_friends, 10))
|> Enum.map(&"@#{&1.nickname}")
|> Enum.join(", ")
Cachex.put(:user_cache, "hell_thread_mentions", cached)
cached
else
{:ok, cached} -> cached
end
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{
"status" => mentions <> " hell thread status",
"visibility" => visibility
})
end
defp insert_activity("attachment", visibility, group, user, friends, non_friends, _opts) do
actor = get_actor(group, user, friends, non_friends)
obj_data = %{
"actor" => actor.ap_id,
"name" => "4467-11.jpg",
"type" => "Document",
"url" => [
%{
"href" =>
"#{Pleroma.Web.base_url()}/media/b1b873552422a07bf53af01f3c231c841db4dfc42c35efde681abaf0f2a4eab7.jpg",
"mediaType" => "image/jpeg",
"type" => "Link"
}
]
}
object = Repo.insert!(%Pleroma.Object{data: obj_data})
{:ok, _activity} =
CommonAPI.post(actor, %{
"status" => "Post with attachment",
"visibility" => visibility,
"media_ids" => [object.id]
})
end
defp insert_activity("tag", visibility, group, user, friends, non_friends, _opts) do
{:ok, _activity} =
group
|> get_actor(user, friends, non_friends)
|> CommonAPI.post(%{"status" => "Status with #tag", "visibility" => visibility})
end
defp insert_activity("like", visibility, group, user, friends, non_friends, opts) do
actor = get_actor(group, user, friends, non_friends)
with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
{:ok, _activity, _object} <- CommonAPI.favorite(activity_id, actor) do
:ok
else
{:error, _} ->
insert_activity("like", visibility, group, user, friends, non_friends, opts)
nil ->
Process.sleep(15)
insert_activity("like", visibility, group, user, friends, non_friends, opts)
end
end
defp insert_activity("reblog", visibility, group, user, friends, non_friends, opts) do
actor = get_actor(group, user, friends, non_friends)
with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
{:ok, _activity, _object} <- CommonAPI.repeat(activity_id, actor) do
:ok
else
{:error, _} ->
insert_activity("reblog", visibility, group, user, friends, non_friends, opts)
nil ->
Process.sleep(15)
insert_activity("reblog", visibility, group, user, friends, non_friends, opts)
end
end
defp insert_activity("simple_thread", visibility, group, user, friends, non_friends, _opts)
when visibility in ["public", "unlisted", "private"] do
actor = get_actor(group, user, friends, non_friends)
tasks = get_reply_tasks(visibility, group)
{:ok, activity} =
CommonAPI.post(user, %{"status" => "Simple status", "visibility" => "unlisted"})
acc = {activity.id, ["@" <> actor.nickname, "reply to status"]}
insert_replies(tasks, visibility, user, friends, non_friends, acc)
end
defp insert_activity("simple_thread", "direct", group, user, friends, non_friends, _opts) do
actor = get_actor(group, user, friends, non_friends)
tasks = get_reply_tasks("direct", group)
list =
case group do
"non_friends" ->
Enum.take(non_friends, 3)
_ ->
Enum.take(friends, 3)
end
data = Enum.map(list, &("@" <> &1.nickname))
{:ok, activity} =
CommonAPI.post(actor, %{
"status" => Enum.join(data, ", ") <> "simple status",
"visibility" => "direct"
})
acc = {activity.id, ["@" <> user.nickname | data] ++ ["reply to status"]}
insert_direct_replies(tasks, user, list, acc)
end
defp insert_activity("remote", _, "user", _, _, _, _), do: :ok
defp insert_activity("remote", visibility, group, user, _friends, _non_friends, opts) do
remote_friends =
Users.get_users(user, limit: opts[:friends_used], local: :external, friends?: true)
remote_non_friends =
Users.get_users(user, limit: opts[:non_friends_used], local: :external, friends?: false)
actor = get_actor(group, user, remote_friends, remote_non_friends)
{act_data, obj_data} = prepare_activity_data(actor, visibility, user)
{activity_data, object_data} = other_data(actor)
activity_data
|> Map.merge(act_data)
|> Map.put("object", Map.merge(object_data, obj_data))
|> Pleroma.Web.ActivityPub.ActivityPub.insert(false)
end
defp get_actor("user", user, _friends, _non_friends), do: user
defp get_actor("friends", _user, friends, _non_friends), do: Enum.random(friends)
defp get_actor("non_friends", _user, _friends, non_friends), do: Enum.random(non_friends)
defp other_data(actor) do
%{host: host} = URI.parse(actor.ap_id)
datetime = DateTime.utc_now()
context_id = "http://#{host}:4000/contexts/#{UUID.generate()}"
activity_id = "http://#{host}:4000/activities/#{UUID.generate()}"
object_id = "http://#{host}:4000/objects/#{UUID.generate()}"
activity_data = %{
"actor" => actor.ap_id,
"context" => context_id,
"id" => activity_id,
"published" => datetime,
"type" => "Create",
"directMessage" => false
}
object_data = %{
"actor" => actor.ap_id,
"attachment" => [],
"attributedTo" => actor.ap_id,
"bcc" => [],
"bto" => [],
"content" => "Remote post",
"context" => context_id,
"conversation" => context_id,
"emoji" => %{},
"id" => object_id,
"published" => datetime,
"sensitive" => false,
"summary" => "",
"tag" => [],
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
"type" => "Note"
}
{activity_data, object_data}
end
defp prepare_activity_data(actor, "public", _mention) do
obj_data = %{
"cc" => [actor.follower_address],
"to" => [Constants.as_public()]
}
act_data = %{
"cc" => [actor.follower_address],
"to" => [Constants.as_public()]
}
{act_data, obj_data}
end
defp prepare_activity_data(actor, "private", _mention) do
obj_data = %{
"cc" => [],
"to" => [actor.follower_address]
}
act_data = %{
"cc" => [],
"to" => [actor.follower_address]
}
{act_data, obj_data}
end
defp prepare_activity_data(actor, "unlisted", _mention) do
obj_data = %{
"cc" => [Constants.as_public()],
"to" => [actor.follower_address]
}
act_data = %{
"cc" => [Constants.as_public()],
"to" => [actor.follower_address]
}
{act_data, obj_data}
end
defp prepare_activity_data(_actor, "direct", mention) do
%{host: mentioned_host} = URI.parse(mention.ap_id)
obj_data = %{
"cc" => [],
"content" =>
"<span class=\"h-card\"><a class=\"u-url mention\" href=\"#{mention.ap_id}\" rel=\"ugc\">@<span>#{
mention.nickname
}</span></a></span> direct message",
"tag" => [
%{
"href" => mention.ap_id,
"name" => "@#{mention.nickname}@#{mentioned_host}",
"type" => "Mention"
}
],
"to" => [mention.ap_id]
}
act_data = %{
"cc" => [],
"directMessage" => true,
"to" => [mention.ap_id]
}
{act_data, obj_data}
end
defp get_reply_tasks("public", "user"), do: ~w(friend non_friend user)
defp get_reply_tasks("public", "friends"), do: ~w(non_friend user friend)
defp get_reply_tasks("public", "non_friends"), do: ~w(user friend non_friend)
defp get_reply_tasks(visibility, "user") when visibility in ["unlisted", "private"],
do: ~w(friend user friend)
defp get_reply_tasks(visibility, "friends") when visibility in ["unlisted", "private"],
do: ~w(user friend user)
defp get_reply_tasks(visibility, "non_friends") when visibility in ["unlisted", "private"],
do: []
defp get_reply_tasks("direct", "user"), do: ~w(friend user friend)
defp get_reply_tasks("direct", "friends"), do: ~w(user friend user)
defp get_reply_tasks("direct", "non_friends"), do: ~w(user non_friend user)
defp insert_replies(tasks, visibility, user, friends, non_friends, acc) do
Enum.reduce(tasks, acc, fn
"friend", {id, data} ->
friend = Enum.random(friends)
insert_reply(friend, data, id, visibility)
"non_friend", {id, data} ->
non_friend = Enum.random(non_friends)
insert_reply(non_friend, data, id, visibility)
"user", {id, data} ->
insert_reply(user, data, id, visibility)
end)
end
defp insert_direct_replies(tasks, user, list, acc) do
Enum.reduce(tasks, acc, fn
group, {id, data} when group in ["friend", "non_friend"] ->
actor = Enum.random(list)
{reply_id, _} =
insert_reply(actor, List.delete(data, "@" <> actor.nickname), id, "direct")
{reply_id, data}
"user", {id, data} ->
{reply_id, _} = insert_reply(user, List.delete(data, "@" <> user.nickname), id, "direct")
{reply_id, data}
end)
end
defp insert_reply(actor, data, activity_id, visibility) do
{:ok, reply} =
CommonAPI.post(actor, %{
"status" => Enum.join(data, ", "),
"visibility" => visibility,
"in_reply_to_status_id" => activity_id
})
{reply.id, ["@" <> actor.nickname | data]}
end
defp get_random_mentions(_users, count) when count == 0, do: []
defp get_random_mentions(users, count) do
users
|> Enum.shuffle()
|> Enum.take(count)
|> Enum.map(&"@#{&1.nickname}")
end
defp get_random_create_activity_id do
Repo.one(
from(a in Pleroma.Activity,
where: fragment("(?)->>'type' = ?", a.data, ^"Create"),
order_by: fragment("RANDOM()"),
limit: 1,
select: a.id
)
)
end
end

View File

@@ -1,260 +1,489 @@
defmodule Pleroma.LoadTesting.Fetcher do
use Pleroma.LoadTesting.Helper
alias Pleroma.Activity
alias Pleroma.Pagination
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.MastodonAPI.MastodonAPI
alias Pleroma.Web.MastodonAPI.StatusView
def fetch_user(user) do
Benchee.run(%{
@spec run_benchmarks(User.t()) :: any()
def run_benchmarks(user) do
fetch_user(user)
fetch_timelines(user)
render_views(user)
end
defp formatters do
[
Benchee.Formatters.Console
]
end
defp fetch_user(user) do
Benchee.run(
%{
"By id" => fn -> Repo.get_by(User, id: user.id) end,
"By ap_id" => fn -> Repo.get_by(User, ap_id: user.ap_id) end,
"By email" => fn -> Repo.get_by(User, email: user.email) end,
"By nickname" => fn -> Repo.get_by(User, nickname: user.nickname) end
})
},
formatters: formatters()
)
end
def query_timelines(user) do
home_timeline_params = %{
"count" => 20,
"with_muted" => true,
"type" => ["Create", "Announce"],
defp fetch_timelines(user) do
fetch_home_timeline(user)
fetch_direct_timeline(user)
fetch_public_timeline(user)
fetch_public_timeline(user, :local)
fetch_public_timeline(user, :tag)
fetch_notifications(user)
fetch_favourites(user)
fetch_long_thread(user)
end
defp render_views(user) do
render_timelines(user)
render_long_thread(user)
end
defp opts_for_home_timeline(user) do
%{
"blocking_user" => user,
"count" => "20",
"muting_user" => user,
"type" => ["Create", "Announce"],
"user" => user,
"with_muted" => "true"
}
end
defp fetch_home_timeline(user) do
opts = opts_for_home_timeline(user)
recipients = [user.ap_id | User.following(user)]
first_page_last =
ActivityPub.fetch_activities(recipients, opts) |> Enum.reverse() |> List.last()
second_page_last =
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", first_page_last.id))
|> Enum.reverse()
|> List.last()
third_page_last =
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", second_page_last.id))
|> Enum.reverse()
|> List.last()
forth_page_last =
ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", third_page_last.id))
|> Enum.reverse()
|> List.last()
Benchee.run(
%{
"home timeline" => fn opts -> ActivityPub.fetch_activities(recipients, opts) end
},
inputs: %{
"1 page" => opts,
"2 page" => Map.put(opts, "max_id", first_page_last.id),
"3 page" => Map.put(opts, "max_id", second_page_last.id),
"4 page" => Map.put(opts, "max_id", third_page_last.id),
"5 page" => Map.put(opts, "max_id", forth_page_last.id),
"1 page only media" => Map.put(opts, "only_media", "true"),
"2 page only media" =>
Map.put(opts, "max_id", first_page_last.id) |> Map.put("only_media", "true"),
"3 page only media" =>
Map.put(opts, "max_id", second_page_last.id) |> Map.put("only_media", "true"),
"4 page only media" =>
Map.put(opts, "max_id", third_page_last.id) |> Map.put("only_media", "true"),
"5 page only media" =>
Map.put(opts, "max_id", forth_page_last.id) |> Map.put("only_media", "true")
},
formatters: formatters()
)
end
defp opts_for_direct_timeline(user) do
%{
:visibility => "direct",
"blocking_user" => user,
"count" => "20",
"type" => "Create",
"user" => user,
"with_muted" => "true"
}
end
defp fetch_direct_timeline(user) do
recipients = [user.ap_id]
opts = opts_for_direct_timeline(user)
first_page_last =
recipients
|> ActivityPub.fetch_activities_query(opts)
|> Pagination.fetch_paginated(opts)
|> List.last()
opts2 = Map.put(opts, "max_id", first_page_last.id)
second_page_last =
recipients
|> ActivityPub.fetch_activities_query(opts2)
|> Pagination.fetch_paginated(opts2)
|> List.last()
opts3 = Map.put(opts, "max_id", second_page_last.id)
third_page_last =
recipients
|> ActivityPub.fetch_activities_query(opts3)
|> Pagination.fetch_paginated(opts3)
|> List.last()
opts4 = Map.put(opts, "max_id", third_page_last.id)
forth_page_last =
recipients
|> ActivityPub.fetch_activities_query(opts4)
|> Pagination.fetch_paginated(opts4)
|> List.last()
Benchee.run(
%{
"direct timeline" => fn opts ->
ActivityPub.fetch_activities_query(recipients, opts) |> Pagination.fetch_paginated(opts)
end
},
inputs: %{
"1 page" => opts,
"2 page" => opts2,
"3 page" => opts3,
"4 page" => opts4,
"5 page" => Map.put(opts4, "max_id", forth_page_last.id)
},
formatters: formatters()
)
end
defp opts_for_public_timeline(user) do
%{
"type" => ["Create", "Announce"],
"local_only" => false,
"blocking_user" => user,
"muting_user" => user
}
end
defp opts_for_public_timeline(user, :local) do
%{
"type" => ["Create", "Announce"],
"local_only" => true,
"blocking_user" => user,
"muting_user" => user
}
end
defp opts_for_public_timeline(user, :tag) do
%{
"blocking_user" => user,
"count" => "20",
"local_only" => nil,
"muting_user" => user,
"tag" => ["tag"],
"tag_all" => [],
"tag_reject" => [],
"type" => "Create",
"user" => user,
"with_muted" => "true"
}
end
defp fetch_public_timeline(user) do
opts = opts_for_public_timeline(user)
fetch_public_timeline(opts, "public timeline")
end
defp fetch_public_timeline(user, :local) do
opts = opts_for_public_timeline(user, :local)
fetch_public_timeline(opts, "public timeline only local")
end
defp fetch_public_timeline(user, :tag) do
opts = opts_for_public_timeline(user, :tag)
fetch_public_timeline(opts, "hashtag timeline")
end
defp fetch_public_timeline(user, :only_media) do
opts = opts_for_public_timeline(user) |> Map.put("only_media", "true")
fetch_public_timeline(opts, "public timeline only media")
end
defp fetch_public_timeline(opts, title) when is_binary(title) do
first_page_last = ActivityPub.fetch_public_activities(opts) |> List.last()
second_page_last =
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", first_page_last.id))
|> List.last()
third_page_last =
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", second_page_last.id))
|> List.last()
forth_page_last =
ActivityPub.fetch_public_activities(Map.put(opts, "max_id", third_page_last.id))
|> List.last()
Benchee.run(
%{
title => fn opts ->
ActivityPub.fetch_public_activities(opts)
end
},
inputs: %{
"1 page" => opts,
"2 page" => Map.put(opts, "max_id", first_page_last.id),
"3 page" => Map.put(opts, "max_id", second_page_last.id),
"4 page" => Map.put(opts, "max_id", third_page_last.id),
"5 page" => Map.put(opts, "max_id", forth_page_last.id)
},
formatters: formatters()
)
end
defp opts_for_notifications do
%{"count" => "20", "with_muted" => "true"}
end
defp fetch_notifications(user) do
opts = opts_for_notifications()
first_page_last = MastodonAPI.get_notifications(user, opts) |> List.last()
second_page_last =
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", first_page_last.id))
|> List.last()
third_page_last =
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", second_page_last.id))
|> List.last()
forth_page_last =
MastodonAPI.get_notifications(user, Map.put(opts, "max_id", third_page_last.id))
|> List.last()
Benchee.run(
%{
"Notifications" => fn opts ->
MastodonAPI.get_notifications(user, opts)
end
},
inputs: %{
"1 page" => opts,
"2 page" => Map.put(opts, "max_id", first_page_last.id),
"3 page" => Map.put(opts, "max_id", second_page_last.id),
"4 page" => Map.put(opts, "max_id", third_page_last.id),
"5 page" => Map.put(opts, "max_id", forth_page_last.id)
},
formatters: formatters()
)
end
defp fetch_favourites(user) do
first_page_last = ActivityPub.fetch_favourites(user) |> List.last()
second_page_last =
ActivityPub.fetch_favourites(user, %{"max_id" => first_page_last.id}) |> List.last()
third_page_last =
ActivityPub.fetch_favourites(user, %{"max_id" => second_page_last.id}) |> List.last()
forth_page_last =
ActivityPub.fetch_favourites(user, %{"max_id" => third_page_last.id}) |> List.last()
Benchee.run(
%{
"Favourites" => fn opts ->
ActivityPub.fetch_favourites(user, opts)
end
},
inputs: %{
"1 page" => %{},
"2 page" => %{"max_id" => first_page_last.id},
"3 page" => %{"max_id" => second_page_last.id},
"4 page" => %{"max_id" => third_page_last.id},
"5 page" => %{"max_id" => forth_page_last.id}
},
formatters: formatters()
)
end
defp opts_for_long_thread(user) do
%{
"blocking_user" => user,
"user" => user
}
end
mastodon_public_timeline_params = %{
"count" => 20,
"local_only" => true,
"only_media" => "false",
"type" => ["Create", "Announce"],
"with_muted" => "true",
"blocking_user" => user,
"muting_user" => user
}
defp fetch_long_thread(user) do
%{public_thread: public, private_thread: private} =
Agent.get(:benchmark_state, fn state -> state end)
mastodon_federated_timeline_params = %{
"count" => 20,
"only_media" => "false",
"type" => ["Create", "Announce"],
"with_muted" => "true",
"blocking_user" => user,
"muting_user" => user
}
opts = opts_for_long_thread(user)
following = User.following(user)
private_input = {private.data["context"], Map.put(opts, "exclude_id", private.id)}
Benchee.run(%{
"User home timeline" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities(
following,
home_timeline_params
)
end,
"User mastodon public timeline" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
mastodon_public_timeline_params
)
end,
"User mastodon federated public timeline" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
mastodon_federated_timeline_params
public_input = {public.data["context"], Map.put(opts, "exclude_id", public.id)}
Benchee.run(
%{
"fetch context" => fn {context, opts} ->
ActivityPub.fetch_activities_for_context(context, opts)
end
},
inputs: %{
"Private long thread" => private_input,
"Public long thread" => public_input
},
formatters: formatters()
)
end
})
home_activities =
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities(
following,
home_timeline_params
)
defp render_timelines(user) do
opts = opts_for_home_timeline(user)
public_activities =
Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(mastodon_public_timeline_params)
recipients = [user.ap_id | User.following(user)]
public_federated_activities =
Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
mastodon_federated_timeline_params
)
home_activities = ActivityPub.fetch_activities(recipients, opts) |> Enum.reverse()
Benchee.run(%{
recipients = [user.ap_id]
opts = opts_for_direct_timeline(user)
direct_activities =
recipients
|> ActivityPub.fetch_activities_query(opts)
|> Pagination.fetch_paginated(opts)
opts = opts_for_public_timeline(user)
public_activities = ActivityPub.fetch_public_activities(opts)
opts = opts_for_public_timeline(user, :tag)
tag_activities = ActivityPub.fetch_public_activities(opts)
opts = opts_for_notifications()
notifications = MastodonAPI.get_notifications(user, opts)
favourites = ActivityPub.fetch_favourites(user)
Benchee.run(
%{
"Rendering home timeline" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
StatusView.render("index.json", %{
activities: home_activities,
for: user,
as: :activity
})
end,
"Rendering direct timeline" => fn ->
StatusView.render("index.json", %{
activities: direct_activities,
for: user,
as: :activity
})
end,
"Rendering public timeline" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
StatusView.render("index.json", %{
activities: public_activities,
for: user,
as: :activity
})
end,
"Rendering public federated timeline" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: public_federated_activities,
"Rendering tag timeline" => fn ->
StatusView.render("index.json", %{
activities: tag_activities,
for: user,
as: :activity
})
end,
"Rendering favorites timeline" => fn ->
conn = Phoenix.ConnTest.build_conn(:get, "http://localhost:4001/api/v1/favourites", nil)
Pleroma.Web.MastodonAPI.StatusController.favourites(
%Plug.Conn{conn |
assigns: %{user: user},
query_params: %{"limit" => "0"},
body_params: %{},
cookies: %{},
params: %{},
path_params: %{},
private: %{
Pleroma.Web.Router => {[], %{}},
phoenix_router: Pleroma.Web.Router,
phoenix_action: :favourites,
phoenix_controller: Pleroma.Web.MastodonAPI.StatusController,
phoenix_endpoint: Pleroma.Web.Endpoint,
phoenix_format: "json",
phoenix_layout: {Pleroma.Web.LayoutView, "app.html"},
phoenix_recycled: true,
"Rendering notifications" => fn ->
Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
notifications: notifications,
for: user
})
end,
"Rendering favourites timeline" => fn ->
StatusView.render("index.json", %{
activities: favourites,
for: user,
as: :activity
})
end
},
formatters: formatters()
)
end
phoenix_view: Pleroma.Web.MastodonAPI.StatusView,
plug_session: %{"user_id" => user.id},
plug_session_fetch: :done,
plug_session_info: :write,
plug_skip_csrf_protection: true
defp render_long_thread(user) do
%{public_thread: public, private_thread: private} =
Agent.get(:benchmark_state, fn state -> state end)
opts = %{for: user}
public_activity = Activity.get_by_id_with_object(public.id)
private_activity = Activity.get_by_id_with_object(private.id)
Benchee.run(
%{
"render" => fn opts ->
StatusView.render("show.json", opts)
end
},
inputs: %{
"Public root" => Map.put(opts, :activity, public_activity),
"Private root" => Map.put(opts, :activity, private_activity)
},
formatters: formatters()
)
fetch_opts = opts_for_long_thread(user)
public_context =
ActivityPub.fetch_activities_for_context(
public.data["context"],
Map.put(fetch_opts, "exclude_id", public.id)
)
private_context =
ActivityPub.fetch_activities_for_context(
private.data["context"],
Map.put(fetch_opts, "exclude_id", private.id)
)
Benchee.run(
%{
"render" => fn opts ->
StatusView.render("context.json", opts)
end
},
inputs: %{
"Public context" => %{user: user, activity: public_activity, activities: public_context},
"Private context" => %{
user: user,
activity: private_activity,
activities: private_context
}
},
%{})
end,
})
end
def query_notifications(user) do
without_muted_params = %{"count" => "20", "with_muted" => "false"}
with_muted_params = %{"count" => "20", "with_muted" => "true"}
Benchee.run(%{
"Notifications without muted" => fn ->
Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, without_muted_params)
end,
"Notifications with muted" => fn ->
Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, with_muted_params)
end
})
without_muted_notifications =
Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, without_muted_params)
with_muted_notifications =
Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, with_muted_params)
Benchee.run(%{
"Render notifications without muted" => fn ->
Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
notifications: without_muted_notifications,
for: user
})
end,
"Render notifications with muted" => fn ->
Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
notifications: with_muted_notifications,
for: user
})
end
})
end
def query_dms(user) do
params = %{
"count" => "20",
"with_muted" => "true",
"type" => "Create",
"blocking_user" => user,
"user" => user,
visibility: "direct"
}
Benchee.run(%{
"Direct messages with muted" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
|> Pleroma.Pagination.fetch_paginated(params)
end,
"Direct messages without muted" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
|> Pleroma.Pagination.fetch_paginated(Map.put(params, "with_muted", false))
end
})
dms_with_muted =
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
|> Pleroma.Pagination.fetch_paginated(params)
dms_without_muted =
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
|> Pleroma.Pagination.fetch_paginated(Map.put(params, "with_muted", false))
Benchee.run(%{
"Rendering dms with muted" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: dms_with_muted,
for: user,
as: :activity
})
end,
"Rendering dms without muted" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
activities: dms_without_muted,
for: user,
as: :activity
})
end
})
end
def query_long_thread(user, activity) do
Benchee.run(%{
"Fetch main post" => fn ->
Pleroma.Activity.get_by_id_with_object(activity.id)
end,
"Fetch context of main post" => fn ->
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_for_context(
activity.data["context"],
%{
"blocking_user" => user,
"user" => user,
"exclude_id" => activity.id
}
formatters: formatters()
)
end
})
activity = Pleroma.Activity.get_by_id_with_object(activity.id)
context =
Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_for_context(
activity.data["context"],
%{
"blocking_user" => user,
"user" => user,
"exclude_id" => activity.id
}
)
Benchee.run(%{
"Render status" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render("show.json", %{
activity: activity,
for: user
})
end,
"Render context" => fn ->
Pleroma.Web.MastodonAPI.StatusView.render(
"index.json",
for: user,
activities: context,
as: :activity
)
|> Enum.reverse()
end
})
end
end

View File

@@ -1,410 +0,0 @@
defmodule Pleroma.LoadTesting.Generator do
use Pleroma.LoadTesting.Helper
alias Pleroma.Web.CommonAPI
def generate_like_activities(user, posts) do
count_likes = Kernel.trunc(length(posts) / 4)
IO.puts("Starting generating #{count_likes} like activities...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
Enum.take_random(posts, count_likes),
fn post -> {:ok, _, _} = CommonAPI.favorite(post.id, user) end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting like activities take #{to_sec(time)} sec.\n")
end
def generate_users(opts) do
IO.puts("Starting generating #{opts[:users_max]} users...")
{time, users} = :timer.tc(fn -> do_generate_users(opts) end)
IO.puts("Inserting users took #{to_sec(time)} sec.\n")
users
end
defp do_generate_users(opts) do
max = Keyword.get(opts, :users_max)
Task.async_stream(
1..max,
&generate_user_data(&1),
max_concurrency: 10,
timeout: 30_000
)
|> Enum.to_list()
end
defp generate_user_data(i) do
remote = Enum.random([true, false])
user = %User{
name: "Test テスト User #{i}",
email: "user#{i}@example.com",
nickname: "nick#{i}",
password_hash:
"$pbkdf2-sha512$160000$bU.OSFI7H/yqWb5DPEqyjw$uKp/2rmXw12QqnRRTqTtuk2DTwZfF8VR4MYW2xMeIlqPR/UX1nT1CEKVUx2CowFMZ5JON8aDvURrZpJjSgqXrg",
bio: "Tester Number #{i}",
local: remote
}
user_urls =
if remote do
base_url =
Enum.random(["https://domain1.com", "https://domain2.com", "https://domain3.com"])
ap_id = "#{base_url}/users/#{user.nickname}"
%{
ap_id: ap_id,
follower_address: ap_id <> "/followers",
following_address: ap_id <> "/following"
}
else
%{
ap_id: User.ap_id(user),
follower_address: User.ap_followers(user),
following_address: User.ap_following(user)
}
end
user = Map.merge(user, user_urls)
Repo.insert!(user)
end
def generate_activities(user, users) do
do_generate_activities(user, users)
end
defp do_generate_activities(user, users) do
IO.puts("Starting generating 20000 common activities...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
1..20_000,
fn _ ->
do_generate_activity([user | users])
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting common activities take #{to_sec(time)} sec.\n")
IO.puts("Starting generating 20000 activities with mentions...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
1..20_000,
fn _ ->
do_generate_activity_with_mention(user, users)
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting activities with menthions take #{to_sec(time)} sec.\n")
IO.puts("Starting generating 10000 activities with threads...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
1..10_000,
fn _ ->
do_generate_threads([user | users])
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting activities with threads take #{to_sec(time)} sec.\n")
end
defp do_generate_activity(users) do
post = %{
"status" => "Some status without mention with random user"
}
CommonAPI.post(Enum.random(users), post)
end
def generate_power_intervals(opts \\ []) do
count = Keyword.get(opts, :count, 20)
power = Keyword.get(opts, :power, 2)
IO.puts("Generating #{count} intervals for a power #{power} series...")
counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
sum = Enum.sum(counts)
densities =
Enum.map(counts, fn c ->
c / sum
end)
densities
|> Enum.reduce(0, fn density, acc ->
if acc == 0 do
[{0, density}]
else
[{_, lower} | _] = acc
[{lower, lower + density} | acc]
end
end)
|> Enum.reverse()
end
def generate_tagged_activities(opts \\ []) do
tag_count = Keyword.get(opts, :tag_count, 20)
users = Keyword.get(opts, :users, Repo.all(User))
activity_count = Keyword.get(opts, :count, 200_000)
intervals = generate_power_intervals(count: tag_count)
IO.puts(
"Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
)
Enum.each(1..activity_count, fn _ ->
random = :rand.uniform()
i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
end)
end
defp do_generate_activity_with_mention(user, users) do
mentions_cnt = Enum.random([2, 3, 4, 5])
with_user = Enum.random([true, false])
users = Enum.shuffle(users)
mentions_users = Enum.take(users, mentions_cnt)
mentions_users = if with_user, do: [user | mentions_users], else: mentions_users
mentions_str =
Enum.map(mentions_users, fn user -> "@" <> user.nickname end) |> Enum.join(", ")
post = %{
"status" => mentions_str <> "some status with mentions random users"
}
CommonAPI.post(Enum.random(users), post)
end
defp do_generate_threads(users) do
thread_length = Enum.random([2, 3, 4, 5])
actor = Enum.random(users)
post = %{
"status" => "Start of the thread"
}
{:ok, activity} = CommonAPI.post(actor, post)
Enum.each(1..thread_length, fn _ ->
user = Enum.random(users)
post = %{
"status" => "@#{actor.nickname} reply to thread",
"in_reply_to_status_id" => activity.id
}
CommonAPI.post(user, post)
end)
end
def generate_remote_activities(user, users) do
do_generate_remote_activities(user, users)
end
defp do_generate_remote_activities(user, users) do
IO.puts("Starting generating 10000 remote activities...")
{time, _} =
:timer.tc(fn ->
Task.async_stream(
1..10_000,
fn i ->
do_generate_remote_activity(i, user, users)
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end)
IO.puts("Inserting remote activities take #{to_sec(time)} sec.\n")
end
defp do_generate_remote_activity(i, user, users) do
actor = Enum.random(users)
%{host: host} = URI.parse(actor.ap_id)
date = Date.utc_today()
datetime = DateTime.utc_now()
map = %{
"actor" => actor.ap_id,
"cc" => [actor.follower_address, user.ap_id],
"context" => "tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
"id" => actor.ap_id <> "/statuses/#{i}/activity",
"object" => %{
"actor" => actor.ap_id,
"atomUri" => actor.ap_id <> "/statuses/#{i}",
"attachment" => [],
"attributedTo" => actor.ap_id,
"bcc" => [],
"bto" => [],
"cc" => [actor.follower_address, user.ap_id],
"content" =>
"<p><span class=\"h-card\"><a href=\"" <>
user.ap_id <>
"\" class=\"u-url mention\">@<span>" <> user.nickname <> "</span></a></span></p>",
"context" => "tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
"conversation" =>
"tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
"emoji" => %{},
"id" => actor.ap_id <> "/statuses/#{i}",
"inReplyTo" => nil,
"inReplyToAtomUri" => nil,
"published" => datetime,
"sensitive" => true,
"summary" => "cw",
"tag" => [
%{
"href" => user.ap_id,
"name" => "@#{user.nickname}@#{host}",
"type" => "Mention"
}
],
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
"type" => "Note",
"url" => "http://#{host}/@#{actor.nickname}/#{i}"
},
"published" => datetime,
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
"type" => "Create"
}
Pleroma.Web.ActivityPub.ActivityPub.insert(map, false)
end
def generate_dms(user, users, opts) do
IO.puts("Starting generating #{opts[:dms_max]} DMs")
{time, _} = :timer.tc(fn -> do_generate_dms(user, users, opts) end)
IO.puts("Inserting dms take #{to_sec(time)} sec.\n")
end
defp do_generate_dms(user, users, opts) do
Task.async_stream(
1..opts[:dms_max],
fn _ ->
do_generate_dm(user, users)
end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end
defp do_generate_dm(user, users) do
post = %{
"status" => "@#{user.nickname} some direct message",
"visibility" => "direct"
}
CommonAPI.post(Enum.random(users), post)
end
def generate_long_thread(user, users, opts) do
IO.puts("Starting generating long thread with #{opts[:thread_length]} replies")
{time, activity} = :timer.tc(fn -> do_generate_long_thread(user, users, opts) end)
IO.puts("Inserting long thread replies take #{to_sec(time)} sec.\n")
{:ok, activity}
end
defp do_generate_long_thread(user, users, opts) do
{:ok, %{id: id} = activity} = CommonAPI.post(user, %{"status" => "Start of long thread"})
Task.async_stream(
1..opts[:thread_length],
fn _ -> do_generate_thread(users, id) end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
activity
end
defp do_generate_thread(users, activity_id) do
CommonAPI.post(Enum.random(users), %{
"status" => "reply to main post",
"in_reply_to_status_id" => activity_id
})
end
def generate_non_visible_message(user, users) do
IO.puts("Starting generating 1000 non visible posts")
{time, _} =
:timer.tc(fn ->
do_generate_non_visible_posts(user, users)
end)
IO.puts("Inserting non visible posts take #{to_sec(time)} sec.\n")
end
defp do_generate_non_visible_posts(user, users) do
[not_friend | users] = users
make_friends(user, users)
Task.async_stream(1..1000, fn _ -> do_generate_non_visible_post(not_friend, users) end,
max_concurrency: 10,
timeout: 30_000
)
|> Stream.run()
end
defp make_friends(_user, []), do: nil
defp make_friends(user, [friend | users]) do
{:ok, _} = User.follow(user, friend)
{:ok, _} = User.follow(friend, user)
make_friends(user, users)
end
defp do_generate_non_visible_post(not_friend, users) do
post = %{
"status" => "some non visible post",
"visibility" => "private"
}
{:ok, activity} = CommonAPI.post(not_friend, post)
thread_length = Enum.random([2, 3, 4, 5])
Enum.each(1..thread_length, fn _ ->
user = Enum.random(users)
post = %{
"status" => "@#{not_friend.nickname} reply to non visible post",
"in_reply_to_status_id" => activity.id,
"visibility" => "private"
}
CommonAPI.post(user, post)
end)
end
end

View File

@@ -1,11 +1,14 @@
defmodule Pleroma.LoadTesting.Helper do
defmacro __using__(_) do
quote do
import Ecto.Query
alias Ecto.Adapters.SQL
alias Pleroma.Repo
alias Pleroma.User
defp to_sec(microseconds), do: microseconds / 1_000_000
end
def to_sec(microseconds), do: microseconds / 1_000_000
def clean_tables do
IO.puts("Deleting old data...\n")
SQL.query!(Repo, "TRUNCATE users CASCADE;")
SQL.query!(Repo, "TRUNCATE activities CASCADE;")
SQL.query!(Repo, "TRUNCATE objects CASCADE;")
SQL.query!(Repo, "TRUNCATE oban_jobs CASCADE;")
end
end

View File

@@ -0,0 +1,169 @@
defmodule Pleroma.LoadTesting.Users do
@moduledoc """
Module for generating users with friends.
"""
import Ecto.Query
import Pleroma.LoadTesting.Helper, only: [to_sec: 1]
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.User.Query
@defaults [
users: 20_000,
friends: 100
]
@max_concurrency 10
@spec generate(keyword()) :: User.t()
def generate(opts \\ []) do
opts = Keyword.merge(@defaults, opts)
generate_users(opts[:users])
main_user =
Repo.one(from(u in User, where: u.local == true, order_by: fragment("RANDOM()"), limit: 1))
make_friends(main_user, opts[:friends])
Repo.get(User, main_user.id)
end
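# Usage sketch (example values; defaults are users: 20_000 and friends: 100 from
# @defaults above):
#
#   main_user = Pleroma.LoadTesting.Users.generate(users: 20_000, friends: 100)
#
# The returned account is reloaded from the database and already has mutual follow
# relationships with the generated friends, so it can be passed straight to
# Pleroma.LoadTesting.Activities.generate/2.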
def generate_users(max) do
IO.puts("Starting generating #{max} users...")
{time, users} =
:timer.tc(fn ->
Task.async_stream(
1..max,
&generate_user(&1),
max_concurrency: @max_concurrency,
timeout: 30_000
)
|> Enum.to_list()
end)
IO.puts("Generating users took #{to_sec(time)} sec.\n")
users
end
defp generate_user(i) do
remote = Enum.random([true, false])
%User{
name: "Test テスト User #{i}",
email: "user#{i}@example.com",
nickname: "nick#{i}",
password_hash: Comeonin.Pbkdf2.hashpwsalt("test"),
bio: "Tester Number #{i}",
local: !remote
}
|> user_urls()
|> Repo.insert!()
end
defp user_urls(%{local: true} = user) do
urls = %{
ap_id: User.ap_id(user),
follower_address: User.ap_followers(user),
following_address: User.ap_following(user)
}
Map.merge(user, urls)
end
defp user_urls(%{local: false} = user) do
base_domain = Enum.random(["domain1.com", "domain2.com", "domain3.com"])
ap_id = "https://#{base_domain}/users/#{user.nickname}"
urls = %{
ap_id: ap_id,
follower_address: ap_id <> "/followers",
following_address: ap_id <> "/following"
}
Map.merge(user, urls)
end
def make_friends(main_user, max) when is_integer(max) do
IO.puts("Starting making friends for #{max} users...")
{time, _} =
:timer.tc(fn ->
number_of_users =
(max / 2)
|> Kernel.trunc()
main_user
|> get_users(%{limit: number_of_users, local: :local})
|> run_stream(main_user)
main_user
|> get_users(%{limit: number_of_users, local: :external})
|> run_stream(main_user)
end)
IO.puts("Making friends took #{to_sec(time)} sec.\n")
end
def make_friends(%User{} = main_user, %User{} = user) do
{:ok, _} = User.follow(main_user, user)
{:ok, _} = User.follow(user, main_user)
end
@spec get_users(User.t(), keyword()) :: [User.t()]
def get_users(user, opts) do
criteria = %{limit: opts[:limit]}
criteria =
if opts[:local] do
Map.put(criteria, opts[:local], true)
else
criteria
end
criteria =
if opts[:friends?] do
Map.put(criteria, :friends, user)
else
criteria
end
query =
criteria
|> Query.build()
|> random_without_user(user)
query =
if opts[:friends?] == false do
friends_ids =
%{friends: user}
|> Query.build()
|> Repo.all()
|> Enum.map(& &1.id)
from(u in query, where: u.id not in ^friends_ids)
else
query
end
Repo.all(query)
end
defp random_without_user(query, user) do
from(u in query,
where: u.id != ^user.id,
order_by: fragment("RANDOM()")
)
end
defp run_stream(users, main_user) do
Task.async_stream(users, &make_friends(main_user, &1),
max_concurrency: @max_concurrency,
timeout: 30_000
)
|> Stream.run()
end
end

View File

@@ -1,9 +1,12 @@
defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
use Mix.Task
alias Pleroma.Repo
alias Pleroma.LoadTesting.Generator
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
import Ecto.Query
alias Pleroma.Repo
alias Pleroma.Web.MastodonAPI.TimelineController
def run(_args) do
Mix.Pleroma.start_pleroma()
activities_count = Repo.aggregate(from(a in Pleroma.Activity), :count, :id)
@@ -11,8 +14,8 @@ def run(_args) do
if activities_count == 0 do
IO.puts("Did not find any activities, cleaning and generating")
clean_tables()
Generator.generate_users(users_max: 10)
Generator.generate_tagged_activities()
Pleroma.LoadTesting.Users.generate_users(10)
Pleroma.LoadTesting.Activities.generate_tagged_activities()
else
IO.puts("Found #{activities_count} activities, won't generate new ones")
end
@@ -34,7 +37,7 @@ def run(_args) do
Benchee.run(
%{
"Hashtag fetching, any" => fn tags ->
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
TimelineController.hashtag_fetching(
%{
"any" => tags
},
@@ -44,7 +47,7 @@ def run(_args) do
end,
# Will always return zero results because no overlapping hashtags are generated.
"Hashtag fetching, all" => fn tags ->
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
TimelineController.hashtag_fetching(
%{
"all" => tags
},
@@ -64,7 +67,7 @@ def run(_args) do
Benchee.run(
%{
"Hashtag fetching" => fn tag ->
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
TimelineController.hashtag_fetching(
%{
"tag" => tag
},
@@ -77,11 +80,4 @@ def run(_args) do
time: 5
)
end
defp clean_tables do
IO.puts("Deleting old data...\n")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
end
end

View File

@@ -1,9 +1,10 @@
defmodule Mix.Tasks.Pleroma.Benchmarks.Timelines do
use Mix.Task
alias Pleroma.Repo
alias Pleroma.LoadTesting.Generator
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
alias Pleroma.Web.CommonAPI
alias Plug.Conn
def run(_args) do
Mix.Pleroma.start_pleroma()
@@ -11,7 +12,7 @@ def run(_args) do
# Cleaning tables
clean_tables()
[{:ok, user} | users] = Generator.generate_users(users_max: 1000)
[{:ok, user} | users] = Pleroma.LoadTesting.Users.generate_users(1000)
# Let the user make 100 posts
@@ -38,8 +39,8 @@ def run(_args) do
"user timeline, no followers" => fn reading_user ->
conn =
Phoenix.ConnTest.build_conn()
|> Plug.Conn.assign(:user, reading_user)
|> Plug.Conn.assign(:skip_link_headers, true)
|> Conn.assign(:user, reading_user)
|> Conn.assign(:skip_link_headers, true)
Pleroma.Web.MastodonAPI.AccountController.statuses(conn, %{"id" => user.id})
end
@@ -56,8 +57,8 @@ def run(_args) do
"user timeline, all following" => fn reading_user ->
conn =
Phoenix.ConnTest.build_conn()
|> Plug.Conn.assign(:user, reading_user)
|> Plug.Conn.assign(:skip_link_headers, true)
|> Conn.assign(:user, reading_user)
|> Conn.assign(:skip_link_headers, true)
Pleroma.Web.MastodonAPI.AccountController.statuses(conn, %{"id" => user.id})
end
@@ -66,11 +67,4 @@ def run(_args) do
time: 60
)
end
defp clean_tables do
IO.puts("Deleting old data...\n")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
end
end

View File

@@ -1,114 +1,55 @@
defmodule Mix.Tasks.Pleroma.LoadTesting do
use Mix.Task
use Pleroma.LoadTesting.Helper
import Mix.Pleroma
import Pleroma.LoadTesting.Generator
import Pleroma.LoadTesting.Fetcher
import Ecto.Query
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
alias Pleroma.Repo
alias Pleroma.User
@shortdoc "Factory for generation data"
@moduledoc """
Generates data like:
- local/remote users
- local/remote activities with notifications
- direct messages
- long thread
- non visible posts
- local/remote activities with different visibility:
- simple activities
- with emoji
- with mentions
- hellthreads
- with attachments
- with tags
- likes
- reblogs
- simple threads
- long threads
## Generate data
MIX_ENV=benchmark mix pleroma.load_testing --users 20000 --dms 20000 --thread_length 2000
MIX_ENV=benchmark mix pleroma.load_testing -u 20000 -d 20000 -t 2000
MIX_ENV=benchmark mix pleroma.load_testing --users 20000 --friends 1000 --iterations 170 --friends_used 20 --non_friends_used 20
MIX_ENV=benchmark mix pleroma.load_testing -u 20000 -f 1000 -i 170 -fu 20 -nfu 20
Options:
- `--users NUMBER` - number of users to generate. Defaults to: 20000. Alias: `-u`
- `--dms NUMBER` - number of direct messages to generate. Defaults to: 20000. Alias `-d`
- `--thread_length` - number of messages in thread. Defaults to: 2000. Alias `-t`
- `--friends NUMBER` - number of friends for main user. Defaults to: 1000. Alias: `-f`
- `--iterations NUMBER` - number of iterations of activity generation. Each iteration inserts about 120 or more activities with different visibilities, actors, and types into the database. Defaults to: 170. Alias: `-i`
- `--friends_used NUMBER` - number of main user friends used in activity generation. Defaults to: 20. Alias: `-fu`
- `--non_friends_used NUMBER` - number of non friends used in activity generation. Defaults to: 20. Alias: `-nfu`
"""
@aliases [u: :users, d: :dms, t: :thread_length]
@aliases [u: :users, f: :friends, i: :iterations, fu: :friends_used, nfu: :non_friends_used]
@switches [
users: :integer,
dms: :integer,
thread_length: :integer
friends: :integer,
iterations: :integer,
friends_used: :integer,
non_friends_used: :integer
]
@users_default 20_000
@dms_default 1_000
@thread_length_default 2_000
def run(args) do
start_pleroma()
Pleroma.Config.put([:instance, :skip_thread_containment], true)
Mix.Pleroma.start_pleroma()
clean_tables()
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
users_max = Keyword.get(opts, :users, @users_default)
dms_max = Keyword.get(opts, :dms, @dms_default)
thread_length = Keyword.get(opts, :thread_length, @thread_length_default)
clean_tables()
opts =
Keyword.put(opts, :users_max, users_max)
|> Keyword.put(:dms_max, dms_max)
|> Keyword.put(:thread_length, thread_length)
generate_users(opts)
# main user for queries
IO.puts("Fetching local main user...")
{time, user} =
:timer.tc(fn ->
Repo.one(
from(u in User, where: u.local == true, order_by: fragment("RANDOM()"), limit: 1)
)
end)
IO.puts("Fetching main user take #{to_sec(time)} sec.\n")
IO.puts("Fetching local users...")
{time, users} =
:timer.tc(fn ->
Repo.all(
from(u in User,
where: u.id != ^user.id,
where: u.local == true,
order_by: fragment("RANDOM()"),
limit: 10
)
)
end)
IO.puts("Fetching local users take #{to_sec(time)} sec.\n")
IO.puts("Fetching remote users...")
{time, remote_users} =
:timer.tc(fn ->
Repo.all(
from(u in User,
where: u.id != ^user.id,
where: u.local == false,
order_by: fragment("RANDOM()"),
limit: 10
)
)
end)
IO.puts("Fetching remote users take #{to_sec(time)} sec.\n")
generate_activities(user, users)
generate_remote_activities(user, remote_users)
generate_like_activities(
user, Pleroma.Repo.all(Pleroma.Activity.Queries.by_type("Create"))
)
generate_dms(user, users, opts)
{:ok, activity} = generate_long_thread(user, users, opts)
generate_non_visible_message(user, users)
user = Pleroma.LoadTesting.Users.generate(opts)
Pleroma.LoadTesting.Activities.generate(user, opts)
IO.puts("Users in DB: #{Repo.aggregate(from(u in User), :count, :id)}")
@@ -120,19 +61,6 @@ def run(args) do
"Notifications in DB: #{Repo.aggregate(from(n in Pleroma.Notification), :count, :id)}"
)
fetch_user(user)
query_timelines(user)
query_notifications(user)
query_dms(user)
query_long_thread(user, activity)
Pleroma.Config.put([:instance, :skip_thread_containment], false)
query_timelines(user)
end
defp clean_tables do
IO.puts("Deleting old data...\n")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
Pleroma.LoadTesting.Fetcher.run_benchmarks(user)
end
end

View File

@@ -39,7 +39,7 @@
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "pleroma_test",
database: "pleroma_benchmark",
hostname: System.get_env("DB_HOST") || "localhost",
pool_size: 10

View File

@@ -58,20 +58,6 @@
config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
config :pleroma, :hackney_pools,
federation: [
max_connections: 50,
timeout: 150_000
],
media: [
max_connections: 50,
timeout: 150_000
],
upload: [
max_connections: 25,
timeout: 300_000
]
# Upload configuration
config :pleroma, Pleroma.Upload,
uploader: Pleroma.Uploaders.Local,
@@ -184,21 +170,13 @@
"application/ld+json" => ["activity+json"]
}
config :tesla, adapter: Tesla.Adapter.Hackney
config :tesla, adapter: Tesla.Adapter.Gun
# Configures http settings, upstream proxy etc.
config :pleroma, :http,
proxy_url: nil,
send_user_agent: true,
user_agent: :default,
adapter: [
ssl_options: [
# Workaround for remote server certificate chain issues
partial_chain: &:hackney_connect.partial_chain/1,
# We don't support TLS v1.3 yet
versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"]
]
]
adapter: []
config :pleroma, :instance,
name: "Pleroma",
@@ -624,6 +602,49 @@
parameters: [gin_fuzzy_search_limit: "500"],
prepare: :unnamed
config :pleroma, :connections_pool,
checkin_timeout: 250,
max_connections: 250,
retry: 1,
retry_timeout: 1000,
await_up_timeout: 5_000
config :pleroma, :pools,
federation: [
size: 50,
max_overflow: 10,
timeout: 150_000
],
media: [
size: 50,
max_overflow: 10,
timeout: 150_000
],
upload: [
size: 25,
max_overflow: 5,
timeout: 300_000
],
default: [
size: 10,
max_overflow: 2,
timeout: 10_000
]
config :pleroma, :hackney_pools,
federation: [
max_connections: 50,
timeout: 150_000
],
media: [
max_connections: 50,
timeout: 150_000
],
upload: [
max_connections: 25,
timeout: 300_000
]
config :pleroma, :restrict_unauthenticated,
timelines: %{local: false, federated: false},
profiles: %{local: false, remote: false},

View File

@@ -2916,6 +2916,219 @@
}
]
},
%{
group: :pleroma,
key: :connections_pool,
type: :group,
description: "Advanced settings for `gun` connections pool",
children: [
%{
key: :checkin_timeout,
type: :integer,
description: "Timeout to checkin connection from pool. Default: 250ms.",
suggestions: [250]
},
%{
key: :max_connections,
type: :integer,
description: "Maximum number of connections in the pool. Default: 250 connections.",
suggestions: [250]
},
%{
key: :retry,
type: :integer,
description:
"Number of retries, while `gun` will try to reconnect if connection goes down. Default: 1.",
suggestions: [1]
},
%{
key: :retry_timeout,
type: :integer,
description:
"Time between retries when `gun` will try to reconnect in milliseconds. Default: 1000ms.",
suggestions: [1000]
},
%{
key: :await_up_timeout,
type: :integer,
description: "Timeout while `gun` will wait until connection is up. Default: 5000ms.",
suggestions: [5000]
}
]
},
%{
group: :pleroma,
key: :pools,
type: :group,
description: "Advanced settings for `gun` workers pools",
children: [
%{
key: :federation,
type: :keyword,
description: "Settings for federation pool.",
children: [
%{
key: :size,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :max_overflow,
type: :integer,
description: "Number of additional workers if pool is under load.",
suggestions: [10]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `gun` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :media,
type: :keyword,
description: "Settings for media pool.",
children: [
%{
key: :size,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :max_overflow,
type: :integer,
description: "Number of additional workers if pool is under load.",
suggestions: [10]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `gun` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :upload,
type: :keyword,
description: "Settings for upload pool.",
children: [
%{
key: :size,
type: :integer,
description: "Number workers in the pool.",
suggestions: [25]
},
%{
key: :max_overflow,
type: :integer,
description: "Number of additional workers if pool is under load.",
suggestions: [5]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `gun` will wait for response.",
suggestions: [300_000]
}
]
},
%{
key: :default,
type: :keyword,
description: "Settings for default pool.",
children: [
%{
key: :size,
type: :integer,
description: "Number workers in the pool.",
suggestions: [10]
},
%{
key: :max_overflow,
type: :integer,
description: "Number of additional workers if pool is under load.",
suggestions: [2]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `gun` will wait for response.",
suggestions: [10_000]
}
]
}
]
},
%{
group: :pleroma,
key: :hackney_pools,
type: :group,
description: "Advanced settings for `hackney` connections pools",
children: [
%{
key: :federation,
type: :keyword,
description: "Settings for federation pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :media,
type: :keyword,
description: "Settings for media pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :upload,
type: :keyword,
description: "Settings for upload pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [25]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [300_000]
}
]
}
]
},
%{
group: :pleroma,
key: :restrict_unauthenticated,

View File

@ -90,6 +90,8 @@
config :pleroma, :modules, runtime_dir: "test/fixtures/modules"
config :pleroma, Pleroma.Gun, Pleroma.GunMock
config :pleroma, Pleroma.Emails.NewUsersDigestEmail, enabled: true
config :pleroma, Pleroma.Plugs.RemoteIp, enabled: false

View File

@ -841,6 +841,8 @@ Some modifications are necessary to save the config settings correctly:
Most of the settings will be applied in `runtime`; this means that you don't need to restart the instance. But some settings are applied in `compile time` and require a reboot of the instance, such as:
- all settings inside these keys:
- `:hackney_pools`
- `:connections_pool`
- `:pools`
- `:chat`
- partially settings inside these keys:
- `:seconds_valid` in `Pleroma.Captcha`

View File

@ -369,8 +369,7 @@ Available caches:
* `proxy_url`: an upstream proxy to fetch posts and/or media with (default: `nil`)
* `send_user_agent`: should we include a user agent with HTTP requests? (default: `true`)
* `user_agent`: what user agent should we use? (default: `:default`), must be string or `:default`
* `adapter`: array of hackney options
* `adapter`: a keyword list of adapter-specific options (see the example below)
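A minimal sketch of overriding these options (the values are illustrative; the `adapter` keys shown are defaults of the gun adapter helper introduced in this changeset):

```elixir
config :pleroma, :http,
  proxy_url: nil,
  send_user_agent: true,
  user_agent: :default,
  adapter: [
    # gun adapter options (see Pleroma.HTTP.AdapterHelper.Gun defaults)
    connect_timeout: 5_000,
    tls_handshake_timeout: 5_000
  ]
```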
### :hackney_pools
@ -389,6 +388,42 @@ For each pool, the options are:
* `timeout` - retention duration for connections
### :connections_pool
*For `gun` adapter*
Advanced settings for the connections pool: a pool of open connections that can be reused by the worker pools.
For big instances it's recommended to increase `max_connections` to 500-1000, e.g. `config :pleroma, :connections_pool, max_connections: 500`.
It will increase memory usage, but federation will work faster. A configuration sketch follows the option list below.
* `:checkin_timeout` - timeout to checkin a connection from the pool. Default: 250ms.
* `:max_connections` - maximum number of connections in the pool. Default: 250 connections.
* `:retry` - number of retries `gun` will make to reconnect if the connection goes down. Default: 1.
* `:retry_timeout` - time in milliseconds between reconnection retries. Default: 1000ms.
* `:await_up_timeout` - timeout `gun` will wait until the connection is up. Default: 5000ms.
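For example, a bigger instance might raise the connection limit like this (a sketch; all other values are the defaults from `config/config.exs`):

```elixir
config :pleroma, :connections_pool,
  checkin_timeout: 250,
  max_connections: 500,
  retry: 1,
  retry_timeout: 1000,
  await_up_timeout: 5_000
```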
### :pools
*For `gun` adapter*
Advanced settings for worker pools.
There are four pools used:
* `:federation` for the federation jobs.
You may want this pool's `size` to be at least equal to the number of federator jobs + retry queue jobs.
* `:media` for rich media and media proxy
* `:upload` for uploaded media (if using a remote uploader and `proxy_remote: true`)
* `:default` for other requests
For each pool, the options are:
* `:size` - how many workers the pool can hold
* `:timeout` - timeout while `gun` waits for a response
* `:max_overflow` - additional workers if the pool is under load (see the example below)
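A sketch of the corresponding configuration, using the defaults shipped in `config/config.exs`:

```elixir
config :pleroma, :pools,
  federation: [size: 50, max_overflow: 10, timeout: 150_000],
  media: [size: 50, max_overflow: 10, timeout: 150_000],
  upload: [size: 25, max_overflow: 5, timeout: 300_000],
  default: [size: 10, max_overflow: 2, timeout: 10_000]
```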
## Captcha
### Pleroma.Captcha

View File

@ -5,6 +5,7 @@
defmodule Mix.Pleroma do
@doc "Common functions to be reused in mix tasks"
def start_pleroma do
Mix.Task.run("app.start")
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
if Pleroma.Config.get(:env) != :test do

View File

@ -74,4 +74,43 @@ def run(["render_timeline", nickname | _] = args) do
inputs: inputs
)
end
def run(["adapters"]) do
start_pleroma()
:ok =
Pleroma.Gun.Conn.open(
"https://httpbin.org/stream-bytes/1500",
:gun_connections
)
Process.sleep(1_500)
Benchee.run(
%{
"Without conn and without pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [pool: :no_pool, receive_conn: false]
)
end,
"Without conn and with pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [receive_conn: false]
)
end,
"With reused conn and without pool" => fn ->
{:ok, %Tesla.Env{}} =
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
adapter: [pool: :no_pool]
)
end,
"With reused conn and with pool" => fn ->
{:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")
end
},
parallel: 10
)
end
end

View File

@ -4,13 +4,13 @@
defmodule Mix.Tasks.Pleroma.Emoji do
use Mix.Task
import Mix.Pleroma
@shortdoc "Manages emoji packs"
@moduledoc File.read!("docs/administration/CLI_tasks/emoji.md")
def run(["ls-packs" | args]) do
Mix.Pleroma.start_pleroma()
Application.ensure_all_started(:hackney)
start_pleroma()
{options, [], []} = parse_global_opts(args)
@ -36,8 +36,7 @@ def run(["ls-packs" | args]) do
end
def run(["get-packs" | args]) do
Mix.Pleroma.start_pleroma()
Application.ensure_all_started(:hackney)
start_pleroma()
{options, pack_names, []} = parse_global_opts(args)
@ -135,7 +134,7 @@ def run(["get-packs" | args]) do
end
def run(["gen-pack", src]) do
Application.ensure_all_started(:hackney)
start_pleroma()
proposed_name = Path.basename(src) |> Path.rootname()
name = String.trim(IO.gets("Pack name [#{proposed_name}]: "))

View File

@ -3,8 +3,12 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Application do
import Cachex.Spec
use Application
import Cachex.Spec
alias Pleroma.Config
require Logger
@name Mix.Project.config()[:name]
@ -18,9 +22,9 @@ def named_version, do: @name <> " " <> @version
def repository, do: @repository
def user_agent do
case Pleroma.Config.get([:http, :user_agent], :default) do
case Config.get([:http, :user_agent], :default) do
:default ->
info = "#{Pleroma.Web.base_url()} <#{Pleroma.Config.get([:instance, :email], "")}>"
info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
named_version() <> "; " <> info
custom ->
@ -33,27 +37,51 @@ def user_agent do
def start(_type, _args) do
Pleroma.Config.Holder.save_default()
Pleroma.HTML.compile_scrubbers()
Pleroma.Config.DeprecationWarnings.warn()
Config.DeprecationWarnings.warn()
Pleroma.Plugs.HTTPSecurityPlug.warn_if_disabled()
Pleroma.Repo.check_migrations_applied!()
setup_instrumenters()
load_custom_modules()
adapter = Application.get_env(:tesla, :adapter)
if adapter == Tesla.Adapter.Gun do
if version = Pleroma.OTPVersion.version() do
[major, minor] =
version
|> String.split(".")
|> Enum.map(&String.to_integer/1)
|> Enum.take(2)
if (major == 22 and minor < 2) or major < 22 do
raise "
!!!OTP VERSION WARNING!!!
You are using the gun adapter with OTP version #{version}, which doesn't correctly handle unordered certificate chains.
"
end
else
raise "
!!!OTP VERSION WARNING!!!
To correctly handle unordered certificate chains, OTP version must be at least 22.2.
"
end
end
# Define workers and child supervisors to be supervised
children =
[
Pleroma.Repo,
Pleroma.Config.TransferTask,
Config.TransferTask,
Pleroma.Emoji,
Pleroma.Captcha,
Pleroma.Plugs.RateLimiter.Supervisor
] ++
cachex_children() ++
hackney_pool_children() ++
http_children(adapter, @env) ++
[
Pleroma.Stats,
Pleroma.JobQueueMonitor,
{Oban, Pleroma.Config.get(Oban)}
{Oban, Config.get(Oban)}
] ++
task_children(@env) ++
streamer_child(@env) ++
@ -70,7 +98,7 @@ def start(_type, _args) do
end
def load_custom_modules do
dir = Pleroma.Config.get([:modules, :runtime_dir])
dir = Config.get([:modules, :runtime_dir])
if dir && File.exists?(dir) do
dir
@ -111,20 +139,6 @@ defp setup_instrumenters do
Pleroma.Web.Endpoint.Instrumenter.setup()
end
def enabled_hackney_pools do
[:media] ++
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
[:federation]
else
[]
end ++
if Pleroma.Config.get([Pleroma.Upload, :proxy_remote]) do
[:upload]
else
[]
end
end
defp cachex_children do
[
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
@ -146,7 +160,7 @@ defp idempotency_expiration,
do: expiration(default: :timer.seconds(6 * 60 * 60), interval: :timer.seconds(60))
defp seconds_valid_interval,
do: :timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]))
do: :timer.seconds(Config.get!([Pleroma.Captcha, :seconds_valid]))
defp build_cachex(type, opts),
do: %{
@ -155,9 +169,9 @@ defp build_cachex(type, opts),
type: :worker
}
defp chat_enabled?, do: Pleroma.Config.get([:chat, :enabled])
defp chat_enabled?, do: Config.get([:chat, :enabled])
defp streamer_child(:test), do: []
defp streamer_child(env) when env in [:test, :benchmark], do: []
defp streamer_child(_) do
[Pleroma.Web.Streamer.supervisor()]
@ -169,13 +183,6 @@ defp chat_child(_env, true) do
defp chat_child(_, _), do: []
defp hackney_pool_children do
for pool <- enabled_hackney_pools() do
options = Pleroma.Config.get([:hackney_pools, pool])
:hackney_pool.child_spec(pool, options)
end
end
defp task_children(:test) do
[
%{
@ -200,4 +207,31 @@ defp task_children(_) do
}
]
end
# start hackney and gun pools in tests
defp http_children(_, :test) do
hackney_options = Config.get([:hackney_pools, :federation])
hackney_pool = :hackney_pool.child_spec(:federation, hackney_options)
[hackney_pool, Pleroma.Pool.Supervisor]
end
defp http_children(Tesla.Adapter.Hackney, _) do
pools = [:federation, :media]
pools =
if Config.get([Pleroma.Upload, :proxy_remote]) do
[:upload | pools]
else
pools
end
for pool <- pools do
options = Config.get([:hackney_pools, pool])
:hackney_pool.child_spec(pool, options)
end
end
defp http_children(Tesla.Adapter.Gun, _), do: [Pleroma.Pool.Supervisor]
defp http_children(_, _), do: []
end

View File

@ -278,8 +278,6 @@ defp do_convert({:proxy_url, {type, host, port}}) do
}
end
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
defp do_convert(entity) when is_tuple(entity) do
value =
entity
@ -323,15 +321,6 @@ defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
end
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
{partial_chain, []} =
entity
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
|> Code.eval_string()
{:partial_chain, partial_chain}
end
defp do_transform(%{"tuple" => entity}) do
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
end

View File

@ -5,6 +5,7 @@
defmodule Pleroma.Config.TransferTask do
use Task
alias Pleroma.Config
alias Pleroma.ConfigDB
alias Pleroma.Repo
@ -18,7 +19,9 @@ defmodule Pleroma.Config.TransferTask do
{:pleroma, Oban},
{:pleroma, :rate_limit},
{:pleroma, :markup},
{:plerome, :streamer}
{:pleroma, :streamer},
{:pleroma, :pools},
{:pleroma, :connections_pool}
]
@reboot_time_subkeys [
@ -32,45 +35,33 @@ defmodule Pleroma.Config.TransferTask do
{:pleroma, :gopher, [:enabled]}
]
@reject [nil, :prometheus]
def start_link(_) do
load_and_update_env()
if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
if Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
:ignore
end
@spec load_and_update_env([ConfigDB.t()]) :: :ok | false
def load_and_update_env(deleted \\ [], restart_pleroma? \\ true) do
with {:configurable, true} <-
{:configurable, Pleroma.Config.get(:configurable_from_database)},
true <- Ecto.Adapters.SQL.table_exists?(Repo, "config"),
started_applications <- Application.started_applications() do
@spec load_and_update_env([ConfigDB.t()], boolean()) :: :ok
def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do
# We need to restart applications for loaded settings to take effect
in_db = Repo.all(ConfigDB)
with_deleted = in_db ++ deleted
reject_for_restart = if restart_pleroma?, do: @reject, else: [:pleroma | @reject]
applications =
with_deleted
|> Enum.map(&merge_and_update(&1))
|> Enum.uniq()
# TODO: some problem with prometheus after restart!
|> Enum.reject(&(&1 in reject_for_restart))
# to be ensured that pleroma will be restarted last
applications =
if :pleroma in applications do
List.delete(applications, :pleroma) ++ [:pleroma]
reject_restart =
if restart_pleroma? do
[nil, :prometheus]
else
Restarter.Pleroma.rebooted()
applications
[:pleroma, nil, :prometheus]
end
Enum.each(applications, &restart(started_applications, &1, Pleroma.Config.get(:env)))
started_applications = Application.started_applications()
(Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.map(&merge_and_update/1)
|> Enum.uniq()
|> Enum.reject(&(&1 in reject_restart))
|> maybe_set_pleroma_last()
|> Enum.each(&restart(started_applications, &1, Config.get(:env)))
:ok
else
@ -78,32 +69,19 @@ def load_and_update_env(deleted \\ [], restart_pleroma? \\ true) do
end
end
defp merge_and_update(setting) do
try do
key = ConfigDB.from_string(setting.key)
group = ConfigDB.from_string(setting.group)
default = Pleroma.Config.Holder.default_config(group, key)
value = ConfigDB.from_binary(setting.value)
merged_value =
if Ecto.get_meta(setting, :state) == :deleted do
default
defp maybe_set_pleroma_last(apps) do
# ensure that pleroma is restarted last
if :pleroma in apps do
apps
|> List.delete(:pleroma)
|> List.insert_at(-1, :pleroma)
else
if can_be_merged?(default, value) do
ConfigDB.merge_group(group, key, default, value)
else
value
Restarter.Pleroma.rebooted()
apps
end
end
:ok = update_env(group, key, merged_value)
if group != :logger do
if group != :pleroma or pleroma_need_restart?(group, key, value) do
group
end
else
defp group_for_restart(:logger, key, _, merged_value) do
# change logger configuration in runtime, without restart
if Keyword.keyword?(merged_value) and
key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
@ -114,6 +92,31 @@ defp merge_and_update(setting) do
nil
end
defp group_for_restart(group, _, _, _) when group != :pleroma, do: group
defp group_for_restart(group, key, value, _) do
if pleroma_need_restart?(group, key, value), do: group
end
defp merge_and_update(setting) do
try do
key = ConfigDB.from_string(setting.key)
group = ConfigDB.from_string(setting.group)
default = Config.Holder.default_config(group, key)
value = ConfigDB.from_binary(setting.value)
merged_value =
cond do
Ecto.get_meta(setting, :state) == :deleted -> default
can_be_merged?(default, value) -> ConfigDB.merge_group(group, key, default, value)
true -> value
end
:ok = update_env(group, key, merged_value)
group_for_restart(group, key, value, merged_value)
rescue
error ->
error_msg =

45
lib/pleroma/gun/api.ex Normal file
View File

@ -0,0 +1,45 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.API do
@behaviour Pleroma.Gun
alias Pleroma.Gun
@gun_keys [
:connect_timeout,
:http_opts,
:http2_opts,
:protocols,
:retry,
:retry_timeout,
:trace,
:transport,
:tls_opts,
:tcp_opts,
:socks_opts,
:ws_opts
]
@impl Gun
def open(host, port, opts \\ %{}), do: :gun.open(host, port, Map.take(opts, @gun_keys))
@impl Gun
defdelegate info(pid), to: :gun
@impl Gun
defdelegate close(pid), to: :gun
@impl Gun
defdelegate await_up(pid, timeout \\ 5_000), to: :gun
@impl Gun
defdelegate connect(pid, opts), to: :gun
@impl Gun
defdelegate await(pid, ref), to: :gun
@impl Gun
defdelegate set_owner(pid, owner), to: :gun
end

196
lib/pleroma/gun/conn.ex Normal file
View File

@ -0,0 +1,196 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.Conn do
@moduledoc """
Struct for gun connection data
"""
alias Pleroma.Gun
alias Pleroma.Pool.Connections
require Logger
@type gun_state :: :up | :down
@type conn_state :: :active | :idle
@type t :: %__MODULE__{
conn: pid(),
gun_state: gun_state(),
conn_state: conn_state(),
used_by: [pid()],
last_reference: pos_integer(),
crf: float(),
retries: pos_integer()
}
defstruct conn: nil,
gun_state: :open,
conn_state: :init,
used_by: [],
last_reference: 0,
crf: 1,
retries: 0
@spec open(String.t() | URI.t(), atom(), keyword()) :: :ok | nil
def open(url, name, opts \\ [])
def open(url, name, opts) when is_binary(url), do: open(URI.parse(url), name, opts)
def open(%URI{} = uri, name, opts) do
pool_opts = Pleroma.Config.get([:connections_pool], [])
opts =
opts
|> Enum.into(%{})
|> Map.put_new(:retry, pool_opts[:retry] || 1)
|> Map.put_new(:retry_timeout, pool_opts[:retry_timeout] || 1000)
|> Map.put_new(:await_up_timeout, pool_opts[:await_up_timeout] || 5_000)
|> maybe_add_tls_opts(uri)
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
conn_pid =
if Connections.count(name) < (opts[:max_connection] || pool_opts[:max_connections] || 250) do
do_open(uri, opts)
else
close_least_used_and_do_open(name, uri, opts)
end
if is_pid(conn_pid) do
conn = %Pleroma.Gun.Conn{
conn: conn_pid,
gun_state: :up,
conn_state: :active,
last_reference: :os.system_time(:second)
}
:ok = Gun.set_owner(conn_pid, Process.whereis(name))
Connections.add_conn(name, key, conn)
end
end
defp maybe_add_tls_opts(opts, %URI{scheme: "http"}), do: opts
defp maybe_add_tls_opts(opts, %URI{scheme: "https", host: host}) do
tls_opts = [
verify: :verify_peer,
cacertfile: CAStore.file_path(),
depth: 20,
reuse_sessions: false,
verify_fun:
{&:ssl_verify_hostname.verify_fun/3,
[check_hostname: Pleroma.HTTP.Connection.format_host(host)]}
]
tls_opts =
if Keyword.keyword?(opts[:tls_opts]) do
Keyword.merge(tls_opts, opts[:tls_opts])
else
tls_opts
end
Map.put(opts, :tls_opts, tls_opts)
end
defp do_open(uri, %{proxy: {proxy_host, proxy_port}} = opts) do
connect_opts =
uri
|> destination_opts()
|> add_http2_opts(uri.scheme, Map.get(opts, :tls_opts, []))
with open_opts <- Map.delete(opts, :tls_opts),
{:ok, conn} <- Gun.open(proxy_host, proxy_port, open_opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]),
stream <- Gun.connect(conn, connect_opts),
{:response, :fin, 200, _} <- Gun.await(conn, stream) do
conn
else
error ->
Logger.warn(
"Opening proxied connection to #{compose_uri_log(uri)} failed with error #{
inspect(error)
}"
)
error
end
end
defp do_open(uri, %{proxy: {proxy_type, proxy_host, proxy_port}} = opts) do
version =
proxy_type
|> to_string()
|> String.last()
|> case do
"4" -> 4
_ -> 5
end
socks_opts =
uri
|> destination_opts()
|> add_http2_opts(uri.scheme, Map.get(opts, :tls_opts, []))
|> Map.put(:version, version)
opts =
opts
|> Map.put(:protocols, [:socks])
|> Map.put(:socks_opts, socks_opts)
with {:ok, conn} <- Gun.open(proxy_host, proxy_port, opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
conn
else
error ->
Logger.warn(
"Opening socks proxied connection to #{compose_uri_log(uri)} failed with error #{
inspect(error)
}"
)
error
end
end
defp do_open(%URI{host: host, port: port} = uri, opts) do
host = Pleroma.HTTP.Connection.parse_host(host)
with {:ok, conn} <- Gun.open(host, port, opts),
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
conn
else
error ->
Logger.warn(
"Opening connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
error
end
end
defp destination_opts(%URI{host: host, port: port}) do
host = Pleroma.HTTP.Connection.parse_host(host)
%{host: host, port: port}
end
defp add_http2_opts(opts, "https", tls_opts) do
Map.merge(opts, %{protocols: [:http2], transport: :tls, tls_opts: tls_opts})
end
defp add_http2_opts(opts, _, _), do: opts
defp close_least_used_and_do_open(name, uri, opts) do
with [{key, conn} | _conns] <- Connections.get_unused_conns(name),
:ok <- Gun.close(conn.conn) do
Connections.remove_conn(name, key)
do_open(uri, opts)
else
[] -> {:error, :pool_overflowed}
end
end
def compose_uri_log(%URI{scheme: scheme, host: host, path: path}) do
"#{scheme}://#{host}#{path}"
end
end

31
lib/pleroma/gun/gun.ex Normal file
View File

@ -0,0 +1,31 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun do
@callback open(charlist(), pos_integer(), map()) :: {:ok, pid()}
@callback info(pid()) :: map()
@callback close(pid()) :: :ok
@callback await_up(pid, pos_integer()) :: {:ok, atom()} | {:error, atom()}
@callback connect(pid(), map()) :: reference()
@callback await(pid(), reference()) :: {:response, :fin, 200, []}
@callback set_owner(pid(), pid()) :: :ok
@api Pleroma.Config.get([Pleroma.Gun], Pleroma.Gun.API)
defp api, do: @api
def open(host, port, opts), do: api().open(host, port, opts)
def info(pid), do: api().info(pid)
def close(pid), do: api().close(pid)
def await_up(pid, timeout \\ 5_000), do: api().await_up(pid, timeout)
def connect(pid, opts), do: api().connect(pid, opts)
def await(pid, ref), do: api().await(pid, ref)
def set_owner(pid, owner), do: api().set_owner(pid, owner)
end
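Since the implementation module is read from config, tests can point `Pleroma.Gun` at a stub. A minimal hand-rolled sketch (module name and return values are illustrative, derived only from the callbacks above):

```elixir
defmodule MyApp.GunStub do
  # Hypothetical stand-in, enabled with `config :pleroma, Pleroma.Gun, MyApp.GunStub`
  @behaviour Pleroma.Gun

  @impl true
  def open(_host, _port, _opts), do: {:ok, self()}
  @impl true
  def info(_pid), do: %{origin_scheme: "http", origin_host: {127, 0, 0, 1}, origin_port: 80}
  @impl true
  def close(_pid), do: :ok
  @impl true
  def await_up(_pid, _timeout), do: {:ok, :http}
  @impl true
  def connect(_pid, _opts), do: make_ref()
  @impl true
  def await(_pid, _ref), do: {:response, :fin, 200, []}
  @impl true
  def set_owner(_pid, _owner), do: :ok
end
```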

View File

@ -0,0 +1,41 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.AdapterHelper do
alias Pleroma.HTTP.Connection
@type proxy ::
{Connection.host(), pos_integer()}
| {Connection.proxy_type(), Connection.host(), pos_integer()}
@callback options(keyword(), URI.t()) :: keyword()
@callback after_request(keyword()) :: :ok
@spec options(keyword(), URI.t()) :: keyword()
def options(opts, _uri) do
proxy = Pleroma.Config.get([:http, :proxy_url], nil)
maybe_add_proxy(opts, format_proxy(proxy))
end
@spec maybe_get_conn(URI.t(), keyword()) :: keyword()
def maybe_get_conn(_uri, opts), do: opts
@spec after_request(keyword()) :: :ok
def after_request(_opts), do: :ok
@spec format_proxy(String.t() | tuple() | nil) :: proxy() | nil
def format_proxy(nil), do: nil
def format_proxy(proxy_url) do
case Connection.parse_proxy(proxy_url) do
{:ok, host, port} -> {host, port}
{:ok, type, host, port} -> {type, host, port}
_ -> nil
end
end
@spec maybe_add_proxy(keyword(), proxy() | nil) :: keyword()
def maybe_add_proxy(opts, nil), do: opts
def maybe_add_proxy(opts, proxy), do: Keyword.put_new(opts, :proxy, proxy)
end
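A quick sketch of how these helpers compose (the proxy address and timeout are illustrative):

```elixir
alias Pleroma.HTTP.AdapterHelper

# "127.0.0.1:8123" is parsed into an address/port pair by Connection.parse_proxy/1
proxy = AdapterHelper.format_proxy("127.0.0.1:8123")

AdapterHelper.maybe_add_proxy([recv_timeout: 20_000], proxy)
# => [proxy: {{127, 0, 0, 1}, 8123}, recv_timeout: 20_000]
```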

View File

@ -0,0 +1,77 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.AdapterHelper.Gun do
@behaviour Pleroma.HTTP.AdapterHelper
alias Pleroma.HTTP.AdapterHelper
alias Pleroma.Pool.Connections
require Logger
@defaults [
connect_timeout: 5_000,
domain_lookup_timeout: 5_000,
tls_handshake_timeout: 5_000,
retry: 1,
retry_timeout: 1000,
await_up_timeout: 5_000
]
@spec options(keyword(), URI.t()) :: keyword()
def options(incoming_opts \\ [], %URI{} = uri) do
proxy =
Pleroma.Config.get([:http, :proxy_url])
|> AdapterHelper.format_proxy()
config_opts = Pleroma.Config.get([:http, :adapter], [])
@defaults
|> Keyword.merge(config_opts)
|> add_scheme_opts(uri)
|> AdapterHelper.maybe_add_proxy(proxy)
|> maybe_get_conn(uri, incoming_opts)
end
@spec after_request(keyword()) :: :ok
def after_request(opts) do
if opts[:conn] && opts[:body_as] != :chunks do
Connections.checkout(opts[:conn], self(), :gun_connections)
end
:ok
end
defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
defp add_scheme_opts(opts, %{scheme: "https"}) do
opts
|> Keyword.put(:certificates_verification, true)
|> Keyword.put(:tls_opts, log_level: :warning)
end
defp maybe_get_conn(adapter_opts, uri, incoming_opts) do
{receive_conn?, opts} =
adapter_opts
|> Keyword.merge(incoming_opts)
|> Keyword.pop(:receive_conn, true)
if Connections.alive?(:gun_connections) and receive_conn? do
checkin_conn(uri, opts)
else
opts
end
end
defp checkin_conn(uri, opts) do
case Connections.checkin(uri, :gun_connections) do
nil ->
Task.start(Pleroma.Gun.Conn, :open, [uri, :gun_connections, opts])
opts
conn when is_pid(conn) ->
Keyword.merge(opts, conn: conn, close_conn: false)
end
end
end

View File

@ -0,0 +1,43 @@
defmodule Pleroma.HTTP.AdapterHelper.Hackney do
@behaviour Pleroma.HTTP.AdapterHelper
@defaults [
connect_timeout: 10_000,
recv_timeout: 20_000,
follow_redirect: true,
force_redirect: true,
pool: :federation
]
@spec options(keyword(), URI.t()) :: keyword()
def options(connection_opts \\ [], %URI{} = uri) do
proxy = Pleroma.Config.get([:http, :proxy_url])
config_opts = Pleroma.Config.get([:http, :adapter], [])
@defaults
|> Keyword.merge(config_opts)
|> Keyword.merge(connection_opts)
|> add_scheme_opts(uri)
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
end
defp add_scheme_opts(opts, %URI{scheme: "http"}), do: opts
defp add_scheme_opts(opts, %URI{scheme: "https", host: host}) do
ssl_opts = [
ssl_options: [
# Workaround for remote server certificate chain issues
partial_chain: &:hackney_connect.partial_chain/1,
# We don't support TLS v1.3 yet
versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"],
server_name_indication: to_charlist(host)
]
]
Keyword.merge(opts, ssl_opts)
end
def after_request(_), do: :ok
end

View File

@ -4,40 +4,121 @@
defmodule Pleroma.HTTP.Connection do
@moduledoc """
Connection for http-requests.
Configure Tesla.Client with default and customized adapter options.
"""
@hackney_options [
connect_timeout: 10_000,
recv_timeout: 20_000,
follow_redirect: true,
force_redirect: true,
pool: :federation
]
@adapter Application.get_env(:tesla, :adapter)
alias Pleroma.Config
alias Pleroma.HTTP.AdapterHelper
require Logger
@defaults [pool: :federation]
@type ip_address :: ipv4_address() | ipv6_address()
@type ipv4_address :: {0..255, 0..255, 0..255, 0..255}
@type ipv6_address ::
{0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535}
@type proxy_type() :: :socks4 | :socks5
@type host() :: charlist() | ip_address()
@doc """
Configure a client connection
# Returns
Tesla.Env.client
Merge default connection & adapter options with received ones.
"""
@spec new(Keyword.t()) :: Tesla.Env.client()
def new(opts \\ []) do
Tesla.client([], {@adapter, hackney_options(opts)})
@spec options(URI.t(), keyword()) :: keyword()
def options(%URI{} = uri, opts \\ []) do
@defaults
|> pool_timeout()
|> Keyword.merge(opts)
|> adapter_helper().options(uri)
end
# fetch Hackney options
#
def hackney_options(opts) do
options = Keyword.get(opts, :adapter, [])
adapter_options = Pleroma.Config.get([:http, :adapter], [])
proxy_url = Pleroma.Config.get([:http, :proxy_url], nil)
defp pool_timeout(opts) do
{config_key, default} =
if adapter() == Tesla.Adapter.Gun do
{:pools, Config.get([:pools, :default, :timeout])}
else
{:hackney_pools, 10_000}
end
@hackney_options
|> Keyword.merge(adapter_options)
|> Keyword.merge(options)
|> Keyword.merge(proxy: proxy_url)
timeout = Config.get([config_key, opts[:pool], :timeout], default)
Keyword.merge(opts, timeout: timeout)
end
@spec after_request(keyword()) :: :ok
def after_request(opts), do: adapter_helper().after_request(opts)
defp adapter, do: Application.get_env(:tesla, :adapter)
defp adapter_helper do
case adapter() do
Tesla.Adapter.Gun -> AdapterHelper.Gun
Tesla.Adapter.Hackney -> AdapterHelper.Hackney
_ -> AdapterHelper
end
end
@spec parse_proxy(String.t() | tuple() | nil) ::
{:ok, host(), pos_integer()}
| {:ok, proxy_type(), host(), pos_integer()}
| {:error, atom()}
| nil
def parse_proxy(nil), do: nil
def parse_proxy(proxy) when is_binary(proxy) do
with [host, port] <- String.split(proxy, ":"),
{port, ""} <- Integer.parse(port) do
{:ok, parse_host(host), port}
else
{_, _} ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
:error ->
Logger.warn("Parsing port failed #{inspect(proxy)}")
{:error, :invalid_proxy_port}
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
def parse_proxy(proxy) when is_tuple(proxy) do
with {type, host, port} <- proxy do
{:ok, type, parse_host(host), port}
else
_ ->
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
{:error, :invalid_proxy}
end
end
@spec parse_host(String.t() | atom() | charlist()) :: charlist() | ip_address()
def parse_host(host) when is_list(host), do: host
def parse_host(host) when is_atom(host), do: to_charlist(host)
def parse_host(host) when is_binary(host) do
host = to_charlist(host)
case :inet.parse_address(host) do
{:error, :einval} -> host
{:ok, ip} -> ip
end
end
@spec format_host(String.t()) :: charlist()
def format_host(host) do
host_charlist = to_charlist(host)
case :inet.parse_address(host_charlist) do
{:error, :einval} ->
:idna.encode(host_charlist)
{:ok, _ip} ->
host_charlist
end
end
end
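For reference, `parse_proxy/1` accepts both string and tuple forms (the values below are illustrative):

```elixir
alias Pleroma.HTTP.Connection

Connection.parse_proxy("127.0.0.1:8123")
# => {:ok, {127, 0, 0, 1}, 8123}

Connection.parse_proxy({:socks5, :localhost, 9050})
# => {:ok, :socks5, 'localhost', 9050}

Connection.parse_proxy("localhost")
# => {:error, :invalid_proxy} (and a warning is logged)
```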

View File

@ -4,21 +4,47 @@
defmodule Pleroma.HTTP do
@moduledoc """
Wrapper for `Tesla.request/2`.
"""
alias Pleroma.HTTP.Connection
alias Pleroma.HTTP.Request
alias Pleroma.HTTP.RequestBuilder, as: Builder
alias Tesla.Client
alias Tesla.Env
require Logger
@type t :: __MODULE__
@doc """
Builds and perform http request.
Performs GET request.
See `Pleroma.HTTP.request/5`
"""
@spec get(Request.url() | nil, Request.headers(), keyword()) ::
nil | {:ok, Env.t()} | {:error, any()}
def get(url, headers \\ [], options \\ [])
def get(nil, _, _), do: nil
def get(url, headers, options), do: request(:get, url, "", headers, options)
@doc """
Performs POST request.
See `Pleroma.HTTP.request/5`
"""
@spec post(Request.url(), String.t(), Request.headers(), keyword()) ::
{:ok, Env.t()} | {:error, any()}
def post(url, body, headers \\ [], options \\ []),
do: request(:post, url, body, headers, options)
@doc """
Builds and performs http request.
# Arguments:
`method` - :get, :post, :put, :delete
`url`
`body`
`url` - full url
`body` - request body
`headers` - a list of headers, e.g. `[{"content-type", "text/plain"}]`
`options` - custom, per-request middleware or adapter options
@ -26,61 +52,66 @@ defmodule Pleroma.HTTP do
`{:ok, %Tesla.Env{}}` or `{:error, error}`
"""
def request(method, url, body \\ "", headers \\ [], options \\ []) do
try do
options =
process_request_options(options)
|> process_sni_options(url)
@spec request(atom(), Request.url(), String.t(), Request.headers(), keyword()) ::
{:ok, Env.t()} | {:error, any()}
def request(method, url, body, headers, options) when is_binary(url) do
uri = URI.parse(url)
adapter_opts = Connection.options(uri, options[:adapter] || [])
options = put_in(options[:adapter], adapter_opts)
params = options[:params] || []
request = build_request(method, headers, options, url, body, params)
params = Keyword.get(options, :params, [])
adapter = Application.get_env(:tesla, :adapter)
client = Tesla.client([Tesla.Middleware.FollowRedirects], adapter)
%{}
pid = Process.whereis(adapter_opts[:pool])
pool_alive? =
if adapter == Tesla.Adapter.Gun && pid do
Process.alive?(pid)
else
false
end
request_opts =
adapter_opts
|> Enum.into(%{})
|> Map.put(:env, Pleroma.Config.get([:env]))
|> Map.put(:pool_alive?, pool_alive?)
response = request(client, request, request_opts)
Connection.after_request(adapter_opts)
response
end
@spec request(Client.t(), keyword(), map()) :: {:ok, Env.t()} | {:error, any()}
def request(%Client{} = client, request, %{env: :test}), do: request(client, request)
def request(%Client{} = client, request, %{body_as: :chunks}), do: request(client, request)
def request(%Client{} = client, request, %{pool_alive?: false}), do: request(client, request)
def request(%Client{} = client, request, %{pool: pool, timeout: timeout}) do
:poolboy.transaction(
pool,
&Pleroma.Pool.Request.execute(&1, client, request, timeout),
timeout
)
end
@spec request(Client.t(), keyword()) :: {:ok, Env.t()} | {:error, any()}
def request(client, request), do: Tesla.request(client, request)
defp build_request(method, headers, options, url, body, params) do
Builder.new()
|> Builder.method(method)
|> Builder.headers(headers)
|> Builder.opts(options)
|> Builder.url(url)
|> Builder.add_param(:body, :body, body)
|> Builder.add_param(:query, :query, params)
|> Enum.into([])
|> (&Tesla.request(Connection.new(options), &1)).()
rescue
e ->
{:error, e}
catch
:exit, e ->
{:error, e}
|> Builder.convert_to_keyword()
end
end
defp process_sni_options(options, nil), do: options
defp process_sni_options(options, url) do
uri = URI.parse(url)
host = uri.host |> to_charlist()
case uri.scheme do
"https" -> options ++ [ssl: [server_name_indication: host]]
_ -> options
end
end
def process_request_options(options) do
Keyword.merge(Pleroma.HTTP.Connection.hackney_options([]), options)
end
@doc """
Performs GET request.
See `Pleroma.HTTP.request/5`
"""
def get(url, headers \\ [], options \\ []),
do: request(:get, url, "", headers, options)
@doc """
Performs POST request.
See `Pleroma.HTTP.request/5`
"""
def post(url, body, headers \\ [], options \\ []),
do: request(:post, url, body, headers, options)
end
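A usage sketch (URL and headers are illustrative; `pool: :default` is one of the pools configured above):

```elixir
# GET with per-request adapter options; the response is a Tesla.Env struct
{:ok, %Tesla.Env{status: status, body: body}} =
  Pleroma.HTTP.get(
    "https://example.com/users/alice",
    [{"accept", "application/activity+json"}],
    adapter: [pool: :default]
  )
```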

View File

@ -0,0 +1,23 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.Request do
@moduledoc """
Request struct.
"""
defstruct method: :get, url: "", query: [], headers: [], body: "", opts: []
@type method :: :head | :get | :delete | :trace | :options | :post | :put | :patch
@type url :: String.t()
@type headers :: [{String.t(), String.t()}]
@type t :: %__MODULE__{
method: method(),
url: url(),
query: keyword(),
headers: headers(),
body: String.t(),
opts: keyword()
}
end

View File

@ -7,136 +7,87 @@ defmodule Pleroma.HTTP.RequestBuilder do
Helper functions for building Tesla requests
"""
alias Pleroma.HTTP.Request
alias Tesla.Multipart
@doc """
Specify the request method when building a request
## Parameters
- request (Map) - Collected request options
- m (atom) - Request method
## Returns
Map
Creates a new request
"""
@spec method(map(), atom) :: map()
def method(request, m) do
Map.put_new(request, :method, m)
end
@spec new(Request.t()) :: Request.t()
def new(%Request{} = request \\ %Request{}), do: request
@doc """
Specify the request method when building a request
## Parameters
- request (Map) - Collected request options
- u (String) - Request URL
## Returns
Map
"""
@spec url(map(), String.t()) :: map()
def url(request, u) do
Map.put_new(request, :url, u)
end
@spec method(Request.t(), Request.method()) :: Request.t()
def method(request, m), do: %{request | method: m}
@doc """
Specify the request URL when building a request
"""
@spec url(Request.t(), Request.url()) :: Request.t()
def url(request, u), do: %{request | url: u}
@doc """
Add headers to the request
"""
@spec headers(map(), list(tuple)) :: map()
def headers(request, header_list) do
header_list =
@spec headers(Request.t(), Request.headers()) :: Request.t()
def headers(request, headers) do
headers_list =
if Pleroma.Config.get([:http, :send_user_agent]) do
header_list ++ [{"User-Agent", Pleroma.Application.user_agent()}]
[{"user-agent", Pleroma.Application.user_agent()} | headers]
else
header_list
headers
end
Map.put_new(request, :headers, header_list)
%{request | headers: headers_list}
end
@doc """
Add custom, per-request middleware or adapter options to the request
"""
@spec opts(map(), Keyword.t()) :: map()
def opts(request, options) do
Map.put_new(request, :opts, options)
end
@spec opts(Request.t(), keyword()) :: Request.t()
def opts(request, options), do: %{request | opts: options}
@doc """
Add optional parameters to the request
## Parameters
- request (Map) - Collected request options
- definitions (Map) - Map of parameter name to parameter location.
- options (KeywordList) - The provided optional parameters
## Returns
Map
"""
@spec add_optional_params(map(), %{optional(atom) => atom}, keyword()) :: map()
def add_optional_params(request, _, []), do: request
@spec add_param(Request.t(), atom(), atom(), any()) :: Request.t()
def add_param(request, :query, :query, values), do: %{request | query: values}
def add_optional_params(request, definitions, [{key, value} | tail]) do
case definitions do
%{^key => location} ->
request
|> add_param(location, key, value)
|> add_optional_params(definitions, tail)
_ ->
add_optional_params(request, definitions, tail)
end
end
@doc """
Add optional parameters to the request
## Parameters
- request (Map) - Collected request options
- location (atom) - Where to put the parameter
- key (atom) - The name of the parameter
- value (any) - The value of the parameter
## Returns
Map
"""
@spec add_param(map(), atom, atom, any()) :: map()
def add_param(request, :query, :query, values), do: Map.put(request, :query, values)
def add_param(request, :body, :body, value), do: Map.put(request, :body, value)
def add_param(request, :body, :body, value), do: %{request | body: value}
def add_param(request, :body, key, value) do
request
|> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
|> Map.put(:body, Multipart.new())
|> Map.update!(
:body,
&Tesla.Multipart.add_field(
&Multipart.add_field(
&1,
key,
Jason.encode!(value),
headers: [{:"Content-Type", "application/json"}]
headers: [{"content-type", "application/json"}]
)
)
end
def add_param(request, :file, name, path) do
request
|> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
|> Map.update!(:body, &Tesla.Multipart.add_file(&1, path, name: name))
|> Map.put(:body, Multipart.new())
|> Map.update!(:body, &Multipart.add_file(&1, path, name: name))
end
def add_param(request, :form, name, value) do
request
|> Map.update(:body, %{name => value}, &Map.put(&1, name, value))
Map.update(request, :body, %{name => value}, &Map.put(&1, name, value))
end
def add_param(request, location, key, value) do
Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
end
def convert_to_keyword(request) do
request
|> Map.from_struct()
|> Enum.into([])
end
end
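The builder is used by `Pleroma.HTTP.request/5` roughly like this (a sketch mirroring `build_request/6` above; values are illustrative):

```elixir
alias Pleroma.HTTP.RequestBuilder, as: Builder

Builder.new()
|> Builder.method(:get)
|> Builder.headers([{"accept", "application/json"}])
|> Builder.opts(adapter: [pool: :default])
|> Builder.url("https://example.com/api")
|> Builder.add_param(:body, :body, "")
|> Builder.add_param(:query, :query, page: 1)
|> Builder.convert_to_keyword()
# => [method: :get, url: "https://example.com/api", query: [page: 1], ...]
```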

View File

@ -141,7 +141,7 @@ defp make_signature(id, date) do
date: date
})
[{:Signature, signature}]
[{"signature", signature}]
end
defp sign_fetch(headers, id, date) do
@ -154,7 +154,7 @@ defp sign_fetch(headers, id, date) do
defp maybe_date_fetch(headers, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
headers ++ [{:Date, date}]
headers ++ [{"date", date}]
else
headers
end
@ -166,7 +166,7 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
date = Pleroma.Signature.signed_date()
headers =
[{:Accept, "application/activity+json"}]
[{"accept", "application/activity+json"}]
|> maybe_date_fetch(date)
|> sign_fetch(id, date)

View File

@ -0,0 +1,28 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.OTPVersion do
@spec version() :: String.t() | nil
def version do
# OTP Version https://erlang.org/doc/system_principles/versions.html#otp-version
[
Path.join(:code.root_dir(), "OTP_VERSION"),
Path.join([:code.root_dir(), "releases", :erlang.system_info(:otp_release), "OTP_VERSION"])
]
|> get_version_from_files()
end
@spec get_version_from_files([Path.t()]) :: String.t() | nil
def get_version_from_files([]), do: nil
def get_version_from_files([path | paths]) do
if File.exists?(path) do
path
|> File.read!()
|> String.replace(~r/\r|\n|\s/, "")
else
get_version_from_files(paths)
end
end
end
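For reference, the returned value is the plain version string read from the `OTP_VERSION` file (example value is illustrative):

```elixir
Pleroma.OTPVersion.version()
# => "22.2.7" (or nil if no OTP_VERSION file could be found)
```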

View File

@ -0,0 +1,283 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Connections do
use GenServer
alias Pleroma.Config
alias Pleroma.Gun
require Logger
@type domain :: String.t()
@type conn :: Pleroma.Gun.Conn.t()
@type t :: %__MODULE__{
conns: %{domain() => conn()},
opts: keyword()
}
defstruct conns: %{}, opts: []
@spec start_link({atom(), keyword()}) :: {:ok, pid()}
def start_link({name, opts}) do
GenServer.start_link(__MODULE__, opts, name: name)
end
@impl true
def init(opts), do: {:ok, %__MODULE__{conns: %{}, opts: opts}}
@spec checkin(String.t() | URI.t(), atom()) :: pid() | nil
def checkin(url, name)
def checkin(url, name) when is_binary(url), do: checkin(URI.parse(url), name)
def checkin(%URI{} = uri, name) do
timeout = Config.get([:connections_pool, :checkin_timeout], 250)
GenServer.call(name, {:checkin, uri}, timeout)
end
@spec alive?(atom()) :: boolean()
def alive?(name) do
if pid = Process.whereis(name) do
Process.alive?(pid)
else
false
end
end
@spec get_state(atom()) :: t()
def get_state(name) do
GenServer.call(name, :state)
end
@spec count(atom()) :: pos_integer()
def count(name) do
GenServer.call(name, :count)
end
@spec get_unused_conns(atom()) :: [{domain(), conn()}]
def get_unused_conns(name) do
GenServer.call(name, :unused_conns)
end
@spec checkout(pid(), pid(), atom()) :: :ok
def checkout(conn, pid, name) do
GenServer.cast(name, {:checkout, conn, pid})
end
@spec add_conn(atom(), String.t(), Pleroma.Gun.Conn.t()) :: :ok
def add_conn(name, key, conn) do
GenServer.cast(name, {:add_conn, key, conn})
end
@spec remove_conn(atom(), String.t()) :: :ok
def remove_conn(name, key) do
GenServer.cast(name, {:remove_conn, key})
end
@impl true
def handle_cast({:add_conn, key, conn}, state) do
state = put_in(state.conns[key], conn)
Process.monitor(conn.conn)
{:noreply, state}
end
@impl true
def handle_cast({:checkout, conn_pid, pid}, state) do
state =
with true <- Process.alive?(conn_pid),
{key, conn} <- find_conn(state.conns, conn_pid),
used_by <- List.keydelete(conn.used_by, pid, 0) do
conn_state = if used_by == [], do: :idle, else: conn.conn_state
put_in(state.conns[key], %{conn | conn_state: conn_state, used_by: used_by})
else
false ->
Logger.debug("checkout for closed conn #{inspect(conn_pid)}")
state
nil ->
Logger.debug("checkout for alive conn #{inspect(conn_pid)}, but is not in state")
state
end
{:noreply, state}
end
@impl true
def handle_cast({:remove_conn, key}, state) do
state = put_in(state.conns, Map.delete(state.conns, key))
{:noreply, state}
end
@impl true
def handle_call({:checkin, uri}, from, state) do
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
case state.conns[key] do
%{conn: pid, gun_state: :up} = conn ->
time = :os.system_time(:second)
last_reference = time - conn.last_reference
crf = crf(last_reference, 100, conn.crf)
state =
put_in(state.conns[key], %{
conn
| last_reference: time,
crf: crf,
conn_state: :active,
used_by: [from | conn.used_by]
})
{:reply, pid, state}
%{gun_state: :down} ->
{:reply, nil, state}
nil ->
{:reply, nil, state}
end
end
@impl true
def handle_call(:state, _from, state), do: {:reply, state, state}
@impl true
def handle_call(:count, _from, state) do
{:reply, Enum.count(state.conns), state}
end
@impl true
def handle_call(:unused_conns, _from, state) do
unused_conns =
state.conns
|> Enum.filter(&filter_conns/1)
|> Enum.sort(&sort_conns/2)
{:reply, unused_conns, state}
end
defp filter_conns({_, %{conn_state: :idle, used_by: []}}), do: true
defp filter_conns(_), do: false
defp sort_conns({_, c1}, {_, c2}) do
c1.crf <= c2.crf and c1.last_reference <= c2.last_reference
end
@impl true
def handle_info({:gun_up, conn_pid, _protocol}, state) do
%{origin_host: host, origin_scheme: scheme, origin_port: port} = Gun.info(conn_pid)
host =
case :inet.ntoa(host) do
{:error, :einval} -> host
ip -> ip
end
key = "#{scheme}:#{host}:#{port}"
state =
with {key, conn} <- find_conn(state.conns, conn_pid, key),
{true, key} <- {Process.alive?(conn_pid), key} do
put_in(state.conns[key], %{
conn
| gun_state: :up,
conn_state: :active,
retries: 0
})
else
{false, key} ->
put_in(
state.conns,
Map.delete(state.conns, key)
)
nil ->
:ok = Gun.close(conn_pid)
state
end
{:noreply, state}
end
@impl true
def handle_info({:gun_down, conn_pid, _protocol, _reason, _killed}, state) do
retries = Config.get([:connections_pool, :retry], 1)
# we can't get info on this pid, because pid is dead
state =
with {key, conn} <- find_conn(state.conns, conn_pid),
{true, key} <- {Process.alive?(conn_pid), key} do
if conn.retries == retries do
:ok = Gun.close(conn.conn)
put_in(
state.conns,
Map.delete(state.conns, key)
)
else
put_in(state.conns[key], %{
conn
| gun_state: :down,
retries: conn.retries + 1
})
end
else
{false, key} ->
put_in(
state.conns,
Map.delete(state.conns, key)
)
nil ->
Logger.debug(":gun_down for conn which isn't found in state")
state
end
{:noreply, state}
end
@impl true
def handle_info({:DOWN, _ref, :process, conn_pid, reason}, state) do
Logger.debug("received DOWM message for #{inspect(conn_pid)} reason -> #{inspect(reason)}")
state =
with {key, conn} <- find_conn(state.conns, conn_pid) do
Enum.each(conn.used_by, fn {pid, _ref} ->
Process.exit(pid, reason)
end)
put_in(
state.conns,
Map.delete(state.conns, key)
)
else
nil ->
Logger.debug(":DOWN for conn which isn't found in state")
state
end
{:noreply, state}
end
defp find_conn(conns, conn_pid) do
Enum.find(conns, fn {_key, conn} ->
conn.conn == conn_pid
end)
end
defp find_conn(conns, conn_pid, conn_key) do
Enum.find(conns, fn {key, conn} ->
key == conn_key and conn.conn == conn_pid
end)
end
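# CRF (connection reuse frequency): each checkin adds 1 to the previous score,
# which decays by 0.5^(seconds_since_last_use / steps); see handle_call({:checkin, ...}) above.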
def crf(current, steps, crf) do
1 + :math.pow(0.5, current / steps) * crf
end
end
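A sketch of the checkin/checkout lifecycle as driven by `Pleroma.HTTP.AdapterHelper.Gun` (the URL is illustrative):

```elixir
uri = URI.parse("https://example.com")

case Pleroma.Pool.Connections.checkin(uri, :gun_connections) do
  nil ->
    # no reusable connection yet: open one asynchronously for future requests
    Task.start(Pleroma.Gun.Conn, :open, [uri, :gun_connections])

  conn when is_pid(conn) ->
    # ... perform the request over `conn`, then release it back to the pool
    Pleroma.Pool.Connections.checkout(conn, self(), :gun_connections)
end
```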

22
lib/pleroma/pool/pool.ex Normal file
View File

@ -0,0 +1,22 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool do
def child_spec(opts) do
poolboy_opts =
opts
|> Keyword.put(:worker_module, Pleroma.Pool.Request)
|> Keyword.put(:name, {:local, opts[:name]})
|> Keyword.put(:size, opts[:size])
|> Keyword.put(:max_overflow, opts[:max_overflow])
%{
id: opts[:id] || {__MODULE__, make_ref()},
start: {:poolboy, :start_link, [poolboy_opts, [name: opts[:name]]]},
restart: :permanent,
shutdown: 5000,
type: :worker
}
end
end

View File

@ -0,0 +1,65 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Request do
use GenServer
require Logger
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
@impl true
def init(_), do: {:ok, []}
@spec execute(pid() | atom(), Tesla.Client.t(), keyword(), pos_integer()) ::
{:ok, Tesla.Env.t()} | {:error, any()}
def execute(pid, client, request, timeout) do
GenServer.call(pid, {:execute, client, request}, timeout)
end
@impl true
def handle_call({:execute, client, request}, _from, state) do
response = Pleroma.HTTP.request(client, request)
{:reply, response, state}
end
@impl true
def handle_info({:gun_data, _conn, _stream, _, _}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_up, _conn, _protocol}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_down, _conn, _protocol, _reason, _killed}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_error, _conn, _stream, _error}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_push, _conn, _stream, _new_stream, _method, _uri, _headers}, state) do
{:noreply, state}
end
@impl true
def handle_info({:gun_response, _conn, _stream, _, _status, _headers}, state) do
{:noreply, state}
end
@impl true
def handle_info(msg, state) do
Logger.warn("Received unexpected message #{inspect(__MODULE__)} #{inspect(msg)}")
{:noreply, state}
end
end

View File

@ -0,0 +1,42 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pool.Supervisor do
use Supervisor
alias Pleroma.Config
alias Pleroma.Pool
def start_link(args) do
Supervisor.start_link(__MODULE__, args, name: __MODULE__)
end
def init(_) do
conns_child = %{
id: Pool.Connections,
start:
{Pool.Connections, :start_link, [{:gun_connections, Config.get([:connections_pool])}]}
}
Supervisor.init([conns_child | pools()], strategy: :one_for_one)
end
defp pools do
pools = Config.get(:pools)
pools =
if Config.get([Pleroma.Upload, :proxy_remote]) == false do
Keyword.delete(pools, :upload)
else
pools
end
for {pool_name, pool_opts} <- pools do
pool_opts
|> Keyword.put(:id, {Pool, pool_name})
|> Keyword.put(:name, pool_name)
|> Pool.child_spec()
end
end
end

View File

@ -3,19 +3,23 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy.Client do
@callback request(atom(), String.t(), [tuple()], String.t(), list()) ::
{:ok, pos_integer(), [tuple()], reference() | map()}
| {:ok, pos_integer(), [tuple()]}
@type status :: pos_integer()
@type header_name :: String.t()
@type header_value :: String.t()
@type headers :: [{header_name(), header_value()}]
@callback request(atom(), String.t(), headers(), String.t(), list()) ::
{:ok, status(), headers(), reference() | map()}
| {:ok, status(), headers()}
| {:ok, reference()}
| {:error, term()}
@callback stream_body(reference() | pid() | map()) ::
{:ok, binary()} | :done | {:error, String.t()}
@callback stream_body(map()) :: {:ok, binary(), map()} | :done | {:error, atom() | String.t()}
@callback close(reference() | pid() | map()) :: :ok
def request(method, url, headers, "", opts \\ []) do
client().request(method, url, headers, "", opts)
def request(method, url, headers, body \\ "", opts \\ []) do
client().request(method, url, headers, body, opts)
end
def stream_body(ref), do: client().stream_body(ref)
@ -23,6 +27,12 @@ def stream_body(ref), do: client().stream_body(ref)
def close(ref), do: client().close(ref)
defp client do
Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney)
:tesla
|> Application.get_env(:adapter)
|> client()
end
defp client(Tesla.Adapter.Hackney), do: Pleroma.ReverseProxy.Client.Hackney
defp client(Tesla.Adapter.Gun), do: Pleroma.ReverseProxy.Client.Tesla
defp client(_), do: Pleroma.Config.get!(Pleroma.ReverseProxy.Client)
end

View File

@ -0,0 +1,24 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy.Client.Hackney do
@behaviour Pleroma.ReverseProxy.Client
@impl true
def request(method, url, headers, body, opts \\ []) do
:hackney.request(method, url, headers, body, opts)
end
@impl true
def stream_body(ref) do
case :hackney.stream_body(ref) do
:done -> :done
{:ok, data} -> {:ok, data, ref}
{:error, error} -> {:error, error}
end
end
@impl true
def close(ref), do: :hackney.close(ref)
end

View File

@ -0,0 +1,90 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy.Client.Tesla do
@behaviour Pleroma.ReverseProxy.Client
@type headers() :: [{String.t(), String.t()}]
@type status() :: pos_integer()
@spec request(atom(), String.t(), headers(), String.t(), keyword()) ::
{:ok, status(), headers}
| {:ok, status(), headers, map()}
| {:error, atom() | String.t()}
| no_return()
@impl true
def request(method, url, headers, body, opts \\ []) do
check_adapter()
opts = Keyword.put(opts, :body_as, :chunks)
with {:ok, response} <-
Pleroma.HTTP.request(
method,
url,
body,
headers,
Keyword.put(opts, :adapter, opts)
) do
if is_map(response.body) and method != :head do
{:ok, response.status, response.headers, response.body}
else
{:ok, response.status, response.headers}
end
else
{:error, error} -> {:error, error}
end
end
@impl true
@spec stream_body(map()) ::
{:ok, binary(), map()} | {:error, atom() | String.t()} | :done | no_return()
def stream_body(%{pid: pid, opts: opts, fin: true}) do
# if the connection was reused, but tesla followed redirects,
# tesla returns a newly opened connection, which must be closed manually
if opts[:old_conn], do: Tesla.Adapter.Gun.close(pid)
# if there were redirects, we need to checkout the old conn
conn = opts[:old_conn] || opts[:conn]
if conn, do: :ok = Pleroma.Pool.Connections.checkout(conn, self(), :gun_connections)
:done
end
def stream_body(client) do
case read_chunk!(client) do
{:fin, body} ->
{:ok, body, Map.put(client, :fin, true)}
{:nofin, part} ->
{:ok, part, client}
{:error, error} ->
{:error, error}
end
end
defp read_chunk!(%{pid: pid, stream: stream, opts: opts}) do
adapter = check_adapter()
adapter.read_chunk(pid, stream, opts)
end
@impl true
@spec close(map) :: :ok | no_return()
def close(%{pid: pid}) do
adapter = check_adapter()
adapter.close(pid)
end
defp check_adapter do
adapter = Application.get_env(:tesla, :adapter)
unless adapter == Tesla.Adapter.Gun do
raise "#{adapter} doesn't support reading body in chunks"
end
adapter
end
end

View File

@ -3,8 +3,6 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy do
alias Pleroma.HTTP
@keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++
~w(if-unmodified-since if-none-match if-range range)
@resp_cache_headers ~w(etag date last-modified)
@ -58,10 +56,10 @@ defmodule Pleroma.ReverseProxy do
* `req_headers`, `resp_headers` additional headers.
* `http`: options for [hackney](https://github.com/benoitc/hackney).
* `http`: options for [hackney](https://github.com/benoitc/hackney) or [gun](https://github.com/ninenines/gun).
"""
@default_hackney_options [pool: :media]
@default_options [pool: :media]
@inline_content_types [
"image/gif",
@ -94,11 +92,7 @@ defmodule Pleroma.ReverseProxy do
def call(_conn, _url, _opts \\ [])
def call(conn = %{method: method}, url, opts) when method in @methods do
hackney_opts =
Pleroma.HTTP.Connection.hackney_options([])
|> Keyword.merge(@default_hackney_options)
|> Keyword.merge(Keyword.get(opts, :http, []))
|> HTTP.process_request_options()
client_opts = Keyword.merge(@default_options, Keyword.get(opts, :http, []))
req_headers = build_req_headers(conn.req_headers, opts)
@ -110,7 +104,7 @@ def call(conn = %{method: method}, url, opts) when method in @methods do
end
with {:ok, nil} <- Cachex.get(:failed_proxy_url_cache, url),
{:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts),
{:ok, code, headers, client} <- request(method, url, req_headers, client_opts),
:ok <-
header_length_constraint(
headers,
@ -156,11 +150,11 @@ def call(conn, _, _) do
|> halt()
end
defp request(method, url, headers, hackney_opts) do
defp request(method, url, headers, opts) do
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
method = method |> String.downcase() |> String.to_existing_atom()
case client().request(method, url, headers, "", hackney_opts) do
case client().request(method, url, headers, "", opts) do
{:ok, code, headers, client} when code in @valid_resp_codes ->
{:ok, code, downcase_headers(headers), client}
@ -210,7 +204,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
duration,
Keyword.get(opts, :max_read_duration, @max_read_duration)
),
{:ok, data} <- client().stream_body(client),
{:ok, data, client} <- client().stream_body(client),
{:ok, duration} <- increase_read_duration(duration),
sent_so_far = sent_so_far + byte_size(data),
:ok <-

View File

@ -305,16 +305,12 @@ def banner_url(user, options \\ []) do
end
end
def profile_url(%User{source_data: %{"url" => url}}), do: url
def profile_url(%User{ap_id: ap_id}), do: ap_id
def profile_url(_), do: nil
def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}"
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
@spec ap_following(User.t()) :: Sring.t()
@spec ap_following(User.t()) :: String.t()
def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa
def ap_following(%User{} = user), do: "#{ap_id(user)}/following"

View File

@ -1379,6 +1379,18 @@ def upload(file, opts \\ []) do
end
end
@spec get_actor_url(any()) :: binary() | nil
defp get_actor_url(url) when is_binary(url), do: url
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
defp get_actor_url(url) when is_list(url) do
url
|> List.first()
|> get_actor_url()
end
defp get_actor_url(_url), do: nil
defp object_to_user_data(data) do
avatar =
data["icon"]["url"] &&
@ -1408,6 +1420,7 @@ defp object_to_user_data(data) do
user_data = %{
ap_id: data["id"],
uri: get_actor_url(data["url"]),
ap_enabled: true,
source_data: data,
banner: banner,

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicy do

View File

@ -12,17 +12,23 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
require Logger
@hackney_options [
pool: :media,
recv_timeout: 10_000
@options [
pool: :media
]
def perform(:prefetch, url) do
Logger.debug("Prefetching #{inspect(url)}")
opts =
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
Keyword.put(@options, :recv_timeout, 10_000)
else
@options
end
url
|> MediaProxy.url()
|> HTTP.get([], adapter: @hackney_options)
|> HTTP.get([], adapter: opts)
end
def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do
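The prefetch options are now assembled at call time so that the hackney-only recv_timeout is applied only when the hackney adapter is configured; a condensed sketch of that pattern (module and function names are illustrative):

# Mirrors the conditional above; not the actual policy module.
defmodule AdapterOptsExample do
  @options [pool: :media]

  def prefetch_opts do
    if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
      Keyword.put(@options, :recv_timeout, 10_000)
    else
      @options
    end
  end
end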

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do

View File

@ -229,7 +229,8 @@ def fix_url(%{"url" => url} = object) when is_map(url) do
Map.put(object, "url", url["href"])
end
def fix_url(%{"type" => "Video", "url" => url} = object) when is_list(url) do
def fix_url(%{"type" => object_type, "url" => url} = object)
when object_type in ["Video", "Audio"] and is_list(url) do
first_element = Enum.at(url, 0)
link_element = Enum.find(url, fn x -> is_map(x) and x["mimeType"] == "text/html" end)
@ -398,7 +399,7 @@ def handle_incoming(
%{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
options
)
when objtype in ["Article", "Event", "Note", "Video", "Page", "Question", "Answer"] do
when objtype in ["Article", "Event", "Note", "Video", "Page", "Question", "Answer", "Audio"] do
actor = Containment.get_actor(data)
data =

View File

@ -795,102 +795,6 @@ def get_reports(params, page, page_size) do
ActivityPub.fetch_activities([], params, :offset)
end
def parse_report_group(activity) do
reports = get_reports_by_status_id(activity["id"])
max_date = Enum.max_by(reports, &NaiveDateTime.from_iso8601!(&1.data["published"]))
actors = Enum.map(reports, & &1.user_actor)
[%{data: %{"object" => [account_id | _]}} | _] = reports
account =
AccountView.render("show.json", %{
user: User.get_by_ap_id(account_id)
})
status = get_status_data(activity)
%{
date: max_date.data["published"],
account: account,
status: status,
actors: Enum.uniq(actors),
reports: reports
}
end
defp get_status_data(status) do
case status["deleted"] do
true ->
%{
"id" => status["id"],
"deleted" => true
}
_ ->
Activity.get_by_ap_id(status["id"])
end
end
def get_reports_by_status_id(ap_id) do
from(a in Activity,
where: fragment("(?)->>'type' = 'Flag'", a.data),
where: fragment("(?)->'object' @> ?", a.data, ^[%{id: ap_id}]),
or_where: fragment("(?)->'object' @> ?", a.data, ^[ap_id])
)
|> Activity.with_preloaded_user_actor()
|> Repo.all()
end
@spec get_reports_grouped_by_status([String.t()]) :: %{
required(:groups) => [
%{
required(:date) => String.t(),
required(:account) => %{},
required(:status) => %{},
required(:actors) => [%User{}],
required(:reports) => [%Activity{}]
}
]
}
def get_reports_grouped_by_status(activity_ids) do
parsed_groups =
activity_ids
|> Enum.map(fn id ->
id
|> build_flag_object()
|> parse_report_group()
end)
%{
groups: parsed_groups
}
end
@spec get_reported_activities() :: [
%{
required(:activity) => String.t(),
required(:date) => String.t()
}
]
def get_reported_activities do
reported_activities_query =
from(a in Activity,
where: fragment("(?)->>'type' = 'Flag'", a.data),
select: %{
activity: fragment("jsonb_array_elements((? #- '{object,0}')->'object')", a.data)
},
group_by: fragment("activity")
)
from(a in subquery(reported_activities_query),
distinct: true,
select: %{
id: fragment("COALESCE(?->>'id'::text, ? #>> '{}')", a.activity, a.activity)
}
)
|> Repo.all()
|> Enum.map(& &1.id)
end
def update_report_state(%Activity{} = activity, state)
when state in @strip_status_report_states do
{:ok, stripped_activity} = strip_report_status_data(activity)

View File

@ -715,14 +715,6 @@ def list_reports(conn, params) do
|> render("index.json", %{reports: reports})
end
def list_grouped_reports(conn, _params) do
statuses = Utils.get_reported_activities()
conn
|> put_view(ReportView)
|> render("index_grouped.json", Utils.get_reports_grouped_by_status(statuses))
end
def report_show(conn, %{"id" => id}) do
with %Activity{} = report <- Activity.get_by_id(id) do
conn

View File

@ -4,7 +4,7 @@
defmodule Pleroma.Web.AdminAPI.ReportView do
use Pleroma.Web, :view
alias Pleroma.Activity
alias Pleroma.HTML
alias Pleroma.User
alias Pleroma.Web.AdminAPI.Report
@ -44,32 +44,6 @@ def render("show.json", %{report: report, user: user, account: account, statuses
}
end
def render("index_grouped.json", %{groups: groups}) do
reports =
Enum.map(groups, fn group ->
status =
case group.status do
%Activity{} = activity -> StatusView.render("show.json", %{activity: activity})
_ -> group.status
end
%{
date: group[:date],
account: group[:account],
status: Map.put_new(status, "deleted", false),
actors: Enum.map(group[:actors], &merge_account_views/1),
reports:
group[:reports]
|> Enum.map(&Report.extract_report_info(&1))
|> Enum.map(&render(__MODULE__, "show.json", &1))
}
end)
%{
reports: reports
}
end
def render("index_notes.json", %{notes: notes}) when is_list(notes) do
Enum.map(notes, &render(__MODULE__, "show_note.json", &1))
end

View File

@ -43,7 +43,7 @@ def render("mention.json", %{user: user}) do
id: to_string(user.id),
acct: user.nickname,
username: username_from_nickname(user.nickname),
url: User.profile_url(user)
url: user.uri || user.ap_id
}
end
@ -207,7 +207,7 @@ defp do_render("show.json", %{user: user} = opts) do
following_count: following_count,
statuses_count: user.note_count,
note: user.bio || "",
url: User.profile_url(user),
url: user.uri || user.ap_id,
avatar: image,
avatar_static: image,
header: header,

View File

@ -482,7 +482,7 @@ def get_reply_to(%{data: %{"object" => _object}} = activity, _) do
end
def render_content(%{data: %{"type" => object_type}} = object)
when object_type in ["Video", "Event"] do
when object_type in ["Video", "Event", "Audio"] do
with name when not is_nil(name) and name != "" <- object.data["name"] do
"<p><a href=\"#{object.data["id"]}\">#{name}</a></p>#{object.data["content"]}"
else

View File

@ -6,7 +6,12 @@ defmodule Pleroma.Web.Metadata do
alias Phoenix.HTML
def build_tags(params) do
Enum.reduce(Pleroma.Config.get([__MODULE__, :providers], []), "", fn parser, acc ->
providers = [
Pleroma.Web.Metadata.Providers.RestrictIndexing
| Pleroma.Config.get([__MODULE__, :providers], [])
]
Enum.reduce(providers, "", fn parser, acc ->
rendered_html =
params
|> parser.build_tags()

View File

@ -68,7 +68,7 @@ def build_tags(%{user: user}) do
property: "og:title",
content: Utils.user_name_string(user)
], []},
{:meta, [property: "og:url", content: User.profile_url(user)], []},
{:meta, [property: "og:url", content: user.uri || user.ap_id], []},
{:meta, [property: "og:description", content: truncated_bio], []},
{:meta, [property: "og:type", content: "website"], []},
{:meta, [property: "og:image", content: Utils.attachment_url(User.avatar_url(user))], []},

View File

@ -0,0 +1,25 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Metadata.Providers.RestrictIndexing do
@behaviour Pleroma.Web.Metadata.Providers.Provider
@moduledoc """
Restricts indexing of remote users.
"""
@impl true
def build_tags(%{user: %{local: false}}) do
[
{:meta,
[
name: "robots",
content: "noindex, noarchive"
], []}
]
end
@impl true
def build_tags(%{user: %{local: true}}), do: []
end
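Assuming the module compiles as shown, its output can be checked directly; the tuples below are what the metadata builder renders into a robots meta tag for remote profiles (the user maps are placeholders with only the :local field set):

Pleroma.Web.Metadata.Providers.RestrictIndexing.build_tags(%{user: %{local: false}})
#=> [{:meta, [name: "robots", content: "noindex, noarchive"], []}]

Pleroma.Web.Metadata.Providers.RestrictIndexing.build_tags(%{user: %{local: true}})
#=> []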

View File

@ -3,11 +3,9 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RelMe do
@hackney_options [
@options [
pool: :media,
recv_timeout: 2_000,
max_body: 2_000_000,
with_body: true
max_body: 2_000_000
]
if Pleroma.Config.get(:env) == :test do
@ -25,8 +23,18 @@ def parse(url) when is_binary(url) do
def parse(_), do: {:error, "No URL provided"}
defp parse_url(url) do
opts =
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
Keyword.merge(@options,
recv_timeout: 2_000,
with_body: true
)
else
@options
end
with {:ok, %Tesla.Env{body: html, status: status}} when status in 200..299 <-
Pleroma.HTTP.get(url, [], adapter: @hackney_options),
Pleroma.HTTP.get(url, [], adapter: opts),
{:ok, html_tree} <- Floki.parse_document(html),
data <-
Floki.attribute(html_tree, "link[rel~=me]", "href") ++

View File

@ -3,11 +3,9 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parser do
@hackney_options [
@options [
pool: :media,
recv_timeout: 2_000,
max_body: 2_000_000,
with_body: true
max_body: 2_000_000
]
defp parsers do
@ -77,8 +75,18 @@ defp get_ttl_from_image(data, url) do
end
defp parse_url(url) do
opts =
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
Keyword.merge(@options,
recv_timeout: 2_000,
with_body: true
)
else
@options
end
try do
{:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)
{:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: opts)
html
|> parse_html()

View File

@ -186,7 +186,6 @@ defmodule Pleroma.Web.Router do
patch("/users/resend_confirmation_email", AdminAPIController, :resend_confirmation_email)
get("/reports", AdminAPIController, :list_reports)
get("/grouped_reports", AdminAPIController, :list_grouped_reports)
get("/reports/:id", AdminAPIController, :report_show)
patch("/reports", AdminAPIController, :reports_update)
post("/reports/:id/notes", AdminAPIController, :report_notes_create)

View File

@ -1,5 +1,5 @@
<div class="p-author h-card">
<a class="u-url" rel="author noopener" href="<%= User.profile_url(@user) %>">
<a class="u-url" rel="author noopener" href="<%= (@user.uri || @user.ap_id) %>">
<div class="avatar">
<img src="<%= User.avatar_url(@user) |> MediaProxy.url %>" width="48" height="48" alt="">
</div>

View File

@ -8,7 +8,7 @@
<button type="submit" class="collapse">Remote follow</button>
</form>
<%= raw Formatter.emojify(@user.name, emoji_for_user(@user)) %> |
<%= link "@#{@user.nickname}@#{Endpoint.host()}", to: User.profile_url(@user) %>
<%= link "@#{@user.nickname}@#{Endpoint.host()}", to: (@user.uri || @user.ap_id) %>
</h3>
<p><%= raw @user.bio %></p>
</header>

View File

@ -173,7 +173,8 @@ def find_lrdd_template(domain) do
get_template_from_xml(body)
else
_ ->
with {:ok, %{body: body}} <- HTTP.get("https://#{domain}/.well-known/host-meta", []) do
with {:ok, %{body: body, status: status}} when status in 200..299 <-
HTTP.get("https://#{domain}/.well-known/host-meta", []) do
get_template_from_xml(body)
else
e -> {:error, "Can't find LRDD template: #{inspect(e)}"}
@ -205,7 +206,7 @@ def finger(account) do
with response <-
HTTP.get(
address,
Accept: "application/xrd+xml,application/jrd+json"
[{"accept", "application/xrd+xml,application/jrd+json"}]
),
{:ok, %{status: status, body: body}} when status in 200..299 <- response do
doc = XML.parse_document(body)
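Two changes here: a non-2xx response from /.well-known/host-meta is no longer treated as a usable template, and the Accept header is now passed as an explicit lowercase {name, value} tuple, a form both the hackney and gun adapters accept. A minimal call sketch with a placeholder URL:

# Placeholder URL; shows the tuple header form used above.
Pleroma.HTTP.get(
  "https://example.com/.well-known/webfinger?resource=acct:alice@example.com",
  [{"accept", "application/xrd+xml,application/jrd+json"}]
)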

mix.exs
View File

@ -119,7 +119,15 @@ defp deps do
{:calendar, "~> 0.17.4"},
{:cachex, "~> 3.2"},
{:poison, "~> 3.0", override: true},
{:tesla, "~> 1.3", override: true},
# {:tesla, "~> 1.3", override: true},
{:tesla,
git: "https://git.pleroma.social/pleroma/elixir-libraries/tesla.git",
ref: "61b7503cef33f00834f78ddfafe0d5d9dec2270b",
override: true},
{:castore, "~> 0.1"},
{:cowlib, "~> 2.8", override: true},
{:gun,
github: "ninenines/gun", ref: "e1a69b36b180a574c0ac314ced9613fdd52312cc", override: true},
{:jason, "~> 1.0"},
{:mogrify, "~> 0.6.1"},
{:ex_aws, "~> 2.1"},

View File

@ -10,6 +10,7 @@
"cachex": {:hex, :cachex, "3.2.0", "a596476c781b0646e6cb5cd9751af2e2974c3e0d5498a8cab71807618b74fe2f", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "aef93694067a43697ae0531727e097754a9e992a1e7946296f5969d6dd9ac986"},
"calendar": {:hex, :calendar, "0.17.6", "ec291cb2e4ba499c2e8c0ef5f4ace974e2f9d02ae9e807e711a9b0c7850b9aee", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "738d0e17a93c2ccfe4ddc707bdc8e672e9074c8569498483feb1c4530fb91b2b"},
"captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "e0f16822d578866e186a0974d65ad58cddc1e2ab", [ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"]},
"castore": {:hex, :castore, "0.1.5", "591c763a637af2cc468a72f006878584bc6c306f8d111ef8ba1d4c10e0684010", [:mix], [], "hexpm", "6db356b2bc6cc22561e051ff545c20ad064af57647e436650aa24d7d06cd941a"},
"certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "805abd97539caf89ec6d4732c91e62ba9da0cda51ac462380bbd28ee697a8c42"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
"comeonin": {:hex, :comeonin, "4.1.2", "3eb5620fd8e35508991664b4c2b04dd41e52f1620b36957be837c1d7784b7592", [:mix], [{:argon2_elixir, "~> 1.2", [hex: :argon2_elixir, repo: "hexpm", optional: true]}, {:bcrypt_elixir, "~> 0.12.1 or ~> 1.0", [hex: :bcrypt_elixir, repo: "hexpm", optional: true]}, {:pbkdf2_elixir, "~> 0.12", [hex: :pbkdf2_elixir, repo: "hexpm", optional: true]}], "hexpm", "d8700a0ca4dbb616c22c9b3f6dd539d88deaafec3efe66869d6370c9a559b3e9"},
@ -46,6 +47,7 @@
"gen_stage": {:hex, :gen_stage, "0.14.3", "d0c66f1c87faa301c1a85a809a3ee9097a4264b2edf7644bf5c123237ef732bf", [:mix], [], "hexpm", "8453e2289d94c3199396eb517d65d6715ef26bcae0ee83eb5ff7a84445458d76"},
"gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm", "5cacd405e72b2609a7e1f891bddb80c53d0b3b7b0036d1648e7382ca108c41c8"},
"gettext": {:hex, :gettext, "0.17.1", "8baab33482df4907b3eae22f719da492cee3981a26e649b9c2be1c0192616962", [:mix], [], "hexpm", "f7d97341e536f95b96eef2988d6d4230f7262cf239cda0e2e63123ee0b717222"},
"gun": {:git, "https://github.com/ninenines/gun.git", "e1a69b36b180a574c0ac314ced9613fdd52312cc", [ref: "e1a69b36b180a574c0ac314ced9613fdd52312cc"]},
"hackney": {:hex, :hackney, "1.15.2", "07e33c794f8f8964ee86cebec1a8ed88db5070e52e904b8f12209773c1036085", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.5", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "e0100f8ef7d1124222c11ad362c857d3df7cb5f4204054f9f0f4a728666591fc"},
"html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm", "30efab070904eb897ff05cd52fa61c1025d7f8ef3a9ca250bc4e6513d16c32de"},
"html_sanitize_ex": {:hex, :html_sanitize_ex, "1.3.0", "f005ad692b717691203f940c686208aa3d8ffd9dd4bb3699240096a51fa9564e", [:mix], [{:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
@ -53,7 +55,7 @@
"httpoison": {:hex, :httpoison, "1.6.1", "2ce5bf6e535cd0ab02e905ba8c276580bab80052c5c549f53ddea52d72e81f33", [:mix], [{:hackney, "~> 1.15 and >= 1.15.2", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "89149056039084024a284cd703b2d1900d584958dba432132cb21ef35aed7487"},
"idna": {:hex, :idna, "6.0.0", "689c46cbcdf3524c44d5f3dde8001f364cd7608a99556d8fbd8239a5798d4c10", [:rebar3], [{:unicode_util_compat, "0.4.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "4bdd305eb64e18b0273864920695cb18d7a2021f31a11b9c5fbcd9a253f936e2"},
"inet_cidr": {:hex, :inet_cidr, "1.0.4", "a05744ab7c221ca8e395c926c3919a821eb512e8f36547c062f62c4ca0cf3d6e", [:mix], [], "hexpm", "64a2d30189704ae41ca7dbdd587f5291db5d1dda1414e0774c29ffc81088c1bc"},
"jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fdf843bca858203ae1de16da2ee206f53416bbda5dc8c9e78f43243de4bc3afe"},
"jason": {:hex, :jason, "1.2.0", "10043418c42d2493d0ee212d3fddd25d7ffe484380afad769a0a38795938e448", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "116747dbe057794c3a3e4e143b7c8390b29f634e16c78a7f59ba75bfa6852e7f"},
"joken": {:hex, :joken, "2.1.0", "bf21a73105d82649f617c5e59a7f8919aa47013d2519ebcc39d998d8d12adda9", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "eb02df7d5526df13063397e051b926b7006d5986d66f399eefc474f560cdad6a"},
"jose": {:hex, :jose, "1.9.0", "4167c5f6d06ffaebffd15cdb8da61a108445ef5e85ab8f5a7ad926fdf3ada154", [:mix, :rebar3], [{:base64url, "~> 0.0.1", [hex: :base64url, repo: "hexpm", optional: false]}], "hexpm", "6429c4fee52b2dda7861ee19a4f09c8c1ffa213bee3a1ec187828fde95d447ed"},
"jumper": {:hex, :jumper, "1.0.1", "3c00542ef1a83532b72269fab9f0f0c82bf23a35e27d278bfd9ed0865cecabff", [:mix], [], "hexpm", "318c59078ac220e966d27af3646026db9b5a5e6703cb2aa3e26bcfaba65b7433"},
@ -103,7 +105,7 @@
"swoosh": {:hex, :swoosh, "0.23.5", "bfd9404bbf5069b1be2ffd317923ce57e58b332e25dbca2a35dedd7820dfee5a", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm", "e3928e1d2889a308aaf3e42755809ac21cffd77cb58eef01cbfdab4ce2fd1e21"},
"syslog": {:hex, :syslog, "1.0.6", "995970c9aa7feb380ac493302138e308d6e04fd57da95b439a6df5bb3bf75076", [:rebar3], [], "hexpm", "769ddfabd0d2a16f3f9c17eb7509951e0ca4f68363fb26f2ee51a8ec4a49881a"},
"telemetry": {:hex, :telemetry, "0.4.1", "ae2718484892448a24470e6aa341bc847c3277bfb8d4e9289f7474d752c09c7f", [:rebar3], [], "hexpm", "4738382e36a0a9a2b6e25d67c960e40e1a2c95560b9f936d8e29de8cd858480f"},
"tesla": {:hex, :tesla, "1.3.0", "f35d72f029e608f9cdc6f6d6fcc7c66cf6d6512a70cfef9206b21b8bd0203a30", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 0.4", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.3", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "93a7cacc5ca47997759cfa1d3ab25501d291e490908006d5be56f37f89d96693"},
"tesla": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/tesla.git", "61b7503cef33f00834f78ddfafe0d5d9dec2270b", [ref: "61b7503cef33f00834f78ddfafe0d5d9dec2270b"]},
"timex": {:hex, :timex, "3.6.1", "efdf56d0e67a6b956cc57774353b0329c8ab7726766a11547e529357ffdc1d56", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5 or ~> 1.0.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "f354efb2400dd7a80fd9eb6c8419068c4f632da4ac47f3d8822d6e33f08bc852"},
"trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"},
"tzdata": {:hex, :tzdata, "0.5.22", "f2ba9105117ee0360eae2eca389783ef7db36d533899b2e84559404dbc77ebb8", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "cd66c8a1e6a9e121d1f538b01bef459334bb4029a1ffb4eeeb5e4eae0337e7b6"},

View File

@ -1,5 +1,5 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.CreateConversations do

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1 +1 @@
<!DOCTYPE html><html><head><meta charset=utf-8><meta http-equiv=X-UA-Compatible content="IE=edge,chrome=1"><meta name=renderer content=webkit><meta name=viewport content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no"><title>Admin FE</title><link rel="shortcut icon" href=favicon.ico><link href=chunk-elementUI.1abbc9b8.css rel=stylesheet><link href=chunk-libs.686b5876.css rel=stylesheet><link href=app.c836e084.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=static/js/runtime.fa19e5d1.js></script><script type=text/javascript src=static/js/chunk-elementUI.fba0efec.js></script><script type=text/javascript src=static/js/chunk-libs.b8c453ab.js></script><script type=text/javascript src=static/js/app.d2c3c6b3.js></script></body></html>
<!DOCTYPE html><html><head><meta charset=utf-8><meta http-equiv=X-UA-Compatible content="IE=edge,chrome=1"><meta name=renderer content=webkit><meta name=viewport content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no"><title>Admin FE</title><link rel="shortcut icon" href=favicon.ico><link href=chunk-elementUI.1abbc9b8.css rel=stylesheet><link href=chunk-libs.686b5876.css rel=stylesheet><link href=app.85534e14.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=static/js/runtime.cb26bbd1.js></script><script type=text/javascript src=static/js/chunk-elementUI.fba0efec.js></script><script type=text/javascript src=static/js/chunk-libs.b8c453ab.js></script><script type=text/javascript src=static/js/app.d898cc2b.js></script></body></html>

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Some files were not shown because too many files have changed in this diff.