Merge branch 'release/2.1.1' into 'stable'
2.1.1 release

See merge request pleroma/secteam/pleroma!13

commit 425324aae3

CHANGELOG.md
@@ -3,6 +3,31 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## [2.1.1] - 2020-09-08
+
+### Security
+- Fix possible DoS in Mastodon API user search due to an error in match clauses, leading to an infinite recursion and subsequent OOM with certain inputs.
+- Fix metadata leak for accounts and statuses on private instances.
+- Fix possible DoS in Admin API search using an atom leak vulnerability. Authentication with admin rights was required to exploit.
+
+### Changed
+- **Breaking:** The metadata providers RelMe and Feed are no longer configurable. RelMe should always be activated and Feed only provides a `<link>` header tag for the actual RSS/Atom feed when the instance is public.
+- Improved error message when cmake is not available at build stage.
+
+### Added
+- Rich media failure tracking (along with `:failure_backoff` option).
+
+### Fixed
+- Default HTTP adapter not respecting pool setting, leading to possible OOM.
+- Fixed uploading webp images when the Exiftool Upload Filter is enabled by skipping them.
+- Mastodon API: Search parameter `following` now correctly returns the followings rather than the followers.
+- Mastodon API: Timelines hanging for (`number of posts with links * rich media timeout`) in the worst case.
+  Reduced to just rich media timeout.
+- Mastodon API: Cards being wrong for preview statuses due to cache key collision.
+- Password resets no longer processed for deactivated accounts.
+- Favicon scraper raising exceptions on URLs longer than 255 characters.
+
 ## [2.1.0] - 2020-08-28
 
 ### Changed
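A note on the new `:failure_backoff` option mentioned under "Added": it is ordinary application config under `:rich_media`. A minimal sketch of an operator override, assuming the shipped defaults shown in the config hunk below (the 120_000 value is only an illustration; the default is 60_000):

```elixir
# Illustrative override only; the shipped default is failure_backoff: 60_000.
config :pleroma, :rich_media,
  enabled: true,
  # wait two minutes before retrying a URL whose preview fetch failed
  failure_backoff: 120_000
```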
@@ -412,6 +412,7 @@
     Pleroma.Web.RichMedia.Parsers.TwitterCard,
     Pleroma.Web.RichMedia.Parsers.OEmbed
   ],
+  failure_backoff: 60_000,
   ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
 
 config :pleroma, :media_proxy,
@@ -453,9 +454,7 @@
 config :pleroma, Pleroma.Web.Metadata,
   providers: [
     Pleroma.Web.Metadata.Providers.OpenGraph,
-    Pleroma.Web.Metadata.Providers.TwitterCard,
-    Pleroma.Web.Metadata.Providers.RelMe,
-    Pleroma.Web.Metadata.Providers.Feed
+    Pleroma.Web.Metadata.Providers.TwitterCard
   ],
   unfurl_nsfw: false
 
@@ -740,19 +739,23 @@
 config :pleroma, :pools,
   federation: [
     size: 50,
-    max_waiting: 10
+    max_waiting: 10,
+    timeout: 10_000
   ],
   media: [
     size: 50,
-    max_waiting: 10
+    max_waiting: 10,
+    timeout: 10_000
   ],
   upload: [
     size: 25,
-    max_waiting: 5
+    max_waiting: 5,
+    timeout: 15_000
   ],
   default: [
     size: 10,
-    max_waiting: 2
+    max_waiting: 2,
+    timeout: 5_000
   ]
 
 config :pleroma, :hackney_pools,
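Each pool now carries its own `timeout`, which the Gun adapter helper resolves per pool (see `pool_timeout/1` further down in this diff). A hedged sketch of raising only the media pool's receive timeout in an instance's own config, keeping the other keys at the defaults above:

```elixir
# Illustrative override; key names follow the :pools defaults shown above.
config :pleroma, :pools,
  media: [
    size: 50,
    max_waiting: 10,
    # allow slower remote media fetches before the Gun receive timeout fires
    timeout: 20_000
  ]
```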
@@ -2385,6 +2385,13 @@
           suggestions: [
             Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
           ]
+        },
+        %{
+          key: :failure_backoff,
+          type: :integer,
+          description:
+            "Amount of milliseconds after request failure, during which the request will not be retried.",
+          suggestions: [60_000]
         }
       ]
     },
@@ -114,7 +114,7 @@
 
 config :pleroma, Pleroma.Web.ApiSpec.CastAndValidate, strict: true
 
-config :pleroma, :instances_favicons, enabled: true
+config :pleroma, :instances_favicons, enabled: false
 
 config :pleroma, Pleroma.Uploaders.S3,
   bucket: nil,
@@ -352,8 +352,6 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
 * `providers`: a list of metadata providers to enable. Providers available:
   * `Pleroma.Web.Metadata.Providers.OpenGraph`
   * `Pleroma.Web.Metadata.Providers.TwitterCard`
-  * `Pleroma.Web.Metadata.Providers.RelMe` - add links from user bio with rel=me into the `<header>` as `<link rel=me>`.
-  * `Pleroma.Web.Metadata.Providers.Feed` - add a link to a user's Atom feed into the `<header>` as `<link rel=alternate>`.
 * `unfurl_nsfw`: If set to `true` nsfw attachments will be shown in previews.
 
 ### :rich_media (consumer)
@@ -361,6 +359,7 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
 * `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
 * `ignore_tld`: list TLDs (top-level domains) which will ignore for parse metadata. default is ["local", "localdomain", "lan"].
 * `parsers`: list of Rich Media parsers.
+* `failure_backoff`: Amount of milliseconds after request failure, during which the request will not be retried.
 
 ## HTTP server
 
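With RelMe and Feed no longer configurable, a `Pleroma.Web.Metadata` block now only lists the remaining providers. A sketch consistent with the defaults in the config hunk above:

```elixir
# RelMe is always active and Feed is attached automatically on public
# instances, so only these providers remain configurable.
config :pleroma, Pleroma.Web.Metadata,
  providers: [
    Pleroma.Web.Metadata.Providers.OpenGraph,
    Pleroma.Web.Metadata.Providers.TwitterCard
  ],
  unfurl_nsfw: false
```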
@@ -7,7 +7,7 @@ This document was written for FreeBSD 12.1, but should be work on future releases.
 This assumes the target system has `pkg(8)`.
 
 ```
-# pkg install elixir postgresql12-server postgresql12-client postgresql12-contrib git-lite sudo nginx gmake acme.sh
+# pkg install elixir postgresql12-server postgresql12-client postgresql12-contrib git-lite sudo nginx gmake acme.sh cmake
 ```
 
 Copy the rc.d scripts to the right directory:
@@ -124,7 +124,9 @@ defp download_build(frontend_info, dest) do
     url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])
 
     with {:ok, %{status: 200, body: zip_body}} <-
-           Pleroma.HTTP.get(url, [], timeout: 120_000, recv_timeout: 120_000) do
+           Pleroma.HTTP.get(url, [],
+             adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]
+           ) do
       unzip(zip_body, dest)
     else
       e -> {:error, e}
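The call shape above — passing `adapter: [pool: ...]` so the Tesla/Gun adapter picks the right connection pool and timeouts — recurs throughout this release. A minimal, hedged example of the same pattern (the URL is made up):

```elixir
# Illustrative call; options mirror the frontend download change above.
url = "https://example.org/frontend.zip"

case Pleroma.HTTP.get(url, [], adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]) do
  {:ok, %{status: 200, body: body}} -> {:ok, byte_size(body)}
  {:ok, %{status: status}} -> {:error, {:unexpected_status, status}}
  {:error, reason} -> {:error, reason}
end
```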
@@ -83,17 +83,25 @@ def handle_call(:remove_client, {client_pid, _}, %{key: key} = state) do
       end)
 
     {ref, state} = pop_in(state.client_monitors[client_pid])
-    Process.demonitor(ref)
 
-    timer =
-      if used_by == [] do
-        max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
-        Process.send_after(self(), :idle_close, max_idle)
-      else
-        nil
-      end
+    # DOWN message can receive right after `remove_client` call and cause worker to terminate
+    state =
+      if is_nil(ref) do
+        state
+      else
+        Process.demonitor(ref)
+
+        timer =
+          if used_by == [] do
+            max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
+            Process.send_after(self(), :idle_close, max_idle)
+          else
+            nil
+          end
+
+        %{state | timer: timer}
+      end
 
-    {:reply, :ok, %{state | timer: timer}, :hibernate}
+    {:reply, :ok, state, :hibernate}
   end
 
   @impl true
@@ -103,16 +111,21 @@ def handle_info(:idle_close, state) do
     {:stop, :normal, state}
   end
 
+  @impl true
+  def handle_info({:gun_up, _pid, _protocol}, state) do
+    {:noreply, state, :hibernate}
+  end
+
   # Gracefully shutdown if the connection got closed without any streams left
   @impl true
   def handle_info({:gun_down, _pid, _protocol, _reason, []}, state) do
     {:stop, :normal, state}
   end
 
-  # Otherwise, shutdown with an error
+  # Otherwise, wait for retry
   @impl true
-  def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams} = down_message, state) do
-    {:stop, {:error, down_message}, state}
+  def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams}, state) do
+    {:noreply, state, :hibernate}
   end
 
   @impl true
@@ -100,20 +100,27 @@ defp generate_scrubber_signature(scrubbers) do
     end)
   end
 
-  def extract_first_external_url(_, nil), do: {:error, "No content"}
-
-  def extract_first_external_url(object, content) do
-    key = "URL|#{object.id}"
-
-    Cachex.fetch!(:scrubber_cache, key, fn _key ->
-      result =
-        content
-        |> Floki.parse_fragment!()
-        |> Floki.filter_out("a.mention,a.hashtag,a.attachment,a[rel~=\"tag\"]")
-        |> Floki.attribute("a", "href")
-        |> Enum.at(0)
-
-      {:commit, {:ok, result}}
-    end)
+  def extract_first_external_url_from_object(%{data: %{"content" => content}} = object)
+      when is_binary(content) do
+    unless object.data["fake"] do
+      key = "URL|#{object.id}"
+
+      Cachex.fetch!(:scrubber_cache, key, fn _key ->
+        {:commit, {:ok, extract_first_external_url(content)}}
+      end)
+    else
+      {:ok, extract_first_external_url(content)}
+    end
+  end
+
+  def extract_first_external_url_from_object(_), do: {:error, :no_content}
+
+  def extract_first_external_url(content) do
+    content
+    |> Floki.parse_fragment!()
+    |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
+    |> Enum.take(1)
+    |> Floki.attribute("href")
+    |> Enum.at(0)
   end
 end
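For context, the new public `extract_first_external_url/1` works on a bare HTML fragment; mention, hashtag, attachment and `rel="tag"` links are excluded by the `:not()` selector. A quick sketch of the expected behaviour, assuming the module is `Pleroma.HTML` as the `HTML.` alias in the rich media helper below suggests (markup is made up):

```elixir
html =
  ~s(<p><a class="mention" href="https://local.example/@alice">@alice</a> ) <>
    ~s(check <a href="https://example.com/article">this article</a></p>)

# The mention link is filtered out, so the first "real" external link wins.
Pleroma.HTML.extract_first_external_url(html)
#=> "https://example.com/article"
```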
@@ -11,7 +11,6 @@ defmodule Pleroma.HTTP.AdapterHelper do
   @type proxy_type() :: :socks4 | :socks5
   @type host() :: charlist() | :inet.ip_address()
 
-  alias Pleroma.Config
   alias Pleroma.HTTP.AdapterHelper
   require Logger
 
@@ -44,27 +43,13 @@ def maybe_add_proxy(opts, proxy), do: Keyword.put_new(opts, :proxy, proxy)
   @spec options(URI.t(), keyword()) :: keyword()
   def options(%URI{} = uri, opts \\ []) do
     @defaults
-    |> put_timeout()
     |> Keyword.merge(opts)
     |> adapter_helper().options(uri)
   end
 
-  # For Hackney, this is the time a connection can stay idle in the pool.
-  # For Gun, this is the timeout to receive a message from Gun.
-  defp put_timeout(opts) do
-    {config_key, default} =
-      if adapter() == Tesla.Adapter.Gun do
-        {:pools, Config.get([:pools, :default, :timeout], 5_000)}
-      else
-        {:hackney_pools, 10_000}
-      end
-
-    timeout = Config.get([config_key, opts[:pool], :timeout], default)
-
-    Keyword.merge(opts, timeout: timeout)
-  end
-
+  @spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
   def get_conn(uri, opts), do: adapter_helper().get_conn(uri, opts)
 
   defp adapter, do: Application.get_env(:tesla, :adapter)
 
   defp adapter_helper do
@@ -5,6 +5,7 @@
 defmodule Pleroma.HTTP.AdapterHelper.Gun do
   @behaviour Pleroma.HTTP.AdapterHelper
 
+  alias Pleroma.Config
   alias Pleroma.Gun.ConnectionPool
   alias Pleroma.HTTP.AdapterHelper
 
@@ -14,31 +15,46 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
     connect_timeout: 5_000,
     domain_lookup_timeout: 5_000,
     tls_handshake_timeout: 5_000,
-    retry: 0,
+    retry: 1,
     retry_timeout: 1000,
     await_up_timeout: 5_000
   ]
 
+  @type pool() :: :federation | :upload | :media | :default
+
   @spec options(keyword(), URI.t()) :: keyword()
   def options(incoming_opts \\ [], %URI{} = uri) do
     proxy =
-      Pleroma.Config.get([:http, :proxy_url])
+      [:http, :proxy_url]
+      |> Config.get()
       |> AdapterHelper.format_proxy()
 
-    config_opts = Pleroma.Config.get([:http, :adapter], [])
+    config_opts = Config.get([:http, :adapter], [])
 
     @defaults
     |> Keyword.merge(config_opts)
    |> add_scheme_opts(uri)
     |> AdapterHelper.maybe_add_proxy(proxy)
     |> Keyword.merge(incoming_opts)
+    |> put_timeout()
   end
 
   defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
 
   defp add_scheme_opts(opts, %{scheme: "https"}) do
-    opts
-    |> Keyword.put(:certificates_verification, true)
+    Keyword.put(opts, :certificates_verification, true)
+  end
+
+  defp put_timeout(opts) do
+    # this is the timeout to receive a message from Gun
+    Keyword.put_new(opts, :timeout, pool_timeout(opts[:pool]))
+  end
+
+  @spec pool_timeout(pool()) :: non_neg_integer()
+  def pool_timeout(pool) do
+    default = Config.get([:pools, :default, :timeout], 5_000)
+
+    Config.get([:pools, pool, :timeout], default)
   end
 
   @spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
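`pool_timeout/1` is the piece that ties the per-pool `timeout` config to each request. A small sketch of how it resolves values under the shipped defaults (assuming no operator overrides):

```elixir
# :pools, :upload ships with timeout: 15_000
Pleroma.HTTP.AdapterHelper.Gun.pool_timeout(:upload)
#=> 15_000

# Pools without an explicit timeout fall back to the :default pool's value.
Pleroma.HTTP.AdapterHelper.Gun.pool_timeout(:some_other_pool)
#=> 5_000
```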
@@ -51,11 +67,11 @@ def get_conn(uri, opts) do
 
   @prefix Pleroma.Gun.ConnectionPool
   def limiter_setup do
-    wait = Pleroma.Config.get([:connections_pool, :connection_acquisition_wait])
-    retries = Pleroma.Config.get([:connections_pool, :connection_acquisition_retries])
+    wait = Config.get([:connections_pool, :connection_acquisition_wait])
+    retries = Config.get([:connections_pool, :connection_acquisition_retries])
 
     :pools
-    |> Pleroma.Config.get([])
+    |> Config.get([])
     |> Enum.each(fn {name, opts} ->
       max_running = Keyword.get(opts, :size, 50)
       max_waiting = Keyword.get(opts, :max_waiting, 10)
@@ -69,7 +85,6 @@ def limiter_setup do
       case result do
         :ok -> :ok
         {:error, :existing} -> :ok
-        e -> raise e
       end
     end)
 
@@ -22,6 +22,10 @@ def options(connection_opts \\ [], %URI{} = uri) do
     |> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
   end
 
+  defp add_scheme_opts(opts, %URI{scheme: "https"}) do
+    Keyword.put(opts, :ssl_options, versions: [:"tlsv1.2", :"tlsv1.1", :tlsv1])
+  end
+
   defp add_scheme_opts(opts, _), do: opts
 
   @spec get_conn(URI.t(), keyword()) :: {:ok, keyword()}
@@ -11,6 +11,8 @@ defmodule Pleroma.HTTP.ExAws do
 
   @impl true
   def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
+    http_opts = Keyword.put_new(http_opts, :adapter, pool: :upload)
+
     case HTTP.request(method, url, body, headers, http_opts) do
       {:ok, env} ->
         {:ok, %{status_code: env.status, headers: env.headers, body: env.body}}
@@ -11,6 +11,8 @@ defmodule Pleroma.HTTP.Tzdata do
 
   @impl true
   def get(url, headers, options) do
+    options = Keyword.put_new(options, :adapter, pool: :default)
+
     with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
       {:ok, {env.status, env.headers, env.body}}
     end
@@ -18,6 +20,8 @@ def get(url, headers, options) do
 
   @impl true
   def head(url, headers, options) do
+    options = Keyword.put_new(options, :adapter, pool: :default)
+
     with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
       {:ok, {env.status, env.headers}}
     end
@@ -14,6 +14,8 @@ defmodule Pleroma.Instances.Instance do
   import Ecto.Query
   import Ecto.Changeset
 
+  require Logger
+
   schema "instances" do
     field(:host, :string)
     field(:unreachable_since, :naive_datetime_usec)
@@ -145,12 +147,18 @@ def get_or_update_favicon(%URI{host: host} = instance_uri) do
 
       favicon
     end
+  rescue
+    e ->
+      Logger.warn("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
+      nil
   end
 
   defp scrape_favicon(%URI{} = instance_uri) do
     try do
       with {:ok, %Tesla.Env{body: html}} <-
-             Pleroma.HTTP.get(to_string(instance_uri), [{:Accept, "text/html"}]),
+             Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}],
+               adapter: [pool: :media]
+             ),
            favicon_rel <-
              html
             |> Floki.parse_document!()
@@ -163,7 +171,12 @@ defp scrape_favicon(%URI{} = instance_uri) do
         _ -> nil
       end
     rescue
-      _ -> nil
+      e ->
+        Logger.warn(
+          "Instance.scrape_favicon(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
+        )
+
+        nil
     end
   end
 end
@@ -164,12 +164,12 @@ defp make_signature(id, date) do
       date: date
     })
 
-    [{"signature", signature}]
+    {"signature", signature}
   end
 
   defp sign_fetch(headers, id, date) do
     if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
-      headers ++ make_signature(id, date)
+      [make_signature(id, date) | headers]
     else
       headers
     end
@@ -177,7 +177,7 @@ defp sign_fetch(headers, id, date) do
 
   defp maybe_date_fetch(headers, date) do
     if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
-      headers ++ [{"date", date}]
+      [{"date", date} | headers]
     else
       headers
     end
@ -7,6 +7,7 @@ defmodule Pleroma.ReverseProxy.Client.Hackney do
|
||||||
|
|
||||||
@impl true
|
@impl true
|
||||||
def request(method, url, headers, body, opts \\ []) do
|
def request(method, url, headers, body, opts \\ []) do
|
||||||
|
opts = Keyword.put(opts, :ssl_options, versions: [:"tlsv1.2", :"tlsv1.1", :tlsv1])
|
||||||
:hackney.request(method, url, headers, body, opts)
|
:hackney.request(method, url, headers, body, opts)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@@ -15,7 +15,11 @@ defmodule Pleroma.Upload.Filter do
 
   require Logger
 
-  @callback filter(Pleroma.Upload.t()) :: :ok | {:ok, Pleroma.Upload.t()} | {:error, any()}
+  @callback filter(Pleroma.Upload.t()) ::
+              {:ok, :filtered}
+              | {:ok, :noop}
+              | {:ok, :filtered, Pleroma.Upload.t()}
+              | {:error, any()}
 
   @spec filter([module()], Pleroma.Upload.t()) :: {:ok, Pleroma.Upload.t()} | {:error, any()}
 
@@ -25,10 +29,13 @@ def filter([], upload) do
 
   def filter([filter | rest], upload) do
     case filter.filter(upload) do
-      :ok ->
+      {:ok, :filtered} ->
         filter(rest, upload)
 
-      {:ok, upload} ->
+      {:ok, :filtered, upload} ->
+        filter(rest, upload)
+
+      {:ok, :noop} ->
         filter(rest, upload)
 
       error ->
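Under the revised callback, a custom upload filter has to return one of the tagged tuples above instead of a bare `:ok`. A minimal, hypothetical filter illustrating the new contract (the module and its policy are not part of this release):

```elixir
defmodule MyInstance.Upload.Filter.RejectSvg do
  @behaviour Pleroma.Upload.Filter

  # Refuse SVG uploads, pass everything else through untouched.
  @impl true
  def filter(%Pleroma.Upload{content_type: "image/svg+xml"}),
    do: {:error, "SVG uploads are not allowed"}

  def filter(_upload), do: {:ok, :noop}
end
```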
@@ -16,9 +16,11 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do
   def filter(%Upload{name: name} = upload) do
     extension = List.last(String.split(name, "."))
     name = predefined_name(extension) || random(extension)
-    {:ok, %Upload{upload | name: name}}
+    {:ok, :filtered, %Upload{upload | name: name}}
   end
 
+  def filter(_), do: {:ok, :noop}
+
   @spec predefined_name(String.t()) :: String.t() | nil
   defp predefined_name(extension) do
     with name when not is_nil(name) <- Config.get([__MODULE__, :text]),
@@ -17,8 +17,8 @@ def filter(%Upload{name: name, tempfile: tempfile} = upload) do
       |> Base.encode16(case: :lower)
 
     filename = shasum <> "." <> extension
-    {:ok, %Upload{upload | id: shasum, path: filename}}
+    {:ok, :filtered, %Upload{upload | id: shasum, path: filename}}
   end
 
-  def filter(_), do: :ok
+  def filter(_), do: {:ok, :noop}
 end
@@ -9,11 +9,15 @@ defmodule Pleroma.Upload.Filter.Exiftool do
   """
   @behaviour Pleroma.Upload.Filter
 
-  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
+  @spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}
 
+  # webp is not compatible with exiftool at this time
+  def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
+
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     try do
       case System.cmd("exiftool", ["-overwrite_original", "-gps:all=", file], parallelism: true) do
-        {_response, 0} -> :ok
+        {_response, 0} -> {:ok, :filtered}
         {error, 1} -> {:error, error}
       end
     rescue
@@ -22,5 +26,5 @@ def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     end
   end
 
-  def filter(_), do: :ok
+  def filter(_), do: {:ok, :noop}
 end
@@ -38,16 +38,16 @@ defmodule Pleroma.Upload.Filter.Mogrifun do
     [{"fill", "yellow"}, {"tint", "40"}]
   ]
 
-  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
+  @spec filter(Pleroma.Upload.t()) :: {:ok, atom()} | {:error, String.t()}
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     try do
       Filter.Mogrify.do_filter(file, [Enum.random(@filters)])
-      :ok
+      {:ok, :filtered}
     rescue
       _e in ErlangError ->
         {:error, "mogrify command not found"}
     end
   end
 
-  def filter(_), do: :ok
+  def filter(_), do: {:ok, :noop}
 end
@@ -8,18 +8,18 @@ defmodule Pleroma.Upload.Filter.Mogrify do
   @type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()}
   @type conversions :: conversion() | [conversion()]
 
-  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
+  @spec filter(Pleroma.Upload.t()) :: {:ok, :atom} | {:error, String.t()}
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     try do
       do_filter(file, Pleroma.Config.get!([__MODULE__, :args]))
-      :ok
+      {:ok, :filtered}
     rescue
       _e in ErlangError ->
         {:error, "mogrify command not found"}
     end
   end
 
-  def filter(_), do: :ok
+  def filter(_), do: {:ok, :noop}
 
   def do_filter(file, filters) do
     file
@@ -46,12 +46,23 @@ def put_file(%Pleroma.Upload{} = upload) do
 
     op =
       if streaming do
-        upload.tempfile
-        |> ExAws.S3.Upload.stream_file()
-        |> ExAws.S3.upload(bucket, s3_name, [
-          {:acl, :public_read},
-          {:content_type, upload.content_type}
-        ])
+        op =
+          upload.tempfile
+          |> ExAws.S3.Upload.stream_file()
+          |> ExAws.S3.upload(bucket, s3_name, [
+            {:acl, :public_read},
+            {:content_type, upload.content_type}
+          ])
+
+        if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Gun do
+          # set s3 upload timeout to respect :upload pool timeout
+          # timeout should be slightly larger, so s3 can retry upload on fail
+          timeout = Pleroma.HTTP.AdapterHelper.Gun.pool_timeout(:upload) + 1_000
+          opts = Keyword.put(op.opts, :timeout, timeout)
+          Map.put(op, :opts, opts)
+        else
+          op
+        end
       else
         {:ok, file_data} = File.read(upload.tempfile)
 
@@ -1125,31 +1125,31 @@ def get_followers_query(%User{} = user, nil) do
     User.Query.build(%{followers: user, deactivated: false})
   end
 
-  def get_followers_query(user, page) do
+  def get_followers_query(%User{} = user, page) do
     user
     |> get_followers_query(nil)
     |> User.Query.paginate(page, 20)
   end
 
   @spec get_followers_query(User.t()) :: Ecto.Query.t()
-  def get_followers_query(user), do: get_followers_query(user, nil)
+  def get_followers_query(%User{} = user), do: get_followers_query(user, nil)
 
   @spec get_followers(User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
-  def get_followers(user, page \\ nil) do
+  def get_followers(%User{} = user, page \\ nil) do
     user
     |> get_followers_query(page)
     |> Repo.all()
   end
 
   @spec get_external_followers(User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
-  def get_external_followers(user, page \\ nil) do
+  def get_external_followers(%User{} = user, page \\ nil) do
     user
     |> get_followers_query(page)
     |> User.Query.build(%{external: true})
     |> Repo.all()
   end
 
-  def get_followers_ids(user, page \\ nil) do
+  def get_followers_ids(%User{} = user, page \\ nil) do
     user
     |> get_followers_query(page)
     |> select([u], u.id)
@@ -1161,29 +1161,29 @@ def get_friends_query(%User{} = user, nil) do
     User.Query.build(%{friends: user, deactivated: false})
   end
 
-  def get_friends_query(user, page) do
+  def get_friends_query(%User{} = user, page) do
     user
     |> get_friends_query(nil)
     |> User.Query.paginate(page, 20)
   end
 
   @spec get_friends_query(User.t()) :: Ecto.Query.t()
-  def get_friends_query(user), do: get_friends_query(user, nil)
+  def get_friends_query(%User{} = user), do: get_friends_query(user, nil)
 
-  def get_friends(user, page \\ nil) do
+  def get_friends(%User{} = user, page \\ nil) do
     user
     |> get_friends_query(page)
     |> Repo.all()
   end
 
-  def get_friends_ap_ids(user) do
+  def get_friends_ap_ids(%User{} = user) do
     user
     |> get_friends_query(nil)
     |> select([u], u.ap_id)
     |> Repo.all()
   end
 
-  def get_friends_ids(user, page \\ nil) do
+  def get_friends_ids(%User{} = user, page \\ nil) do
     user
     |> get_friends_query(page)
     |> select([u], u.id)
@@ -115,8 +115,8 @@ defp trigram_rank(query, query_string) do
     )
   end
 
-  defp base_query(_user, false), do: User
-  defp base_query(user, true), do: User.get_followers_query(user)
+  defp base_query(%User{} = user, true), do: User.get_friends_query(user)
+  defp base_query(_user, _following), do: User
 
   defp filter_invisible_users(query) do
     from(q in query, where: q.invisible == false)
@@ -399,21 +399,30 @@ def read_inbox(%{assigns: %{user: %User{nickname: as_nickname}}} = conn, %{
 
   defp handle_user_activity(
          %User{} = user,
-         %{"type" => "Create", "object" => %{"type" => "Note"}} = params
+         %{"type" => "Create", "object" => %{"type" => "Note"} = object} = params
        ) do
-    object =
-      params["object"]
-      |> Map.merge(Map.take(params, ["to", "cc"]))
-      |> Map.put("attributedTo", user.ap_id())
-      |> Transmogrifier.fix_object()
+    content = if is_binary(object["content"]), do: object["content"], else: ""
+    name = if is_binary(object["name"]), do: object["name"], else: ""
+    summary = if is_binary(object["summary"]), do: object["summary"], else: ""
+    length = String.length(content <> name <> summary)
 
-    ActivityPub.create(%{
-      to: params["to"],
-      actor: user,
-      context: object["context"],
-      object: object,
-      additional: Map.take(params, ["cc"])
-    })
+    if length > Pleroma.Config.get([:instance, :limit]) do
+      {:error, dgettext("errors", "Note is over the character limit")}
+    else
+      object =
+        object
+        |> Map.merge(Map.take(params, ["to", "cc"]))
+        |> Map.put("attributedTo", user.ap_id())
+        |> Transmogrifier.fix_object()
+
+      ActivityPub.create(%{
+        to: params["to"],
+        actor: user,
+        context: object["context"],
+        object: object,
+        additional: Map.take(params, ["cc"])
+      })
+    end
   end
 
   defp handle_user_activity(%User{} = user, %{"type" => "Delete"} = params) do
@@ -379,8 +379,7 @@ defp maybe_parse_filters(filters) do
     filters
     |> String.split(",")
     |> Enum.filter(&Enum.member?(@filters, &1))
-    |> Enum.map(&String.to_atom/1)
-    |> Map.new(&{&1, true})
+    |> Map.new(&{String.to_existing_atom(&1), true})
   end
 
   def right_add_multiple(%{assigns: %{user: admin}} = conn, %{
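The switch from `String.to_atom/1` to `String.to_existing_atom/1` is the Admin API atom-leak fix from the changelog: user-supplied filter names can only reference atoms that already exist, so arbitrary input can no longer grow the atom table. A small illustration (the allow-list is hypothetical, standing in for `@filters`):

```elixir
# Declaring the allow-list as atoms up front guarantees that
# String.to_existing_atom/1 can resolve every permitted name.
allowed = [:local, :external, :active, :deactivated]
allowed_strings = Enum.map(allowed, &Atom.to_string/1)

"local,bogus"
|> String.split(",")
|> Enum.filter(&(&1 in allowed_strings))
|> Map.new(&{String.to_existing_atom(&1), true})
#=> %{local: true}
```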
@@ -9,7 +9,15 @@ defmodule Pleroma.Web.Feed.TagController do
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.Feed.FeedView
 
-  def feed(conn, %{"tag" => raw_tag} = params) do
+  def feed(conn, params) do
+    unless Pleroma.Config.restrict_unauthenticated_access?(:activities, :local) do
+      render_feed(conn, params)
+    else
+      render_error(conn, :not_found, "Not found")
+    end
+  end
+
+  def render_feed(conn, %{"tag" => raw_tag} = params) do
     {format, tag} = parse_tag(raw_tag)
 
     activities =
@@ -37,7 +37,15 @@ def feed_redirect(conn, %{"nickname" => nickname}) do
     end
   end
 
-  def feed(conn, %{"nickname" => nickname} = params) do
+  def feed(conn, params) do
+    unless Pleroma.Config.restrict_unauthenticated_access?(:profiles, :local) do
+      render_feed(conn, params)
+    else
+      errors(conn, {:error, :not_found})
+    end
+  end
+
+  def render_feed(conn, %{"nickname" => nickname} = params) do
     format = get_format(conn)
 
     format =
@@ -59,17 +59,11 @@ def logout(conn, _) do
   def password_reset(conn, params) do
     nickname_or_email = params["email"] || params["nickname"]
 
-    with {:ok, _} <- TwitterAPI.password_reset(nickname_or_email) do
-      conn
-      |> put_status(:no_content)
-      |> json("")
-    else
-      {:error, "unknown user"} ->
-        send_resp(conn, :not_found, "")
+    TwitterAPI.password_reset(nickname_or_email)
 
-      {:error, _} ->
-        send_resp(conn, :bad_request, "")
-    end
+    conn
+    |> put_status(:no_content)
+    |> json("")
   end
 
   defp local_mastodon_root_path(conn) do
@@ -23,6 +23,17 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
 
   import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]
 
+  # This is a naive way to do this, just spawning a process per activity
+  # to fetch the preview. However it should be fine considering
+  # pagination is restricted to 40 activities at a time
+  defp fetch_rich_media_for_activities(activities) do
+    Enum.each(activities, fn activity ->
+      spawn(fn ->
+        Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
+      end)
+    end)
+  end
+
   # TODO: Add cached version.
   defp get_replied_to_activities([]), do: %{}
 
@@ -80,6 +91,11 @@ def render("index.json", opts) do
 
     # To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
     activities = Enum.filter(opts.activities, & &1)
+
+    # Start fetching rich media before doing anything else, so that later calls to get the cards
+    # only block for timeout in the worst case, as opposed to
+    # length(activities_with_links) * timeout
+    fetch_rich_media_for_activities(activities)
     replied_to_activities = get_replied_to_activities(activities)
 
     parent_activities =
@@ -7,8 +7,9 @@ defmodule Pleroma.Web.Metadata do
 
   def build_tags(params) do
     providers = [
+      Pleroma.Web.Metadata.Providers.RelMe,
       Pleroma.Web.Metadata.Providers.RestrictIndexing
-      | Pleroma.Config.get([__MODULE__, :providers], [])
+      | activated_providers()
     ]
 
     Enum.reduce(providers, "", fn parser, acc ->
@@ -42,4 +43,12 @@ def activity_nsfw?(%{data: %{"sensitive" => sensitive}}) do
   def activity_nsfw?(_) do
     false
   end
+
+  defp activated_providers do
+    unless Pleroma.Config.restrict_unauthenticated_access?(:activities, :local) do
+      [Pleroma.Web.Metadata.Providers.Feed | Pleroma.Config.get([__MODULE__, :providers], [])]
+    else
+      []
+    end
+  end
 end
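`activated_providers/0` above is what drops the Feed provider on private instances. A hedged sketch of the config that triggers that branch, assuming the standard `:restrict_unauthenticated` layout:

```elixir
# With local activities hidden from anonymous visitors,
# activated_providers/0 returns [] and no Feed <link> tag is emitted.
config :pleroma, :restrict_unauthenticated,
  activities: %{local: true, remote: true}
```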
@@ -58,7 +58,7 @@ def fetch_data_for_object(object) do
     with true <- Config.get([:rich_media, :enabled]),
          false <- object.data["sensitive"] || false,
          {:ok, page_url} <-
-           HTML.extract_first_external_url(object, object.data["content"]),
+           HTML.extract_first_external_url_from_object(object),
          :ok <- validate_page_url(page_url),
          {:ok, rich_media} <- Parser.parse(page_url) do
       %{page_url: page_url, rich_media: rich_media}
@@ -96,6 +96,6 @@ def rich_media_get(url) do
         @rich_media_options
       end
 
-    Pleroma.HTTP.get(url, headers, options)
+    Pleroma.HTTP.get(url, headers, adapter: options)
   end
 end
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.RichMedia.Parser do
+  require Logger
+
   defp parsers do
     Pleroma.Config.get([:rich_media, :parsers])
   end
@@ -10,17 +12,29 @@ defp parsers do
   def parse(nil), do: {:error, "No URL provided"}
 
   if Pleroma.Config.get(:env) == :test do
+    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url), do: parse_url(url)
   else
+    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url) do
-      try do
-        Cachex.fetch!(:rich_media_cache, url, fn _ ->
-          {:commit, parse_url(url)}
-        end)
-        |> set_ttl_based_on_image(url)
-      rescue
-        e ->
-          {:error, "Cachex error: #{inspect(e)}"}
+      with {:ok, data} <- get_cached_or_parse(url),
+           {:ok, _} <- set_ttl_based_on_image(data, url) do
+        {:ok, data}
+      else
+        error ->
+          Logger.error(fn -> "Rich media error: #{inspect(error)}" end)
+      end
+    end
+
+    defp get_cached_or_parse(url) do
+      case Cachex.fetch!(:rich_media_cache, url, fn _ -> {:commit, parse_url(url)} end) do
+        {:ok, _data} = res ->
+          res
+
+        {:error, _} = e ->
+          ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
+          Cachex.expire(:rich_media_cache, url, ttl)
+          e
       end
     end
   end
@@ -47,19 +61,26 @@ def ttl(data, url) do
       config :pleroma, :rich_media,
         ttl_setters: [MyModule]
   """
-  def set_ttl_based_on_image({:ok, data}, url) do
-    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
-         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
-      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
-      {:ok, data}
-    else
+  @spec set_ttl_based_on_image(map(), String.t()) ::
+          {:ok, Integer.t() | :noop} | {:error, :no_key}
+  def set_ttl_based_on_image(data, url) do
+    case get_ttl_from_image(data, url) do
+      {:ok, ttl} when is_number(ttl) ->
+        ttl = ttl * 1000
+
+        case Cachex.expire_at(:rich_media_cache, url, ttl) do
+          {:ok, true} -> {:ok, ttl}
+          {:ok, false} -> {:error, :no_key}
+        end
+
       _ ->
-        {:ok, data}
+        {:ok, :noop}
     end
   end
 
   defp get_ttl_from_image(data, url) do
-    Pleroma.Config.get([:rich_media, :ttl_setters])
+    [:rich_media, :ttl_setters]
+    |> Pleroma.Config.get()
     |> Enum.reduce({:ok, nil}, fn
       module, {:ok, _ttl} ->
         module.ttl(data, url)
@@ -70,23 +91,16 @@ defp get_ttl_from_image(data, url) do
   end
 
   defp parse_url(url) do
-    try do
-      {:ok, %Tesla.Env{body: html}} = Pleroma.Web.RichMedia.Helpers.rich_media_get(url)
-
+    with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
+         {:ok, html} <- Floki.parse_document(html) do
       html
-      |> parse_html()
       |> maybe_parse()
       |> Map.put("url", url)
      |> clean_parsed_data()
      |> check_parsed_data()
-    rescue
-      e ->
-        {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
     end
   end
 
-  defp parse_html(html), do: Floki.parse_document!(html)
-
   defp maybe_parse(html) do
     Enum.reduce_while(parsers(), %{}, fn parser, acc ->
       case parser.parse(html, acc) do
@@ -10,20 +10,15 @@ def ttl(data, _url) do
       |> parse_query_params()
       |> format_query_params()
       |> get_expiration_timestamp()
+    else
+      {:error, "Not aws signed url #{inspect(image)}"}
     end
   end
 
-  defp is_aws_signed_url(""), do: nil
-  defp is_aws_signed_url(nil), do: nil
-
-  defp is_aws_signed_url(image) when is_binary(image) do
+  defp is_aws_signed_url(image) when is_binary(image) and image != "" do
     %URI{host: host, query: query} = URI.parse(image)
 
-    if String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires") do
-      image
-    else
-      nil
-    end
+    String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires")
   end
 
   defp is_aws_signed_url(_), do: nil
@@ -46,6 +41,6 @@ defp get_expiration_timestamp(params) when is_map(params) do
       |> Map.get("X-Amz-Date")
       |> Timex.parse("{ISO:Basic:Z}")
 
-    Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
+    {:ok, Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))}
   end
 end
@@ -72,7 +72,7 @@ defp maybe_notify_admins(%User{} = account) do
 
   def password_reset(nickname_or_email) do
     with true <- is_binary(nickname_or_email),
-         %User{local: true, email: email} = user when is_binary(email) <-
+         %User{local: true, email: email, deactivated: false} = user when is_binary(email) <-
           User.get_by_nickname_or_email(nickname_or_email),
         {:ok, token_record} <- Pleroma.PasswordResetToken.create_token(user) do
      user
@@ -81,17 +81,8 @@ def password_reset(nickname_or_email) do
 
       {:ok, :enqueued}
     else
-      false ->
-        {:error, "bad user identifier"}
-
-      %User{local: true, email: nil} ->
+      _ ->
         {:ok, :noop}
-
-      %User{local: false} ->
-        {:error, "remote user"}
-
-      nil ->
-        {:error, "unknown user"}
     end
   end
 
@@ -136,12 +136,12 @@ def get_template_from_xml(body) do
 
   def find_lrdd_template(domain) do
     with {:ok, %{status: status, body: body}} when status in 200..299 <-
-           HTTP.get("http://#{domain}/.well-known/host-meta", []) do
+           HTTP.get("http://#{domain}/.well-known/host-meta") do
       get_template_from_xml(body)
     else
       _ ->
         with {:ok, %{body: body, status: status}} when status in 200..299 <-
-               HTTP.get("https://#{domain}/.well-known/host-meta", []) do
+               HTTP.get("https://#{domain}/.well-known/host-meta") do
           get_template_from_xml(body)
         else
           e -> {:error, "Can't find LRDD template: #{inspect(e)}"}
mix.exs
@@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
   def project do
     [
       app: :pleroma,
-      version: version("2.1.0"),
+      version: version("2.1.1"),
       elixir: "~> 1.9",
       elixirc_paths: elixirc_paths(Mix.env()),
       compilers: [:phoenix, :gettext] ++ Mix.compilers(),
@@ -195,7 +195,9 @@ defp deps do
       {:ex_machina, "~> 2.4", only: :test},
      {:credo, "~> 1.4", only: [:dev, :test], runtime: false},
      {:mock, "~> 0.3.5", only: :test},
-      {:excoveralls, "~> 0.13.1", only: :test},
+      # temporary downgrade for excoveralls, hackney until hackney max_connections bug will be fixed
+      {:excoveralls, "0.12.3", only: :test},
+      {:hackney, "1.15.2", override: true},
      {:mox, "~> 0.5", only: :test},
      {:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test}
     ] ++ oauth_deps()
mix.lock
@ -11,7 +11,7 @@
|
||||||
"calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"},
|
"calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"},
|
||||||
"captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "e0f16822d578866e186a0974d65ad58cddc1e2ab", [ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"]},
|
"captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "e0f16822d578866e186a0974d65ad58cddc1e2ab", [ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"]},
|
||||||
"castore": {:hex, :castore, "0.1.7", "1ca19eee705cde48c9e809e37fdd0730510752cc397745e550f6065a56a701e9", [:mix], [], "hexpm", "a2ae2c13d40e9c308387f1aceb14786dca019ebc2a11484fb2a9f797ea0aa0d8"},
|
"castore": {:hex, :castore, "0.1.7", "1ca19eee705cde48c9e809e37fdd0730510752cc397745e550f6065a56a701e9", [:mix], [], "hexpm", "a2ae2c13d40e9c308387f1aceb14786dca019ebc2a11484fb2a9f797ea0aa0d8"},
|
||||||
"certifi": {:hex, :certifi, "2.5.2", "b7cfeae9d2ed395695dd8201c57a2d019c0c43ecaf8b8bcb9320b40d6662f340", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "3b3b5f36493004ac3455966991eaf6e768ce9884693d9968055aeeeb1e575040"},
|
"certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "805abd97539caf89ec6d4732c91e62ba9da0cda51ac462380bbd28ee697a8c42"},
|
||||||
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
|
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
|
||||||
"comeonin": {:hex, :comeonin, "5.3.1", "7fe612b739c78c9c1a75186ef2d322ce4d25032d119823269d0aa1e2f1e20025", [:mix], [], "hexpm", "d6222483060c17f0977fad1b7401ef0c5863c985a64352755f366aee3799c245"},
|
"comeonin": {:hex, :comeonin, "5.3.1", "7fe612b739c78c9c1a75186ef2d322ce4d25032d119823269d0aa1e2f1e20025", [:mix], [], "hexpm", "d6222483060c17f0977fad1b7401ef0c5863c985a64352755f366aee3799c245"},
|
||||||
"concurrent_limiter": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/concurrent_limiter.git", "55e92f84b4ed531bd487952a71040a9c69dc2807", [ref: "55e92f84b4ed531bd487952a71040a9c69dc2807"]},
|
"concurrent_limiter": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/concurrent_limiter.git", "55e92f84b4ed531bd487952a71040a9c69dc2807", [ref: "55e92f84b4ed531bd487952a71040a9c69dc2807"]},
|
||||||
|
@@ -41,9 +41,9 @@
 "ex_doc": {:hex, :ex_doc, "0.22.2", "03a2a58bdd2ba0d83d004507c4ee113b9c521956938298eba16e55cc4aba4a6c", [:mix], [{:earmark_parser, "~> 1.4.0", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm", "cf60e1b3e2efe317095b6bb79651f83a2c1b3edcb4d319c421d7fcda8b3aff26"},
 "ex_machina": {:hex, :ex_machina, "2.4.0", "09a34c5d371bfb5f78399029194a8ff67aff340ebe8ba19040181af35315eabb", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "a20bc9ddc721b33ea913b93666c5d0bdca5cbad7a67540784ae277228832d72c"},
 "ex_syslogger": {:hex, :ex_syslogger, "1.5.2", "72b6aa2d47a236e999171f2e1ec18698740f40af0bd02c8c650bf5f1fd1bac79", [:mix], [{:poison, ">= 1.5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:syslog, "~> 1.1.0", [hex: :syslog, repo: "hexpm", optional: false]}], "hexpm", "ab9fab4136dbc62651ec6f16fa4842f10cf02ab4433fa3d0976c01be99398399"},
-"excoveralls": {:hex, :excoveralls, "0.13.1", "b9f1697f7c9e0cfe15d1a1d737fb169c398803ffcbc57e672aa007e9fd42864c", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "b4bb550e045def1b4d531a37fb766cbbe1307f7628bf8f0414168b3f52021cce"},
+"excoveralls": {:hex, :excoveralls, "0.12.3", "2142be7cb978a3ae78385487edda6d1aff0e482ffc6123877bb7270a8ffbcfe0", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "568a3e616c264283f5dea5b020783ae40eef3f7ee2163f7a67cbd7b35bcadada"},
-"fast_html": {:hex, :fast_html, "2.0.2", "1fabc408b2baa965cf6399a48796326f2721b21b397a3c667bb3bb88fb9559a4", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "f077e2c1597a6e2678e6cacc64f456a6c6024eb4240092c46d4212496dc59aba"},
+"fast_html": {:hex, :fast_html, "2.0.4", "4910ee49f2f6b19692e3bf30bf97f1b6b7dac489cd6b0f34cd0fe3042c56ba30", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "3bb49d541dfc02ad5e425904f53376d758c09f89e521afc7d2b174b3227761ea"},
-"fast_sanitize": {:hex, :fast_sanitize, "0.2.1", "3302421a988992b6cae08e68f77069e167ff116444183f3302e3c36017a50558", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bcd2c54e328128515edd1a8fb032fdea7e5581672ba161fc5962d21ecee92502"},
+"fast_sanitize": {:hex, :fast_sanitize, "0.2.2", "3cbbaebaea6043865dfb5b4ecb0f1af066ad410a51470e353714b10c42007b81", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "69f204db9250afa94a0d559d9110139850f57de2b081719fbafa1e9a89e94466"},
 "flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "31fc8090fde1acd267c07c36ea7365b8604055f897d3a53dd967658c691bd827"},
 "floki": {:hex, :floki, "0.27.0", "6b29a14283f1e2e8fad824bc930eaa9477c462022075df6bea8f0ad811c13599", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm", "583b8c13697c37179f1f82443bcc7ad2f76fbc0bf4c186606eebd658f7f2631b"},
 "gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm", "29bd14a88030980849c7ed2447b8db6d6c9278a28b11a44cafe41b791205440f"},
@@ -51,12 +51,12 @@
 "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"},
 "gettext": {:hex, :gettext, "0.18.0", "406d6b9e0e3278162c2ae1de0a60270452c553536772167e2d701f028116f870", [:mix], [], "hexpm", "c3f850be6367ebe1a08616c2158affe4a23231c70391050bf359d5f92f66a571"},
 "gun": {:git, "https://github.com/ninenines/gun.git", "921c47146b2d9567eac7e9a4d2ccc60fffd4f327", [ref: "921c47146b2d9567eac7e9a4d2ccc60fffd4f327"]},
-"hackney": {:hex, :hackney, "1.16.0", "5096ac8e823e3a441477b2d187e30dd3fff1a82991a806b2003845ce72ce2d84", [:rebar3], [{:certifi, "2.5.2", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.1", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.0", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.6", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "3bf0bebbd5d3092a3543b783bf065165fa5d3ad4b899b836810e513064134e18"},
+"hackney": {:hex, :hackney, "1.15.2", "07e33c794f8f8964ee86cebec1a8ed88db5070e52e904b8f12209773c1036085", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.5", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "e0100f8ef7d1124222c11ad362c857d3df7cb5f4204054f9f0f4a728666591fc"},
 "html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm", "30efab070904eb897ff05cd52fa61c1025d7f8ef3a9ca250bc4e6513d16c32de"},
 "html_sanitize_ex": {:hex, :html_sanitize_ex, "1.3.0", "f005ad692b717691203f940c686208aa3d8ffd9dd4bb3699240096a51fa9564e", [:mix], [{:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
 "http_signatures": {:hex, :http_signatures, "0.1.0", "4e4b501a936dbf4cb5222597038a89ea10781776770d2e185849fa829686b34c", [:mix], [], "hexpm", "f8a7b3731e3fd17d38fa6e343fcad7b03d6874a3b0a108c8568a71ed9c2cf824"},
-"httpoison": {:hex, :httpoison, "1.7.0", "abba7d086233c2d8574726227b6c2c4f6e53c4deae7fe5f6de531162ce9929a0", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "975cc87c845a103d3d1ea1ccfd68a2700c211a434d8428b10c323dc95dc5b980"},
+"httpoison": {:hex, :httpoison, "1.6.2", "ace7c8d3a361cebccbed19c283c349b3d26991eff73a1eaaa8abae2e3c8089b6", [:mix], [{:hackney, "~> 1.15 and >= 1.15.2", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "aa2c74bd271af34239a3948779612f87df2422c2fdcfdbcec28d9c105f0773fe"},
-"idna": {:hex, :idna, "6.0.1", "1d038fb2e7668ce41fbf681d2c45902e52b3cb9e9c77b55334353b222c2ee50c", [:rebar3], [{:unicode_util_compat, "0.5.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a02c8a1c4fd601215bb0b0324c8a6986749f807ce35f25449ec9e69758708122"},
+"idna": {:hex, :idna, "6.0.0", "689c46cbcdf3524c44d5f3dde8001f364cd7608a99556d8fbd8239a5798d4c10", [:rebar3], [{:unicode_util_compat, "0.4.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "4bdd305eb64e18b0273864920695cb18d7a2021f31a11b9c5fbcd9a253f936e2"},
 "inet_cidr": {:hex, :inet_cidr, "1.0.4", "a05744ab7c221ca8e395c926c3919a821eb512e8f36547c062f62c4ca0cf3d6e", [:mix], [], "hexpm", "64a2d30189704ae41ca7dbdd587f5291db5d1dda1414e0774c29ffc81088c1bc"},
 "jason": {:hex, :jason, "1.2.1", "12b22825e22f468c02eb3e4b9985f3d0cb8dc40b9bd704730efa11abd2708c44", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "b659b8571deedf60f79c5a608e15414085fa141344e2716fbd6988a084b5f993"},
 "joken": {:hex, :joken, "2.2.0", "2daa1b12be05184aff7b5ace1d43ca1f81345962285fff3f88db74927c954d3a", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "b4f92e30388206f869dd25d1af628a1d99d7586e5cf0672f64d4df84c4d2f5e9"},
@@ -105,7 +105,7 @@
 "recon": {:hex, :recon, "2.5.1", "430ffa60685ac1efdfb1fe4c97b8767c92d0d92e6e7c3e8621559ba77598678a", [:mix, :rebar3], [], "hexpm", "5721c6b6d50122d8f68cccac712caa1231f97894bab779eff5ff0f886cb44648"},
 "remote_ip": {:git, "https://git.pleroma.social/pleroma/remote_ip.git", "b647d0deecaa3acb140854fe4bda5b7e1dc6d1c8", [ref: "b647d0deecaa3acb140854fe4bda5b7e1dc6d1c8"]},
 "sleeplocks": {:hex, :sleeplocks, "1.1.1", "3d462a0639a6ef36cc75d6038b7393ae537ab394641beb59830a1b8271faeed3", [:rebar3], [], "hexpm", "84ee37aeff4d0d92b290fff986d6a95ac5eedf9b383fadfd1d88e9b84a1c02e1"},
-"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
+"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.5", "6eaf7ad16cb568bb01753dbbd7a95ff8b91c7979482b95f38443fe2c8852a79b", [:make, :mix, :rebar3], [], "hexpm", "13104d7897e38ed7f044c4de953a6c28597d1c952075eb2e328bc6d6f2bfc496"},
 "sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm", "2e1ec458f892ffa81f9f8386e3f35a1af6db7a7a37748a64478f13163a1f3573"},
 "swoosh": {:hex, :swoosh, "1.0.0", "c547cfc83f30e12d5d1fdcb623d7de2c2e29a5becfc68bf8f42ba4d23d2c2756", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm", "b3b08e463f876cb6167f7168e9ad99a069a724e124bcee61847e0e1ed13f4a0d"},
 "syslog": {:hex, :syslog, "1.1.0", "6419a232bea84f07b56dc575225007ffe34d9fdc91abe6f1b2f254fd71d8efc2", [:rebar3], [], "hexpm", "4c6a41373c7e20587be33ef841d3de6f3beba08519809329ecc4d27b15b659e1"},
@@ -115,7 +115,7 @@
 "trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"},
 "tzdata": {:hex, :tzdata, "1.0.3", "73470ad29dde46e350c60a66e6b360d3b99d2d18b74c4c349dbebbc27a09a3eb", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "a6e1ee7003c4d04ecbd21dd3ec690d4c6662db5d3bbdd7262d53cdf5e7c746c1"},
 "ueberauth": {:hex, :ueberauth, "0.6.3", "d42ace28b870e8072cf30e32e385579c57b9cc96ec74fa1f30f30da9c14f3cc0", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "afc293d8a1140d6591b53e3eaf415ca92842cb1d32fad3c450c6f045f7f91b60"},
-"unicode_util_compat": {:hex, :unicode_util_compat, "0.5.0", "8516502659002cec19e244ebd90d312183064be95025a319a6c7e89f4bccd65b", [:rebar3], [], "hexpm", "d48d002e15f5cc105a696cf2f1bbb3fc72b4b770a184d8420c8db20da2674b38"},
+"unicode_util_compat": {:hex, :unicode_util_compat, "0.4.1", "d869e4c68901dd9531385bb0c8c40444ebf624e60b6962d95952775cac5e90cd", [:rebar3], [], "hexpm", "1d1848c40487cdb0b30e8ed975e34e025860c02e419cb615d255849f3427439d"},
 "unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm", "6c7729a2d214806450d29766abc2afaa7a2cbecf415be64f36a6691afebb50e5"},
 "web_push_encryption": {:hex, :web_push_encryption, "0.3.0", "598b5135e696fd1404dc8d0d7c0fa2c027244a4e5d5e5a98ba267f14fdeaabc8", [:mix], [{:httpoison, "~> 1.0", [hex: :httpoison, repo: "hexpm", optional: false]}, {:jose, "~> 1.8", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "f10bdd1afe527ede694749fb77a2f22f146a51b054c7fa541c9fd920fba7c875"},
 "websocket_client": {:git, "https://github.com/jeremyong/websocket_client.git", "9a6f65d05ebf2725d62fb19262b21f1805a59fbf", []},
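
A note on the lock-file entries above: each `:hex` entry in `mix.lock` pins one package, so every version change in this diff also swaps the checksums recorded next to it, while the `:git` entries (captcha, concurrent_limiter, gun, remote_ip, websocket_client) pin a commit ref instead. A small sketch that destructures one entry from this diff to show the layout; the field names follow the usual Hex lock-file interpretation and are an assumption, not something stated in the changeset:

    # certifi entry as locked above; unused fields are underscored
    {:hex, name, version, _checksum, _build_tools, _deps, repo, _outer_checksum} =
      {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5",
       [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}],
       "hexpm", "805abd97539caf89ec6d4732c91e62ba9da0cda51ac462380bbd28ee697a8c42"}

    IO.puts("locked #{name} #{version} from #{repo}")  # => locked certifi 2.5.1 from hexpm
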
@@ -1 +1 @@
-<!DOCTYPE html><html lang=en><head><meta charset=utf-8><meta name=viewport content="width=device-width,initial-scale=1,user-scalable=no"><title>Pleroma</title><!--server-generated-meta--><link rel=icon type=image/png href=/favicon.png><link href=/static/css/app.77b1644622e3bae24b6b.css rel=stylesheet><link href=/static/fontello.1598361006087.css rel=stylesheet></head><body class=hidden><noscript>To use Pleroma, please enable JavaScript.</noscript><div id=app></div><script type=text/javascript src=/static/js/vendors~app.bc5812c087f5dbcb914d.js></script><script type=text/javascript src=/static/js/app.154c25316542278028a6.js></script></body></html>
+<!DOCTYPE html><html lang=en><head><meta charset=utf-8><meta name=viewport content="width=device-width,initial-scale=1,user-scalable=no"><title>Pleroma</title><!--server-generated-meta--><link rel=icon type=image/png href=/favicon.png><link href=/static/css/app.77b1644622e3bae24b6b.css rel=stylesheet><link href=/static/fontello.1599568314856.css rel=stylesheet></head><body class=hidden><noscript>To use Pleroma, please enable JavaScript.</noscript><div id=app></div><script type=text/javascript src=/static/js/vendors~app.90c4af83c1ae68f4cd95.js></script><script type=text/javascript src=/static/js/app.55d173dc5e39519aa518.js></script></body></html>
(Binary static assets changed; contents not shown in the diff. One image asset is listed at 28 KiB both before and after.)
@@ -165,7 +165,7 @@ test "filters invalid microformats markup" do
 end
 end

-describe "extract_first_external_url" do
+describe "extract_first_external_url_from_object" do
 test "extracts the url" do
 user = insert(:user)

@@ -176,7 +176,7 @@ test "extracts the url" do
 })

 object = Object.normalize(activity)
-{:ok, url} = HTML.extract_first_external_url(object, object.data["content"])
+{:ok, url} = HTML.extract_first_external_url_from_object(object)
 assert url == "https://github.com/komeiji-satori/Dress"
 end

@@ -191,7 +191,7 @@ test "skips mentions" do
 })

 object = Object.normalize(activity)
-{:ok, url} = HTML.extract_first_external_url(object, object.data["content"])
+{:ok, url} = HTML.extract_first_external_url_from_object(object)

 assert url == "https://github.com/syuilo/misskey/blob/develop/docs/setup.en.md"

@@ -207,7 +207,7 @@ test "skips hashtags" do
 })

 object = Object.normalize(activity)
-{:ok, url} = HTML.extract_first_external_url(object, object.data["content"])
+{:ok, url} = HTML.extract_first_external_url_from_object(object)

 assert url == "https://www.pixiv.net/member_illust.php?mode=medium&illust_id=72255140"
 end
@@ -223,7 +223,7 @@ test "skips microformats hashtags" do
 })

 object = Object.normalize(activity)
-{:ok, url} = HTML.extract_first_external_url(object, object.data["content"])
+{:ok, url} = HTML.extract_first_external_url_from_object(object)

 assert url == "https://www.pixiv.net/member_illust.php?mode=medium&illust_id=72255140"
 end
@@ -235,7 +235,7 @@ test "does not crash when there is an HTML entity in a link" do

 object = Object.normalize(activity)

-assert {:ok, nil} = HTML.extract_first_external_url(object, object.data["content"])
+assert {:ok, nil} = HTML.extract_first_external_url_from_object(object)
 end

 test "skips attachment links" do
@@ -249,7 +249,7 @@ test "skips attachment links" do

 object = Object.normalize(activity)

-assert {:ok, nil} = HTML.extract_first_external_url(object, object.data["content"])
+assert {:ok, nil} = HTML.extract_first_external_url_from_object(object)
 end
 end
 end
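
Every call site in the tests above moves from the two-argument helper to a single-argument one that takes only the object. A minimal before/after sketch of the call-site migration, reusing the variables the tests already build (illustrative; not an excerpt from the changeset):

    object = Object.normalize(activity)

    # before: the caller passed the rendered content alongside the object
    {:ok, url} = HTML.extract_first_external_url(object, object.data["content"])

    # after: the helper takes just the object and derives the content itself
    {:ok, url} = HTML.extract_first_external_url_from_object(object)
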
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Plugs.AdminSecretAuthenticationPlugTest do
-use Pleroma.Web.ConnCase, async: true
+use Pleroma.Web.ConnCase

 import Mock
 import Pleroma.Factory
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Plugs.OAuthScopesPlugTest do
-use Pleroma.Web.ConnCase, async: true
+use Pleroma.Web.ConnCase

 alias Pleroma.Plugs.OAuthScopesPlug
 alias Pleroma.Repo
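
Both plug test modules above drop `async: true` from their `use Pleroma.Web.ConnCase` line. In ExUnit, `async: true` lets a test module run concurrently with other async modules; without it the module runs serially. A plausible reason, not stated in the changeset, is that these tests touch shared global state (for example the module patching done via `import Mock` in the first file), which is not safe to run concurrently. The difference in one line:

    # concurrent with other async test modules
    use Pleroma.Web.ConnCase, async: true

    # serial; safer when tests patch shared/global state (e.g. with Mock)
    use Pleroma.Web.ConnCase
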
@@ -1350,11 +1350,11 @@ def get("https://relay.mastodon.host/actor", _, _, _) do
 {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/relay/relay.json")}}
 end

-def get("http://localhost:4001/", _, "", Accept: "text/html") do
+def get("http://localhost:4001/", _, "", [{"accept", "text/html"}]) do
 {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/7369654.html")}}
 end

-def get("https://osada.macgirvin.com/", _, "", Accept: "text/html") do
+def get("https://osada.macgirvin.com/", _, "", [{"accept", "text/html"}]) do
 {:ok,
 %Tesla.Env{
 status: 200,
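
The mock clauses above stop matching request headers with keyword syntax and instead match a list of lowercased string tuples, presumably the shape the calling code now passes. The two forms are genuinely different patterns: `Accept: "text/html"` in an argument position is keyword-list sugar for `[{:Accept, "text/html"}]` (an atom key with its original casing), so it cannot match `[{"accept", "text/html"}]`. A standalone sketch of the distinction (illustrative Elixir, not from the changeset):

    # keyword syntax builds a list of {atom, value} tuples
    [Accept: "text/html"] == [{:Accept, "text/html"}]   # => true

    # the updated clauses match string-keyed, lowercased pairs instead
    headers = [{"accept", "text/html"}]
    match?([{"accept", "text/html"}], headers)           # => true
    match?([Accept: "text/html"], headers)                # => false
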
Some files were not shown because too many files have changed in this diff.