[#1234] Merge remote-tracking branch 'remotes/upstream/develop' into 1234-mastodon-2-4-3-oauth-scopes

# Conflicts:
#	lib/pleroma/web/activity_pub/activity_pub_controller.ex
Ivan Tashkinov 2019-09-17 22:53:26 +03:00
commit 01c1078015
95 changed files with 3189 additions and 1928 deletions

4
.gitignore vendored
View File

@ -43,3 +43,7 @@ docs/generated_config.md
# Code test coverage
/cover
/Elixir.*.coverdata
.idea
pleroma.iml

View File

@ -30,6 +30,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- AdminAPI: Add "godmode" while fetching user statuses (i.e. admin can see private statuses)
- Improve digest email template
- Pagination: (optional) return `total` alongside `items` when paginating
- Replaced [pleroma_job_queue](https://git.pleroma.social/pleroma/pleroma_job_queue) and `Pleroma.Web.Federator.RetryQueue` with [Oban](https://github.com/sorentwo/oban) (see [`docs/config.md`](docs/config.md) on migrating customized worker / retry settings)
- Introduced [quantum](https://github.com/quantum-elixir/quantum-core) job scheduler
- Admin API: Return `total` when querying for reports
### Fixed
- Following from Osada

View File

@ -51,6 +51,24 @@
telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil
scheduled_jobs =
with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
true <- digest_config[:active] do
[{digest_config[:schedule], {Pleroma.Daemons.DigestEmailDaemon, :perform, []}}]
else
_ -> []
end
scheduled_jobs =
scheduled_jobs ++
[{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
config :pleroma, Pleroma.Scheduler,
global: true,
overlap: true,
timezone: :utc,
jobs: scheduled_jobs
config :pleroma, Pleroma.Captcha,
enabled: false,
seconds_valid: 60,
@ -258,7 +276,7 @@
max_account_fields: 10,
max_remote_account_fields: 20,
account_field_name_length: 512,
account_field_value_length: 512,
account_field_value_length: 2048,
external_user_synchronization: true
config :pleroma, :markup,
@ -313,6 +331,10 @@
follow_handshake_timeout: 500,
sign_object_fetches: true
config :pleroma, :streamer,
workers: 3,
overflow_workers: 2
config :pleroma, :user, deny_follow_blocked: true
config :pleroma, :mrf_normalize_markup, scrub_policy: Pleroma.HTML.Scrubber.Default
@ -451,13 +473,11 @@
"web"
]
config :pleroma, Pleroma.Web.Federator.RetryQueue,
enabled: false,
max_jobs: 20,
initial_timeout: 30,
max_retries: 5
config :pleroma_job_queue, :queues,
config :pleroma, Oban,
repo: Pleroma.Repo,
verbose: false,
prune: {:maxlen, 1500},
queues: [
activity_expiration: 10,
federator_incoming: 50,
federator_outgoing: 50,
@ -466,6 +486,13 @@
transmogrifier: 20,
scheduled_activities: 10,
background: 5
]
config :pleroma, :workers,
retries: [
federator_incoming: 5,
federator_outgoing: 5
]
config :pleroma, :fetch_initial_posts,
enabled: false,

View File

@ -878,9 +878,9 @@
%{
key: :account_field_value_length,
type: :integer,
description: "An account field value maximum length (default: 512)",
description: "An account field value maximum length (default: 2048)",
suggestions: [
512
2048
]
},
%{
@ -1778,44 +1778,73 @@
group: :pleroma_job_queue,
key: :queues,
type: :group,
description: "Pleroma Job Queue configuration: a list of queues with maximum concurrent jobs",
description: "[Deprecated] Replaced with `Oban`/`:queues` (keeping the same format)",
children: []
},
%{
group: :pleroma,
key: Pleroma.Web.Federator.RetryQueue,
type: :group,
description: "[Deprecated] See `Oban` and `:workers` sections for configuration notes",
children: [
%{
key: :federator_outgoing,
key: :max_retries,
type: :integer,
description: "Outgoing federation queue",
suggestions: [50]
description: "[Deprecated] Replaced as `Oban`/`:queues`/`:outgoing_federation` value",
suggestions: []
}
]
},
%{
key: :federator_incoming,
type: :integer,
description: "Incoming federation queue",
suggestions: [50]
group: :pleroma,
key: Oban,
type: :group,
description: """
[Oban](https://github.com/sorentwo/oban) asynchronous job processor configuration.
Note: if you are running PostgreSQL in [`silent_mode`](https://postgresqlco.nf/en/doc/param/silent_mode?version=9.1),
it's advised to set [`log_destination`](https://postgresqlco.nf/en/doc/param/log_destination?version=9.1) to `syslog`,
otherwise `postmaster.log` file may grow because of "you don't own a lock of type ShareLock" warnings
(see https://github.com/sorentwo/oban/issues/52).
""",
children: [
%{
key: :repo,
type: :module,
description: "Application's Ecto repo",
suggestions: [Pleroma.Repo]
},
%{
key: :mailer,
type: :integer,
description: "Email sender queue, see Pleroma.Emails.Mailer",
suggestions: [10]
key: :verbose,
type: :boolean,
description: "Logs verbose mode",
suggestions: [false, true]
},
%{
key: :web_push,
type: :integer,
description: "Web push notifications queue",
suggestions: [50]
key: :prune,
type: [:atom, :tuple],
description:
"Non-retryable jobs [pruning settings](https://github.com/sorentwo/oban#pruning)",
suggestions: [:disabled, {:maxlen, 1500}, {:maxage, 60 * 60}]
},
%{
key: :transmogrifier,
type: :integer,
description: "Transmogrifier queue",
suggestions: [20]
},
%{
key: :scheduled_activities,
type: :integer,
description: "Scheduled activities queue, see Pleroma.ScheduledActivities",
suggestions: [10]
},
key: :queues,
type: :keyword,
description:
"Background jobs queues (keys: queues, values: max numbers of concurrent jobs)",
suggestions: [
[
activity_expiration: 10,
background: 5,
federator_incoming: 50,
federator_outgoing: 50,
mailer: 10,
scheduled_activities: 10,
transmogrifier: 20,
web_push: 50
]
],
children: [
%{
key: :activity_expiration,
type: :integer,
@ -1827,38 +1856,63 @@
type: :integer,
description: "Background queue",
suggestions: [5]
},
%{
key: :federator_incoming,
type: :integer,
description: "Incoming federation queue",
suggestions: [50]
},
%{
key: :federator_outgoing,
type: :integer,
description: "Outgoing federation queue",
suggestions: [50]
},
%{
key: :mailer,
type: :integer,
description: "Email sender queue, see Pleroma.Emails.Mailer",
suggestions: [10]
},
%{
key: :scheduled_activities,
type: :integer,
description: "Scheduled activities queue, see Pleroma.ScheduledActivities",
suggestions: [10]
},
%{
key: :transmogrifier,
type: :integer,
description: "Transmogrifier queue",
suggestions: [20]
},
%{
key: :web_push,
type: :integer,
description: "Web push notifications queue",
suggestions: [50]
}
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.Federator.RetryQueue,
key: :workers,
type: :group,
description: "",
description: "Includes custom worker options not interpretable directly by `Oban`",
children: [
%{
key: :enabled,
type: :boolean,
description: "If set to true, failed federation jobs will be retried",
suggestions: [true, false]
},
%{
key: :max_jobs,
type: :integer,
description: "The maximum amount of parallel federation jobs running at the same time",
suggestions: [20]
},
%{
key: :initial_timeout,
type: :integer,
description: "The initial timeout in seconds",
suggestions: [30]
},
%{
key: :max_retries,
type: :integer,
description: "The maximum number of times a federation job is retried",
suggestions: [5]
key: :retries,
type: :keyword,
description: "Max retry attempts for failed jobs, per `Oban` queue",
suggestions: [
[
federator_incoming: 5,
federator_outgoing: 5
]
]
}
]
},

View File

@ -61,7 +61,11 @@
config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock
config :pleroma_job_queue, disabled: true
config :pleroma, Oban,
queues: false,
prune: :disabled
config :pleroma, Pleroma.Scheduler, jobs: []
config :pleroma, Pleroma.ScheduledActivity,
daily_user_limit: 2,

View File

@ -317,6 +317,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
```json
{
"total" : 1,
"reports": [
{
"account": {

View File

@ -135,7 +135,7 @@ config :pleroma, Pleroma.Emails.Mailer,
* `max_account_fields`: The maximum number of custom fields in the user profile (default: `10`)
* `max_remote_account_fields`: The maximum number of custom fields in the remote user profile (default: `20`)
* `account_field_name_length`: An account field name maximum length (default: `512`)
* `account_field_value_length`: An account field value maximum length (default: `512`)
* `account_field_value_length`: An account field value maximum length (default: `2048`)
* `external_user_synchronization`: Enables synchronization of following/followers counters for external users.
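A hedged sketch of setting these limits, assuming they live under `config :pleroma, :instance` as in the default configuration (values mirror the defaults listed above):
```elixir
config :pleroma, :instance,
  max_account_fields: 10,
  max_remote_account_fields: 20,
  account_field_name_length: 512,
  account_field_value_length: 2048
```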
@ -400,35 +400,71 @@ You can then do
curl "http://localhost:4000/api/pleroma/admin/invite_token?admin_token=somerandomtoken"
```
## :pleroma_job_queue
## Oban
[Pleroma Job Queue](https://git.pleroma.social/pleroma/pleroma_job_queue) configuration: a list of queues with maximum concurrent jobs.
[Oban](https://github.com/sorentwo/oban) asynchronous job processor configuration.
Configuration options described in [Oban readme](https://github.com/sorentwo/oban#usage):
* `repo` - app's Ecto repo (`Pleroma.Repo`)
* `verbose` - logs verbosity
* `prune` - non-retryable jobs [pruning settings](https://github.com/sorentwo/oban#pruning) (`:disabled` / `{:maxlen, value}` / `{:maxage, value}`)
* `queues` - job queues (see below)
Pleroma has the following queues:
* `activity_expiration` - Activity expiration
* `federator_outgoing` - Outgoing federation
* `federator_incoming` - Incoming federation
* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleroma-emails-mailer)
* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleromaemailsmailer)
* `transmogrifier` - Transmogrifier
* `web_push` - Web push notifications
* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivities`](#pleromascheduledactivity)
* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivity`](#pleromascheduledactivity)
Example:
```elixir
config :pleroma_job_queue, :queues,
config :pleroma, Oban,
repo: Pleroma.Repo,
verbose: false,
prune: {:maxlen, 1500},
queues: [
federator_incoming: 50,
federator_outgoing: 50
]
```
This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the `max_jobs` set to `50`.
This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the number of max concurrent jobs set to `50`.
## Pleroma.Web.Federator.RetryQueue
### Migrating `pleroma_job_queue` settings
* `enabled`: If set to `true`, failed federation jobs will be retried
* `max_jobs`: The maximum amount of parallel federation jobs running at the same time.
* `initial_timeout`: The initial timeout in seconds
* `max_retries`: The maximum number of times a federation job is retried
`config :pleroma_job_queue, :queues` is replaced by `config :pleroma, Oban, :queues` and uses the same format (keys are queue names, values are the maximum numbers of concurrent jobs).
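For illustration, a minimal before/after sketch of this migration (queue names and limits are taken from the example above):
```elixir
# Before: pleroma_job_queue
config :pleroma_job_queue, :queues,
  federator_incoming: 50,
  federator_outgoing: 50

# After: Oban, same keyword format under the :queues option
config :pleroma, Oban,
  queues: [
    federator_incoming: 50,
    federator_outgoing: 50
  ]
```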
### Note on running with PostgreSQL in silent mode
If you are running PostgreSQL in [`silent_mode`](https://postgresqlco.nf/en/doc/param/silent_mode?version=9.1), it's advised to set [`log_destination`](https://postgresqlco.nf/en/doc/param/log_destination?version=9.1) to `syslog`,
otherwise `postmaster.log` file may grow because of "you don't own a lock of type ShareLock" warnings (see https://github.com/sorentwo/oban/issues/52).
## :workers
Includes custom worker options not interpretable directly by `Oban`.
* `retries` - a keyword list where keys are `Oban` queue names (see above) and values are the maximum numbers of attempts for failed jobs.
Example:
```elixir
config :pleroma, :workers,
retries: [
federator_incoming: 5,
federator_outgoing: 5
]
```
### Migrating `Pleroma.Web.Federator.RetryQueue` settings
* `max_retries` is replaced with `config :pleroma, :workers, retries: [federator_outgoing: 5]`
* `enabled: false` corresponds to `config :pleroma, :workers, retries: [federator_outgoing: 1]`
* deprecated options: `max_jobs`, `initial_timeout`
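A hedged before/after sketch of this migration (the retry count is illustrative):
```elixir
# Before: Pleroma.Web.Federator.RetryQueue
config :pleroma, Pleroma.Web.Federator.RetryQueue,
  enabled: true,
  max_retries: 5

# After: per-queue retry counts under :workers
config :pleroma, :workers,
  retries: [
    federator_outgoing: 5
  ]
```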
## Pleroma.Web.Metadata
* `providers`: a list of metadata providers to enable. Providers available:
@ -489,6 +525,24 @@ config :auto_linker,
]
```
## Pleroma.Scheduler
Configuration for [Quantum](https://github.com/quantum-elixir/quantum-core) jobs scheduler.
See [Quantum readme](https://github.com/quantum-elixir/quantum-core#usage) for the list of supported options.
Example:
```elixir
config :pleroma, Pleroma.Scheduler,
global: true,
overlap: true,
timezone: :utc,
jobs: [{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
```
The above example defines a single job which invokes `Pleroma.Web.Websub.refresh_subscriptions()` every 6 hours ("0 */6 * * * *", [crontab format](https://en.wikipedia.org/wiki/Cron)).
## Pleroma.ScheduledActivity
* `daily_user_limit`: the number of scheduled activities a user is allowed to create in a single day (Default: `25`)
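A minimal sketch of overriding this limit (only the option listed above is shown; the value is illustrative):
```elixir
config :pleroma, Pleroma.ScheduledActivity,
  daily_user_limit: 25
```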

View File

@ -0,0 +1,63 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Activity.Ir.Topics do
alias Pleroma.Object
alias Pleroma.Web.ActivityPub.Visibility
def get_activity_topics(activity) do
activity
|> Object.normalize()
|> generate_topics(activity)
|> List.flatten()
end
defp generate_topics(%{data: %{"type" => "Answer"}}, _) do
[]
end
defp generate_topics(object, activity) do
["user", "list"] ++ visibility_tags(object, activity)
end
defp visibility_tags(object, activity) do
case Visibility.get_visibility(activity) do
"public" ->
if activity.local do
["public", "public:local"]
else
["public"]
end
|> item_creation_tags(object, activity)
"direct" ->
["direct"]
_ ->
[]
end
end
defp item_creation_tags(tags, %{data: %{"type" => "Create"}} = object, activity) do
tags ++ hashtags_to_topics(object) ++ attachment_topics(object, activity)
end
defp item_creation_tags(tags, _, _) do
tags
end
defp hashtags_to_topics(%{data: %{"tag" => tags}}) do
tags
|> Enum.filter(&is_bitstring(&1))
|> Enum.map(fn tag -> "hashtag:" <> tag end)
end
defp hashtags_to_topics(_), do: []
defp attachment_topics(%{data: %{"attachment" => []}}, _act), do: []
defp attachment_topics(_object, %{local: true}), do: ["public:media", "public:local:media"]
defp attachment_topics(_object, _act), do: ["public:media"]
end

View File

@ -31,34 +31,21 @@ def start(_type, _args) do
children =
[
Pleroma.Repo,
Pleroma.Scheduler,
Pleroma.Config.TransferTask,
Pleroma.Emoji,
Pleroma.Captcha,
Pleroma.FlakeId,
Pleroma.ScheduledActivityWorker,
Pleroma.ActivityExpirationWorker
Pleroma.Daemons.ScheduledActivityDaemon,
Pleroma.Daemons.ActivityExpirationDaemon
] ++
cachex_children() ++
hackney_pool_children() ++
[
Pleroma.Web.Federator.RetryQueue,
Pleroma.Stats,
%{
id: :web_push_init,
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
restart: :temporary
},
%{
id: :federator_init,
start: {Task, :start_link, [&Pleroma.Web.Federator.init/0]},
restart: :temporary
},
%{
id: :internal_fetch_init,
start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
restart: :temporary
}
{Oban, Pleroma.Config.get(Oban)}
] ++
task_children(@env) ++
oauth_cleanup_child(oauth_cleanup_enabled?()) ++
streamer_child(@env) ++
chat_child(@env, chat_enabled?()) ++
@ -70,9 +57,7 @@ def start(_type, _args) do
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Pleroma.Supervisor]
result = Supervisor.start_link(children, opts)
:ok = after_supervisor_start()
result
Supervisor.start_link(children, opts)
end
defp setup_instrumenters do
@ -142,7 +127,7 @@ defp oauth_cleanup_enabled?,
defp streamer_child(:test), do: []
defp streamer_child(_) do
[Pleroma.Web.Streamer]
[Pleroma.Web.Streamer.supervisor()]
end
defp oauth_cleanup_child(true),
@ -165,16 +150,38 @@ defp hackney_pool_children do
end
end
defp after_supervisor_start do
with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
true <- digest_config[:active] do
PleromaJobQueue.schedule(
digest_config[:schedule],
:digest_emails,
Pleroma.DigestEmailWorker
)
defp task_children(:test) do
[
%{
id: :web_push_init,
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
restart: :temporary
},
%{
id: :federator_init,
start: {Task, :start_link, [&Pleroma.Web.Federator.init/0]},
restart: :temporary
}
]
end
:ok
defp task_children(_) do
[
%{
id: :web_push_init,
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
restart: :temporary
},
%{
id: :federator_init,
start: {Task, :start_link, [&Pleroma.Web.Federator.init/0]},
restart: :temporary
},
%{
id: :internal_fetch_init,
start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
restart: :temporary
}
]
end
end

View File

@ -2,13 +2,14 @@
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ActivityExpirationWorker do
defmodule Pleroma.Daemons.ActivityExpirationDaemon do
alias Pleroma.Activity
alias Pleroma.ActivityExpiration
alias Pleroma.Config
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.CommonAPI
require Logger
use GenServer
import Ecto.Query
@ -49,7 +50,10 @@ def perform(:execute, expiration_id) do
def handle_info(:perform, state) do
ActivityExpiration.due_expirations(@schedule_interval)
|> Enum.each(fn expiration ->
PleromaJobQueue.enqueue(:activity_expiration, __MODULE__, [:execute, expiration.id])
Pleroma.Workers.ActivityExpirationWorker.enqueue(
"activity_expiration",
%{"activity_expiration_id" => expiration.id}
)
end)
schedule_next()

View File

@ -2,10 +2,11 @@
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.DigestEmailWorker do
import Ecto.Query
defmodule Pleroma.Daemons.DigestEmailDaemon do
alias Pleroma.Repo
alias Pleroma.Workers.DigestEmailsWorker
@queue_name :digest_emails
import Ecto.Query
def perform do
config = Pleroma.Config.get([:email_notifications, :digest])
@ -20,8 +21,10 @@ def perform do
where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"),
select: u
)
|> Pleroma.Repo.all()
|> Enum.each(&PleromaJobQueue.enqueue(@queue_name, __MODULE__, [&1]))
|> Repo.all()
|> Enum.each(fn user ->
DigestEmailsWorker.enqueue("digest_email", %{"user_id" => user.id})
end)
end
@doc """

View File

@ -2,7 +2,7 @@
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ScheduledActivityWorker do
defmodule Pleroma.Daemons.ScheduledActivityDaemon do
@moduledoc """
Sends scheduled activities to the job queue.
"""
@ -11,6 +11,7 @@ defmodule Pleroma.ScheduledActivityWorker do
alias Pleroma.ScheduledActivity
alias Pleroma.User
alias Pleroma.Web.CommonAPI
use GenServer
require Logger
@ -45,7 +46,10 @@ def perform(:execute, scheduled_activity_id) do
def handle_info(:perform, state) do
ScheduledActivity.due_activities(@schedule_interval)
|> Enum.each(fn scheduled_activity ->
PleromaJobQueue.enqueue(:scheduled_activities, __MODULE__, [:execute, scheduled_activity.id])
Pleroma.Workers.ScheduledActivityWorker.enqueue(
"execute",
%{"activity_id" => scheduled_activity.id}
)
end)
schedule_next()

51
lib/pleroma/delivery.ex Normal file
View File

@ -0,0 +1,51 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Delivery do
use Ecto.Schema
alias Pleroma.Delivery
alias Pleroma.FlakeId
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
import Ecto.Changeset
import Ecto.Query
schema "deliveries" do
belongs_to(:user, User, type: FlakeId)
belongs_to(:object, Object)
end
def changeset(delivery, params \\ %{}) do
delivery
|> cast(params, [:user_id, :object_id])
|> validate_required([:user_id, :object_id])
|> foreign_key_constraint(:object_id)
|> foreign_key_constraint(:user_id)
|> unique_constraint(:user_id, name: :deliveries_user_id_object_id_index)
end
def create(object_id, user_id) do
%Delivery{}
|> changeset(%{user_id: user_id, object_id: object_id})
|> Repo.insert(on_conflict: :nothing)
end
def get(object_id, user_id) do
from(d in Delivery, where: d.user_id == ^user_id and d.object_id == ^object_id)
|> Repo.one()
end
# A hack because user delete activities have a fake id for whatever reason
# TODO: Get rid of this
def delete_all_by_object_id("pleroma:fake_object_id"), do: {0, []}
def delete_all_by_object_id(object_id) do
from(d in Delivery, where: d.object_id == ^object_id)
|> Repo.delete_all()
end
end

View File

@ -9,6 +9,7 @@ defmodule Pleroma.Emails.Mailer do
The module contains functions to deliver email using Swoosh.Mailer.
"""
alias Pleroma.Workers.MailerWorker
alias Swoosh.DeliveryError
@otp_app :pleroma
@ -19,7 +20,12 @@ def enabled?, do: Pleroma.Config.get([__MODULE__, :enabled])
@doc "add email to queue"
def deliver_async(email, config \\ []) do
PleromaJobQueue.enqueue(:mailer, __MODULE__, [:deliver_async, email, config])
encoded_email =
email
|> :erlang.term_to_binary()
|> Base.encode64()
MailerWorker.enqueue("email", %{"encoded_email" => encoded_email, "config" => config})
end
@doc "callback to perform send email from queue"

View File

@ -14,7 +14,7 @@ defmodule Pleroma.FlakeId do
@type t :: binary
@behaviour Ecto.Type
use Ecto.Type
use GenServer
require Logger
alias __MODULE__

View File

@ -90,7 +90,7 @@ def set_reachable(_), do: {:error, nil}
def set_unreachable(url_or_host, unreachable_since \\ nil)
def set_unreachable(url_or_host, unreachable_since) when is_binary(url_or_host) do
unreachable_since = unreachable_since || DateTime.utc_now()
unreachable_since = parse_datetime(unreachable_since) || NaiveDateTime.utc_now()
host = host(url_or_host)
existing_record = Repo.get_by(Instance, %{host: host})
@ -114,4 +114,10 @@ def set_unreachable(url_or_host, unreachable_since) when is_binary(url_or_host)
end
def set_unreachable(_, _), do: {:error, nil}
defp parse_datetime(datetime) when is_binary(datetime) do
NaiveDateTime.from_iso8601(datetime)
end
defp parse_datetime(datetime), do: datetime
end

View File

@ -210,8 +210,10 @@ def create_notification(%Activity{} = activity, %User{} = user) do
unless skip?(activity, user) do
notification = %Notification{user_id: user.id, activity: activity}
{:ok, notification} = Repo.insert(notification)
Streamer.stream("user", notification)
Streamer.stream("user:notification", notification)
["user", "user:notification"]
|> Streamer.stream(notification)
Push.send(notification)
notification
end

View File

@ -20,6 +20,7 @@ defmodule Pleroma.Plugs.Cache do
- `ttl`: An expiration time (time-to-live). This value should be in milliseconds or `nil` to disable expiration. Defaults to `nil`.
- `query_params`: Take URL query string into account (`true`), ignore it (`false`) or limit to specific params only (list). Defaults to `true`.
- `tracking_fun`: A function that is called on successful responses, regardless of whether the response was served from the cache. It should accept a conn as the first argument and the value assigned to `tracking_fun_data` as the second.
Additionally, you can overwrite the TTL inside a controller action by assigning `cache_ttl` to the connection struct:
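For example, a hedged sketch of such an override inside a controller action (the action name and TTL are illustrative; the TTL is in milliseconds):
```elixir
def show(conn, params) do
  conn
  # cache this response for five minutes instead of the plug-level default
  |> assign(:cache_ttl, :timer.minutes(5))
  |> render("show.json", params: params)
end
```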
@ -56,6 +57,11 @@ def call(%{method: "GET"} = conn, opts) do
{:ok, nil} ->
cache_resp(conn, opts)
{:ok, {content_type, body, tracking_fun_data}} ->
conn = opts.tracking_fun.(conn, tracking_fun_data)
send_cached(conn, {content_type, body})
{:ok, record} ->
send_cached(conn, record)
@ -88,9 +94,17 @@ defp cache_resp(conn, opts) do
ttl = Map.get(conn.assigns, :cache_ttl, opts.ttl)
key = cache_key(conn, opts)
content_type = content_type(conn)
record = {content_type, body}
Cachex.put(:web_resp_cache, key, record, ttl: ttl)
conn =
unless opts[:tracking_fun] do
Cachex.put(:web_resp_cache, key, {content_type, body}, ttl: ttl)
conn
else
tracking_fun_data = Map.get(conn.assigns, :tracking_fun_data, nil)
Cachex.put(:web_resp_cache, key, {content_type, body, tracking_fun_data}, ttl: ttl)
opts.tracking_fun.(conn, tracking_fun_data)
end
put_resp_header(conn, "x-cache", "MISS from Pleroma")

View File

@ -15,7 +15,8 @@ def call(%{assigns: %{valid_signature: true}} = conn, _opts) do
end
def call(conn, _opts) do
[signature | _] = get_req_header(conn, "signature")
headers = get_req_header(conn, "signature")
signature = Enum.at(headers, 0)
if signature do
# set (request-target) header to the appropriate value

7
lib/pleroma/scheduler.ex Normal file
View File

@ -0,0 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Scheduler do
use Quantum.Scheduler, otp_app: :pleroma
end

View File

@ -11,6 +11,7 @@ defmodule Pleroma.User do
alias Comeonin.Pbkdf2
alias Ecto.Multi
alias Pleroma.Activity
alias Pleroma.Delivery
alias Pleroma.Keys
alias Pleroma.Notification
alias Pleroma.Object
@ -27,6 +28,7 @@ defmodule Pleroma.User do
alias Pleroma.Web.OStatus
alias Pleroma.Web.RelMe
alias Pleroma.Web.Websub
alias Pleroma.Workers.BackgroundWorker
require Logger
@ -61,6 +63,7 @@ defmodule Pleroma.User do
field(:last_digest_emailed_at, :naive_datetime)
has_many(:notifications, Notification)
has_many(:registrations, Registration)
has_many(:deliveries, Delivery)
embeds_one(:info, User.Info)
timestamps()
@ -147,6 +150,7 @@ def get_cached_follow_state(user, target) do
Cachex.fetch!(:user_cache, key, fn _ -> {:commit, follow_state(user, target)} end)
end
@spec set_follow_state_cache(String.t(), String.t(), String.t()) :: {:ok | :error, boolean()}
def set_follow_state_cache(user_ap_id, target_ap_id, state) do
Cachex.put(
:user_cache,
@ -647,8 +651,9 @@ def get_or_fetch_by_nickname(nickname) do
end
@doc "Fetch some posts when the user has just been federated with"
def fetch_initial_posts(user),
do: PleromaJobQueue.enqueue(:background, __MODULE__, [:fetch_initial_posts, user])
def fetch_initial_posts(user) do
BackgroundWorker.enqueue("fetch_initial_posts", %{"user_id" => user.id})
end
@spec get_followers_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
def get_followers_query(%User{} = user, nil) do
@ -1078,7 +1083,7 @@ def unblock_domain(user, domain) do
end
def deactivate_async(user, status \\ true) do
PleromaJobQueue.enqueue(:background, __MODULE__, [:deactivate_async, user, status])
BackgroundWorker.enqueue("deactivate_user", %{"user_id" => user.id, "status" => status})
end
def deactivate(%User{} = user, status \\ true) do
@ -1106,9 +1111,9 @@ def update_notification_settings(%User{} = user, settings \\ %{}) do
|> update_and_set_cache()
end
@spec delete(User.t()) :: :ok
def delete(%User{} = user),
do: PleromaJobQueue.enqueue(:background, __MODULE__, [:delete, user])
def delete(%User{} = user) do
BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
end
@spec perform(atom(), User.t()) :: {:ok, User.t()}
def perform(:delete, %User{} = user) do
@ -1215,21 +1220,20 @@ def external_users(opts \\ []) do
Repo.all(query)
end
def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
do:
PleromaJobQueue.enqueue(:background, __MODULE__, [
:blocks_import,
blocker,
blocked_identifiers
])
def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do
BackgroundWorker.enqueue("blocks_import", %{
"blocker_id" => blocker.id,
"blocked_identifiers" => blocked_identifiers
})
end
def follow_import(%User{} = follower, followed_identifiers) when is_list(followed_identifiers),
do:
PleromaJobQueue.enqueue(:background, __MODULE__, [
:follow_import,
follower,
followed_identifiers
])
def follow_import(%User{} = follower, followed_identifiers)
when is_list(followed_identifiers) do
BackgroundWorker.enqueue("follow_import", %{
"follower_id" => follower.id,
"followed_identifiers" => followed_identifiers
})
end
def delete_user_activities(%User{ap_id: ap_id} = user) do
ap_id
@ -1639,6 +1643,18 @@ def is_internal_user?(%User{nickname: nil}), do: true
def is_internal_user?(%User{local: true, nickname: "internal." <> _}), do: true
def is_internal_user?(_), do: false
# A hack because user delete activities have a fake id for whatever reason
# TODO: Get rid of this
def get_delivered_users_by_object_id("pleroma:fake_object_id"), do: []
def get_delivered_users_by_object_id(object_id) do
from(u in User,
inner_join: delivery in assoc(u, :deliveries),
where: delivery.object_id == ^object_id
)
|> Repo.all()
end
def change_email(user, email) do
user
|> cast(%{email: email}, [:email])

View File

@ -4,6 +4,7 @@
defmodule Pleroma.Web.ActivityPub.ActivityPub do
alias Pleroma.Activity
alias Pleroma.Activity.Ir.Topics
alias Pleroma.Config
alias Pleroma.Conversation
alias Pleroma.Notification
@ -16,7 +17,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Streamer
alias Pleroma.Web.WebFinger
alias Pleroma.Workers.BackgroundWorker
import Ecto.Query
import Pleroma.Web.ActivityPub.Utils
@ -145,7 +148,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
activity
end
PleromaJobQueue.enqueue(:background, Pleroma.Web.RichMedia.Helpers, [:fetch, activity])
BackgroundWorker.enqueue("fetch_data_for_activity", %{"activity_id" => activity.id})
Notification.create_notifications(activity)
@ -186,9 +189,7 @@ def stream_out_participations(participations) do
participations
|> Repo.preload(:user)
Enum.each(participations, fn participation ->
Pleroma.Web.Streamer.stream("participation", participation)
end)
Streamer.stream("participation", participations)
end
def stream_out_participations(%Object{data: %{"context" => context}}, user) do
@ -207,41 +208,15 @@ def stream_out_participations(%Object{data: %{"context" => context}}, user) do
def stream_out_participations(_, _), do: :noop
def stream_out(activity) do
if activity.data["type"] in ["Create", "Announce", "Delete"] do
object = Object.normalize(activity)
# Do not stream out poll replies
unless object.data["type"] == "Answer" do
Pleroma.Web.Streamer.stream("user", activity)
Pleroma.Web.Streamer.stream("list", activity)
if get_visibility(activity) == "public" do
Pleroma.Web.Streamer.stream("public", activity)
if activity.local do
Pleroma.Web.Streamer.stream("public:local", activity)
def stream_out(%Activity{data: %{"type" => data_type}} = activity)
when data_type in ["Create", "Announce", "Delete"] do
activity
|> Topics.get_activity_topics()
|> Streamer.stream(activity)
end
if activity.data["type"] in ["Create"] do
object.data
|> Map.get("tag", [])
|> Enum.filter(fn tag -> is_bitstring(tag) end)
|> Enum.each(fn tag -> Pleroma.Web.Streamer.stream("hashtag:" <> tag, activity) end)
if object.data["attachment"] != [] do
Pleroma.Web.Streamer.stream("public:media", activity)
if activity.local do
Pleroma.Web.Streamer.stream("public:local:media", activity)
end
end
end
else
if get_visibility(activity) == "direct",
do: Pleroma.Web.Streamer.stream("direct", activity)
end
end
end
def stream_out(_activity) do
:noop
end
def create(%{to: to, actor: actor, context: context, object: object} = params, fake \\ false) do
@ -435,6 +410,7 @@ def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ tru
end
end
@spec block(User.t(), User.t(), String.t() | nil, boolean) :: {:ok, Activity.t() | nil}
def block(blocker, blocked, activity_id \\ nil, local \\ true) do
outgoing_blocks = Config.get([:activitypub, :outgoing_blocks])
unfollow_blocked = Config.get([:activitypub, :unfollow_blocked])
@ -463,10 +439,11 @@ def unblock(blocker, blocked, activity_id \\ nil, local \\ true) do
end
end
@spec flag(map()) :: {:ok, Activity.t()} | any
def flag(
%{
actor: actor,
context: context,
context: _context,
account: account,
statuses: statuses,
content: content
@ -478,14 +455,6 @@ def flag(
additional = params[:additional] || %{}
params = %{
actor: actor,
context: context,
account: account,
statuses: statuses,
content: content
}
additional =
if forward do
Map.merge(additional, %{"to" => [], "cc" => [account.ap_id]})

View File

@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
use Pleroma.Web, :controller
alias Pleroma.Activity
alias Pleroma.Delivery
alias Pleroma.Object
alias Pleroma.Object.Fetcher
alias Pleroma.User
@ -23,7 +24,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
action_fallback(:errors)
plug(Pleroma.Plugs.Cache, [query_params: false] when action in [:activity, :object])
plug(
Pleroma.Plugs.Cache,
[query_params: false, tracking_fun: &__MODULE__.track_object_fetch/2]
when action in [:activity, :object]
)
plug(
Pleroma.Plugs.OAuthScopesPlug,
@ -60,6 +65,7 @@ def object(conn, %{"uuid" => uuid}) do
%Object{} = object <- Object.get_cached_by_ap_id(ap_id),
{_, true} <- {:public?, Visibility.is_public?(object)} do
conn
|> assign(:tracking_fun_data, object.id)
|> set_cache_ttl_for(object)
|> put_resp_content_type("application/activity+json")
|> put_view(ObjectView)
@ -70,6 +76,16 @@ def object(conn, %{"uuid" => uuid}) do
end
end
def track_object_fetch(conn, nil), do: conn
def track_object_fetch(conn, object_id) do
with %{assigns: %{user: %User{id: user_id}}} <- conn do
Delivery.create(object_id, user_id)
end
conn
end
def object_likes(conn, %{"uuid" => uuid, "page" => page}) do
with ap_id <- o_status_url(conn, :object, uuid),
%Object{} = object <- Object.get_cached_by_ap_id(ap_id),
@ -105,6 +121,7 @@ def activity(conn, %{"uuid" => uuid}) do
%Activity{} = activity <- Activity.normalize(ap_id),
{_, true} <- {:public?, Visibility.is_public?(activity)} do
conn
|> maybe_set_tracking_data(activity)
|> set_cache_ttl_for(activity)
|> put_resp_content_type("application/activity+json")
|> put_view(ObjectView)
@ -115,6 +132,13 @@ def activity(conn, %{"uuid" => uuid}) do
end
end
defp maybe_set_tracking_data(conn, %Activity{data: %{"type" => "Create"}} = activity) do
object_id = Object.normalize(activity).id
assign(conn, :tracking_fun_data, object_id)
end
defp maybe_set_tracking_data(conn, _activity), do: conn
defp set_cache_ttl_for(conn, %Activity{object: object}) do
set_cache_ttl_for(conn, object)
end

View File

@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
alias Pleroma.HTTP
alias Pleroma.Web.MediaProxy
alias Pleroma.Workers.BackgroundWorker
require Logger
@ -30,7 +31,7 @@ def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message)
url
|> Enum.each(fn
%{"href" => href} ->
PleromaJobQueue.enqueue(:background, __MODULE__, [:prefetch, href])
BackgroundWorker.enqueue("media_proxy_prefetch", %{"url" => href})
x ->
Logger.debug("Unhandled attachment URL object #{inspect(x)}")
@ -46,7 +47,7 @@ def filter(
%{"type" => "Create", "object" => %{"attachment" => attachments} = _object} = message
)
when is_list(attachments) and length(attachments) > 0 do
PleromaJobQueue.enqueue(:background, __MODULE__, [:preload, message])
BackgroundWorker.enqueue("media_proxy_preload", %{"message" => message})
{:ok, message}
end

View File

@ -5,8 +5,10 @@
defmodule Pleroma.Web.ActivityPub.Publisher do
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Delivery
alias Pleroma.HTTP
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Relay
alias Pleroma.Web.ActivityPub.Transmogrifier
@ -84,6 +86,15 @@ def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = pa
end
end
def publish_one(%{actor_id: actor_id} = params) do
actor = User.get_cached_by_id(actor_id)
params
|> Map.delete(:actor_id)
|> Map.put(:actor, actor)
|> publish_one()
end
defp should_federate?(inbox, public) do
if public do
true
@ -107,7 +118,18 @@ defp recipients(actor, activity) do
{:ok, []}
end
Pleroma.Web.Salmon.remote_users(actor, activity) ++ followers
fetchers =
with %Activity{data: %{"type" => "Delete"}} <- activity,
%Object{id: object_id} <- Object.normalize(activity),
fetchers <- User.get_delivered_users_by_object_id(object_id),
_ <- Delivery.delete_all_by_object_id(object_id) do
fetchers
else
_ ->
[]
end
Pleroma.Web.Salmon.remote_users(actor, activity) ++ followers ++ fetchers
end
defp get_cc_ap_ids(ap_id, recipients) do
@ -159,7 +181,8 @@ def determine_inbox(
Publishes an activity with BCC to all relevant peers.
"""
def publish(actor, %{data: %{"bcc" => bcc}} = activity) when is_list(bcc) and bcc != [] do
def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
when is_list(bcc) and bcc != [] do
public = is_public?(activity)
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
@ -186,7 +209,7 @@ def publish(actor, %{data: %{"bcc" => bcc}} = activity) when is_list(bcc) and bc
Pleroma.Web.Federator.Publisher.enqueue_one(__MODULE__, %{
inbox: inbox,
json: json,
actor: actor,
actor_id: actor.id,
id: activity.data["id"],
unreachable_since: unreachable_since
})
@ -221,7 +244,7 @@ def publish(%User{} = actor, %Activity{} = activity) do
%{
inbox: inbox,
json: json,
actor: actor,
actor_id: actor.id,
id: activity.data["id"],
unreachable_since: unreachable_since
}

View File

@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.Federator
alias Pleroma.Workers.TransmogrifierWorker
import Ecto.Query
@ -1049,9 +1050,9 @@ def upgrade_user_from_ap_id(ap_id) do
with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
{:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
already_ap <- User.ap_enabled?(user),
{:ok, user} <- user |> User.upgrade_changeset(data) |> User.update_and_set_cache() do
{:ok, user} <- user |> User.upgrade_changeset(data, true) |> User.update_and_set_cache() do
unless already_ap do
PleromaJobQueue.enqueue(:transmogrifier, __MODULE__, [:user_upgrade, user])
TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
end
{:ok, user}

View File

@ -33,50 +33,40 @@ def normalize_params(params) do
Map.put(params, "actor", get_ap_id(params["actor"]))
end
def determine_explicit_mentions(%{"tag" => tag} = _object) when is_list(tag) do
tag
|> Enum.filter(fn x -> is_map(x) end)
|> Enum.filter(fn x -> x["type"] == "Mention" end)
|> Enum.map(fn x -> x["href"] end)
@spec determine_explicit_mentions(map()) :: map()
def determine_explicit_mentions(%{"tag" => tag} = _) when is_list(tag) do
Enum.flat_map(tag, fn
%{"type" => "Mention", "href" => href} -> [href]
_ -> []
end)
end
def determine_explicit_mentions(%{"tag" => tag} = object) when is_map(tag) do
Map.put(object, "tag", [tag])
object
|> Map.put("tag", [tag])
|> determine_explicit_mentions()
end
def determine_explicit_mentions(_), do: []
@spec recipient_in_collection(any(), any()) :: boolean()
defp recipient_in_collection(ap_id, coll) when is_binary(coll), do: ap_id == coll
defp recipient_in_collection(ap_id, coll) when is_list(coll), do: ap_id in coll
defp recipient_in_collection(_, _), do: false
@spec recipient_in_message(User.t(), User.t(), map()) :: boolean()
def recipient_in_message(%User{ap_id: ap_id} = recipient, %User{} = actor, params) do
addresses = [params["to"], params["cc"], params["bto"], params["bcc"]]
cond do
recipient_in_collection(ap_id, params["to"]) ->
true
recipient_in_collection(ap_id, params["cc"]) ->
true
recipient_in_collection(ap_id, params["bto"]) ->
true
recipient_in_collection(ap_id, params["bcc"]) ->
true
Enum.any?(addresses, &recipient_in_collection(ap_id, &1)) -> true
# if the message is unaddressed at all, then assume it is directly addressed
# to the recipient
!params["to"] && !params["cc"] && !params["bto"] && !params["bcc"] ->
true
Enum.all?(addresses, &is_nil(&1)) -> true
# if the message is sent from somebody the user is following, then assume it
# is addressed to the recipient
User.following?(recipient, actor) ->
true
true ->
false
User.following?(recipient, actor) -> true
true -> false
end
end
@ -167,14 +157,7 @@ def create_context(context) do
@spec maybe_federate(any()) :: :ok
def maybe_federate(%Activity{local: true} = activity) do
if Pleroma.Config.get!([:instance, :federating]) do
priority =
case activity.data["type"] do
"Delete" -> 10
"Create" -> 1
_ -> 5
end
Pleroma.Web.Federator.publish(activity, priority)
Pleroma.Web.Federator.publish(activity)
end
:ok
@ -186,9 +169,19 @@ def maybe_federate(_), do: :ok
Adds an id and a published date if they aren't there,
also adds it to an included object
"""
def lazy_put_activity_defaults(map, fake? \\ false) do
map =
if not fake? do
@spec lazy_put_activity_defaults(map(), boolean) :: map()
def lazy_put_activity_defaults(map, fake? \\ false)
def lazy_put_activity_defaults(map, true) do
map
|> Map.put_new("id", "pleroma:fakeid")
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", "pleroma:fakecontext")
|> Map.put_new("context_id", -1)
|> lazy_put_object_defaults(true)
end
def lazy_put_activity_defaults(map, _fake?) do
%{data: %{"id" => context}, id: context_id} = create_context(map["context"])
map
@ -196,44 +189,39 @@ def lazy_put_activity_defaults(map, fake? \\ false) do
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", context)
|> Map.put_new("context_id", context_id)
else
map
|> Map.put_new("id", "pleroma:fakeid")
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", "pleroma:fakecontext")
|> Map.put_new("context_id", -1)
|> lazy_put_object_defaults(false)
end
if is_map(map["object"]) do
object = lazy_put_object_defaults(map["object"], map, fake?)
%{map | "object" => object}
else
# Adds an id and published date if they aren't there.
#
@spec lazy_put_object_defaults(map(), boolean()) :: map()
defp lazy_put_object_defaults(%{"object" => map} = activity, true)
when is_map(map) do
object =
map
end
end
@doc """
Adds an id and published date if they aren't there.
"""
def lazy_put_object_defaults(map, activity \\ %{}, fake?)
def lazy_put_object_defaults(map, activity, true = _fake?) do
map
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("id", "pleroma:fake_object_id")
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", activity["context"])
|> Map.put_new("fake", true)
|> Map.put_new("context_id", activity["context_id"])
|> Map.put_new("fake", true)
%{activity | "object" => object}
end
def lazy_put_object_defaults(map, activity, _fake?) do
defp lazy_put_object_defaults(%{"object" => map} = activity, _)
when is_map(map) do
object =
map
|> Map.put_new_lazy("id", &generate_object_id/0)
|> Map.put_new_lazy("published", &make_date/0)
|> Map.put_new("context", activity["context"])
|> Map.put_new("context_id", activity["context_id"])
%{activity | "object" => object}
end
defp lazy_put_object_defaults(activity, _), do: activity
@doc """
Inserts a full object if it is contained in an activity.
"""
@ -352,24 +340,24 @@ defp fetch_likes(object) do
@doc """
Updates a follow activity's state (for locked accounts).
"""
@spec update_follow_state_for_all(Activity.t(), String.t()) :: {:ok, Activity} | {:error, any()}
def update_follow_state_for_all(
%Activity{data: %{"actor" => actor, "object" => object}} = activity,
state
) do
try do
Ecto.Adapters.SQL.query!(
Repo,
"UPDATE activities SET data = jsonb_set(data, '{state}', $1) WHERE data->>'type' = 'Follow' AND data->>'actor' = $2 AND data->>'object' = $3 AND data->>'state' = 'pending'",
[state, actor, object]
)
"Follow"
|> Activity.Queries.by_type()
|> Activity.Queries.by_actor(actor)
|> Activity.Queries.by_object_id(object)
|> where(fragment("data->>'state' = 'pending'"))
|> update(set: [data: fragment("jsonb_set(data, '{state}', ?)", ^state)])
|> Repo.update_all([])
User.set_follow_state_cache(actor, object, state)
activity = Activity.get_by_id(activity.id)
{:ok, activity}
rescue
e ->
{:error, e}
end
end
def update_follow_state(
@ -420,6 +408,7 @@ def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
@doc """
Returns an existing announce activity if the notice has already been announced
"""
@spec get_existing_announce(String.t(), map()) :: Activity.t() | nil
def get_existing_announce(actor, %{data: %{"id" => ap_id}}) do
"Announce"
|> Activity.Queries.by_type()
@ -502,33 +491,35 @@ def make_unlike_data(
|> maybe_put("id", activity_id)
end
@spec add_announce_to_object(Activity.t(), Object.t()) ::
{:ok, Object.t()} | {:error, Ecto.Changeset.t()}
def add_announce_to_object(
%Activity{
data: %{"actor" => actor, "cc" => [Pleroma.Constants.as_public()]}
},
%Activity{data: %{"actor" => actor, "cc" => [Pleroma.Constants.as_public()]}},
object
) do
announcements =
if is_list(object.data["announcements"]) do
Enum.uniq([actor | object.data["announcements"]])
else
[actor]
end
announcements = take_announcements(object)
with announcements <- Enum.uniq([actor | announcements]) do
update_element_in_object("announcement", announcements, object)
end
end
def add_announce_to_object(_, object), do: {:ok, object}
@spec remove_announce_from_object(Activity.t(), Object.t()) ::
{:ok, Object.t()} | {:error, Ecto.Changeset.t()}
def remove_announce_from_object(%Activity{data: %{"actor" => actor}}, object) do
announcements =
if is_list(object.data["announcements"]), do: object.data["announcements"], else: []
with announcements <- announcements |> List.delete(actor) do
with announcements <- List.delete(take_announcements(object), actor) do
update_element_in_object("announcement", announcements, object)
end
end
defp take_announcements(%{data: %{"announcements" => announcements}} = _)
when is_list(announcements),
do: announcements
defp take_announcements(_), do: []
#### Unfollow-related helpers
def make_unfollow_data(follower, followed, follow_activity, activity_id) do
@ -542,6 +533,7 @@ def make_unfollow_data(follower, followed, follow_activity, activity_id) do
end
#### Block-related helpers
@spec fetch_latest_block(User.t(), User.t()) :: Activity.t() | nil
def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
"Block"
|> Activity.Queries.by_type()
@ -590,28 +582,32 @@ def make_create_data(params, additional) do
end
#### Flag-related helpers
def make_flag_data(params, additional) do
status_ap_ids =
Enum.map(params.statuses || [], fn
%Activity{} = act -> act.data["id"]
act when is_map(act) -> act["id"]
act when is_binary(act) -> act
end)
object = [params.account.ap_id] ++ status_ap_ids
@spec make_flag_data(map(), map()) :: map()
def make_flag_data(%{actor: actor, context: context, content: content} = params, additional) do
%{
"type" => "Flag",
"actor" => params.actor.ap_id,
"content" => params.content,
"object" => object,
"context" => params.context,
"actor" => actor.ap_id,
"content" => content,
"object" => build_flag_object(params),
"context" => context,
"state" => "open"
}
|> Map.merge(additional)
end
def make_flag_data(_, _), do: %{}
defp build_flag_object(%{account: account, statuses: statuses} = _) do
[account.ap_id] ++
Enum.map(statuses || [], fn
%Activity{} = act -> act.data["id"]
act when is_map(act) -> act["id"]
act when is_binary(act) -> act
end)
end
defp build_flag_object(_), do: []
@doc """
Fetches the OrderedCollection/OrderedCollectionPage from `from`, limiting the amount of pages fetched after
the first one to `pages_left` pages.

View File

@ -504,11 +504,9 @@ def list_reports(conn, params) do
params
|> Map.put("type", "Flag")
|> Map.put("skip_preload", true)
|> Map.put("total", true)
reports =
[]
|> ActivityPub.fetch_activities(params)
|> Enum.reverse()
reports = ActivityPub.fetch_activities([], params)
conn
|> put_view(ReportView)

View File

@ -12,7 +12,9 @@ defmodule Pleroma.Web.AdminAPI.ReportView do
def render("index.json", %{reports: reports}) do
%{
reports: render_many(reports, __MODULE__, "show.json", as: :report)
reports:
render_many(reports[:items], __MODULE__, "show.json", as: :report) |> Enum.reverse(),
total: reports[:total]
}
end

View File

@ -10,16 +10,17 @@ defmodule Pleroma.Web.Federator do
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.Federator.Publisher
alias Pleroma.Web.Federator.RetryQueue
alias Pleroma.Web.OStatus
alias Pleroma.Web.Websub
alias Pleroma.Workers.PublisherWorker
alias Pleroma.Workers.ReceiverWorker
alias Pleroma.Workers.SubscriberWorker
require Logger
def init do
# 1 minute
Process.sleep(1000 * 60)
refresh_subscriptions()
# To do: consider removing this call in favor of scheduled execution (`quantum`-based)
refresh_subscriptions(schedule_in: 60)
end
@doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
@ -37,50 +38,38 @@ def allowed_incoming_reply_depth?(depth) do
# Client API
def incoming_doc(doc) do
PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc])
ReceiverWorker.enqueue("incoming_doc", %{"body" => doc})
end
def incoming_ap_doc(params) do
PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params])
ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params})
end
def publish(activity, priority \\ 1) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority)
def publish(%{id: "pleroma:fakeid"} = activity) do
perform(:publish, activity)
end
def publish(activity) do
PublisherWorker.enqueue("publish", %{"activity_id" => activity.id})
end
def verify_websub(websub) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub])
SubscriberWorker.enqueue("verify_websub", %{"websub_id" => websub.id})
end
def request_subscription(sub) do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub])
def request_subscription(websub) do
SubscriberWorker.enqueue("request_subscription", %{"websub_id" => websub.id})
end
def refresh_subscriptions do
PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions])
def refresh_subscriptions(worker_args \\ []) do
SubscriberWorker.enqueue("refresh_subscriptions", %{}, worker_args ++ [max_attempts: 1])
end
# Job Worker Callbacks
def perform(:refresh_subscriptions) do
Logger.debug("Federator running refresh subscriptions")
Websub.refresh_subscriptions()
spawn(fn ->
# 6 hours
Process.sleep(1000 * 60 * 60 * 6)
refresh_subscriptions()
end)
end
def perform(:request_subscription, websub) do
Logger.debug("Refreshing #{websub.topic}")
with {:ok, websub} <- Websub.request_subscription(websub) do
Logger.debug("Successfully refreshed #{websub.topic}")
else
_e -> Logger.debug("Couldn't refresh #{websub.topic}")
end
@spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
def perform(:publish_one, module, params) do
apply(module, :publish_one, [params])
end
def perform(:publish, activity) do
@ -92,14 +81,6 @@ def perform(:publish, activity) do
end
end
def perform(:verify_websub, websub) do
Logger.debug(fn ->
"Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
end)
Websub.verify(websub)
end
def perform(:incoming_doc, doc) do
Logger.info("Got document, trying to parse")
OStatus.handle_incoming(doc)
@ -130,22 +111,27 @@ def perform(:incoming_ap_doc, params) do
end
end
def perform(
:publish_single_websub,
%{xml: _xml, topic: _topic, callback: _callback, secret: _secret} = params
) do
case Websub.publish_one(params) do
{:ok, _} ->
:ok
def perform(:request_subscription, websub) do
Logger.debug("Refreshing #{websub.topic}")
{:error, _} ->
RetryQueue.enqueue(params, Websub)
with {:ok, websub} <- Websub.request_subscription(websub) do
Logger.debug("Successfully refreshed #{websub.topic}")
else
_e -> Logger.debug("Couldn't refresh #{websub.topic}")
end
end
def perform(type, _) do
Logger.debug(fn -> "Unknown task: #{type}" end)
{:error, "Don't know what to do with this"}
def perform(:verify_websub, websub) do
Logger.debug(fn ->
"Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
end)
Websub.verify(websub)
end
def perform(:refresh_subscriptions) do
Logger.debug("Federator running refresh subscriptions")
Websub.refresh_subscriptions()
end
def ap_enabled_actor(id) do

View File

@ -6,7 +6,7 @@ defmodule Pleroma.Web.Federator.Publisher do
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.User
alias Pleroma.Web.Federator.RetryQueue
alias Pleroma.Workers.PublisherWorker
require Logger
@ -30,23 +30,11 @@ defmodule Pleroma.Web.Federator.Publisher do
Enqueue publishing a single activity.
"""
@spec enqueue_one(module(), Map.t()) :: :ok
def enqueue_one(module, %{} = params),
do: PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish_one, module, params])
@spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
def perform(:publish_one, module, params) do
case apply(module, :publish_one, [params]) do
{:ok, _} ->
:ok
{:error, _e} ->
RetryQueue.enqueue(params, module)
end
end
def perform(type, _, _) do
Logger.debug("Unknown task: #{type}")
{:error, "Don't know what to do with this"}
def enqueue_one(module, %{} = params) do
PublisherWorker.enqueue(
"publish_one",
%{"module" => to_string(module), "params" => params}
)
end
@doc """

View File

@ -1,239 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Federator.RetryQueue do
use GenServer
require Logger
def init(args) do
queue_table = :ets.new(:pleroma_retry_queue, [:bag, :protected])
{:ok, %{args | queue_table: queue_table, running_jobs: :sets.new()}}
end
def start_link(_) do
enabled =
if Pleroma.Config.get(:env) == :test,
do: true,
else: Pleroma.Config.get([__MODULE__, :enabled], false)
if enabled do
Logger.info("Starting retry queue")
linkres =
GenServer.start_link(
__MODULE__,
%{delivered: 0, dropped: 0, queue_table: nil, running_jobs: nil},
name: __MODULE__
)
maybe_kickoff_timer()
linkres
else
Logger.info("Retry queue disabled")
:ignore
end
end
def enqueue(data, transport, retries \\ 0) do
GenServer.cast(__MODULE__, {:maybe_enqueue, data, transport, retries + 1})
end
def get_stats do
GenServer.call(__MODULE__, :get_stats)
end
def reset_stats do
GenServer.call(__MODULE__, :reset_stats)
end
def get_retry_params(retries) do
if retries > Pleroma.Config.get([__MODULE__, :max_retries]) do
{:drop, "Max retries reached"}
else
{:retry, growth_function(retries)}
end
end
def get_retry_timer_interval do
Pleroma.Config.get([:retry_queue, :interval], 1000)
end
defp ets_count_expires(table, current_time) do
:ets.select_count(
table,
[
{
{:"$1", :"$2"},
[{:"=<", :"$1", {:const, current_time}}],
[true]
}
]
)
end
defp ets_pop_n_expired(table, current_time, desired) do
{popped, _continuation} =
:ets.select(
table,
[
{
{:"$1", :"$2"},
[{:"=<", :"$1", {:const, current_time}}],
[:"$_"]
}
],
desired
)
popped
|> Enum.each(fn e ->
:ets.delete_object(table, e)
end)
popped
end
def maybe_start_job(running_jobs, queue_table) do
# we don't want to hit the ets or the DateTime more times than we have to
# could optimize slightly further by not using the count, and instead grabbing
# up to N objects early...
current_time = DateTime.to_unix(DateTime.utc_now())
n_running_jobs = :sets.size(running_jobs)
if n_running_jobs < Pleroma.Config.get([__MODULE__, :max_jobs]) do
n_ready_jobs = ets_count_expires(queue_table, current_time)
if n_ready_jobs > 0 do
# figure out how many we could start
available_job_slots = Pleroma.Config.get([__MODULE__, :max_jobs]) - n_running_jobs
start_n_jobs(running_jobs, queue_table, current_time, available_job_slots)
else
running_jobs
end
else
running_jobs
end
end
defp start_n_jobs(running_jobs, _queue_table, _current_time, 0) do
running_jobs
end
defp start_n_jobs(running_jobs, queue_table, current_time, available_job_slots)
when available_job_slots > 0 do
candidates = ets_pop_n_expired(queue_table, current_time, available_job_slots)
candidates
|> List.foldl(running_jobs, fn {_, e}, rj ->
{:ok, pid} = Task.start(fn -> worker(e) end)
mref = Process.monitor(pid)
:sets.add_element(mref, rj)
end)
end
def worker({:send, data, transport, retries}) do
case transport.publish_one(data) do
{:ok, _} ->
GenServer.cast(__MODULE__, :inc_delivered)
:delivered
{:error, _reason} ->
enqueue(data, transport, retries)
:retry
end
end
def handle_call(:get_stats, _from, %{delivered: delivery_count, dropped: drop_count} = state) do
{:reply, %{delivered: delivery_count, dropped: drop_count}, state}
end
def handle_call(:reset_stats, _from, %{delivered: delivery_count, dropped: drop_count} = state) do
{:reply, %{delivered: delivery_count, dropped: drop_count},
%{state | delivered: 0, dropped: 0}}
end
def handle_cast(:reset_stats, state) do
{:noreply, %{state | delivered: 0, dropped: 0}}
end
def handle_cast(
{:maybe_enqueue, data, transport, retries},
%{dropped: drop_count, queue_table: queue_table, running_jobs: running_jobs} = state
) do
case get_retry_params(retries) do
{:retry, timeout} ->
:ets.insert(queue_table, {timeout, {:send, data, transport, retries}})
running_jobs = maybe_start_job(running_jobs, queue_table)
{:noreply, %{state | running_jobs: running_jobs}}
{:drop, message} ->
Logger.debug(message)
{:noreply, %{state | dropped: drop_count + 1}}
end
end
def handle_cast(:kickoff_timer, state) do
retry_interval = get_retry_timer_interval()
Process.send_after(__MODULE__, :retry_timer_run, retry_interval)
{:noreply, state}
end
def handle_cast(:inc_delivered, %{delivered: delivery_count} = state) do
{:noreply, %{state | delivered: delivery_count + 1}}
end
def handle_cast(:inc_dropped, %{dropped: drop_count} = state) do
{:noreply, %{state | dropped: drop_count + 1}}
end
def handle_info({:send, data, transport, retries}, %{delivered: delivery_count} = state) do
case transport.publish_one(data) do
{:ok, _} ->
{:noreply, %{state | delivered: delivery_count + 1}}
{:error, _reason} ->
enqueue(data, transport, retries)
{:noreply, state}
end
end
def handle_info(
:retry_timer_run,
%{queue_table: queue_table, running_jobs: running_jobs} = state
) do
maybe_kickoff_timer()
running_jobs = maybe_start_job(running_jobs, queue_table)
{:noreply, %{state | running_jobs: running_jobs}}
end
def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do
%{running_jobs: running_jobs, queue_table: queue_table} = state
running_jobs = :sets.del_element(ref, running_jobs)
running_jobs = maybe_start_job(running_jobs, queue_table)
{:noreply, %{state | running_jobs: running_jobs}}
end
def handle_info(unknown, state) do
Logger.debug("RetryQueue: don't know what to do with #{inspect(unknown)}, ignoring")
{:noreply, state}
end
if Pleroma.Config.get(:env) == :test do
defp growth_function(_retries) do
_shutit = Pleroma.Config.get([__MODULE__, :initial_timeout])
DateTime.to_unix(DateTime.utc_now()) - 1
end
else
defp growth_function(retries) do
round(Pleroma.Config.get([__MODULE__, :initial_timeout]) * :math.pow(retries, 3)) +
DateTime.to_unix(DateTime.utc_now())
end
end
defp maybe_kickoff_timer do
GenServer.cast(__MODULE__, :kickoff_timer)
end
end
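
For reference, the cubic backoff in growth_function/1 above computes an absolute run-at timestamp: the relative delay is initial_timeout * retries^3, to which the current Unix time is added. A minimal sketch of that delay curve, assuming an initial_timeout of 30 seconds (an assumed value for illustration):
# Relative delay component of growth_function/1, assuming initial_timeout = 30.
initial_timeout = 30

for retries <- 1..5 do
  delay = round(initial_timeout * :math.pow(retries, 3))
  IO.puts("retry #{retries}: +#{delay}s")
end
# retry 1: +30s, retry 2: +240s, retry 3: +810s, retry 4: +1920s, retry 5: +3750s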

View File

@ -8,6 +8,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.OAuth.Token
alias Pleroma.Web.Streamer
@behaviour :cowboy_websocket
@ -24,7 +25,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
]
@anonymous_streams ["public", "public:local", "hashtag"]
# Handled by periodic keepalive in Pleroma.Web.Streamer.
# Handled by periodic keepalive in Pleroma.Web.Streamer.Ping.
@timeout :infinity
def init(%{qs: qs} = req, state) do
@ -65,7 +66,7 @@ def websocket_info(:subscribe, state) do
}, topic #{state.topic}"
)
Pleroma.Web.Streamer.add_socket(state.topic, streamer_socket(state))
Streamer.add_socket(state.topic, streamer_socket(state))
{:ok, state}
end
@ -80,7 +81,7 @@ def terminate(reason, _req, state) do
}, topic #{state.topic || "?"}: #{inspect(reason)}"
)
Pleroma.Web.Streamer.remove_socket(state.topic, streamer_socket(state))
Streamer.remove_socket(state.topic, streamer_socket(state))
:ok
end

View File

@ -17,6 +17,7 @@ defmodule Pleroma.Web.OAuth.Token.CleanWorker do
)
alias Pleroma.Web.OAuth.Token
alias Pleroma.Workers.BackgroundWorker
def start_link(_), do: GenServer.start_link(__MODULE__, %{})
@ -27,9 +28,11 @@ def init(_) do
@doc false
def handle_info(:perform, state) do
Token.delete_expired_tokens()
BackgroundWorker.enqueue("clean_expired_tokens", %{})
Process.send_after(self(), :perform, @interval)
{:noreply, state}
end
def perform(:clean), do: Token.delete_expired_tokens()
end
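
The hunk above stops deleting expired tokens inline and instead enqueues an Oban job; a hedged sketch of the resulting round trip, with module names taken from other hunks in this commit:
# Enqueue side, as in handle_info/2 above:
alias Pleroma.Workers.BackgroundWorker

BackgroundWorker.enqueue("clean_expired_tokens", %{})
# Oban later runs the matching BackgroundWorker clause (shown further down):
#   perform(%{"op" => "clean_expired_tokens"}, _job)
#   -> Pleroma.Web.OAuth.Token.CleanWorker.perform(:clean)
#   -> Token.delete_expired_tokens()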

View File

@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Push do
alias Pleroma.Web.Push.Impl
alias Pleroma.Workers.WebPusherWorker
require Logger
@ -31,6 +31,7 @@ def enabled do
end
end
def send(notification),
do: PleromaJobQueue.enqueue(:web_push, Impl, [notification])
def send(notification) do
WebPusherWorker.enqueue("web_push", %{"notification_id" => notification.id})
end
end

View File

@ -81,6 +81,7 @@ defp parse_url(url) do
{:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)
html
|> parse_html
|> maybe_parse()
|> Map.put(:url, url)
|> clean_parsed_data()
@ -91,6 +92,8 @@ defp parse_url(url) do
end
end
defp parse_html(html), do: Floki.parse(html)
defp maybe_parse(html) do
Enum.reduce_while(parsers(), %{}, fn parser, acc ->
case parser.parse(html, acc) do
@ -100,7 +103,8 @@ defp maybe_parse(html) do
end)
end
defp check_parsed_data(%{title: title} = data) when is_binary(title) and byte_size(title) > 0 do
defp check_parsed_data(%{title: title} = data)
when is_binary(title) and byte_size(title) > 0 do
{:ok, data}
end

View File

@ -110,6 +110,7 @@ defmodule Pleroma.Web.Router do
pipeline :http_signature do
plug(Pleroma.Web.Plugs.HTTPSignaturePlug)
plug(Pleroma.Web.Plugs.MappedSignatureToIdentityPlug)
end
scope "/api/pleroma", Pleroma.Web.TwitterAPI do
@ -451,6 +452,7 @@ defmodule Pleroma.Web.Router do
scope "/", Pleroma.Web do
pipe_through(:ostatus)
pipe_through(:http_signature)
get("/objects/:uuid", OStatus.OStatusController, :object)
get("/activities/:uuid", OStatus.OStatusController, :activity)

View File

@ -170,6 +170,15 @@ def publish_one(%{recipient: url, feed: feed} = params) when is_binary(url) do
end
end
def publish_one(%{recipient_id: recipient_id} = params) do
recipient = User.get_cached_by_id(recipient_id)
params
|> Map.delete(:recipient_id)
|> Map.put(:recipient, recipient)
|> publish_one()
end
def publish_one(_), do: :noop
@supported_activities [
@ -218,7 +227,7 @@ def publish(%{info: %{keys: keys}} = user, %{data: %{"type" => type}} = activity
Logger.debug(fn -> "Sending Salmon to #{remote_user.ap_id}" end)
Publisher.enqueue_one(__MODULE__, %{
recipient: remote_user,
recipient_id: remote_user.id,
feed: feed,
unreachable_since: reachable_urls_metadata[remote_user.info.salmon]
})

View File

@ -1,318 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Streamer do
use GenServer
require Logger
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Conversation.Participation
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.MastodonAPI.NotificationView
@keepalive_interval :timer.seconds(30)
def start_link(_) do
GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
end
def add_socket(topic, socket) do
GenServer.cast(__MODULE__, %{action: :add, socket: socket, topic: topic})
end
def remove_socket(topic, socket) do
GenServer.cast(__MODULE__, %{action: :remove, socket: socket, topic: topic})
end
def stream(topic, item) do
GenServer.cast(__MODULE__, %{action: :stream, topic: topic, item: item})
end
def init(args) do
Process.send_after(self(), %{action: :ping}, @keepalive_interval)
{:ok, args}
end
def handle_info(%{action: :ping}, topics) do
topics
|> Map.values()
|> List.flatten()
|> Enum.each(fn socket ->
Logger.debug("Sending keepalive ping")
send(socket.transport_pid, {:text, ""})
end)
Process.send_after(self(), %{action: :ping}, @keepalive_interval)
{:noreply, topics}
end
def handle_cast(%{action: :stream, topic: "direct", item: item}, topics) do
recipient_topics =
User.get_recipients_from_activity(item)
|> Enum.map(fn %{id: id} -> "direct:#{id}" end)
Enum.each(recipient_topics || [], fn user_topic ->
Logger.debug("Trying to push direct message to #{user_topic}\n\n")
push_to_socket(topics, user_topic, item)
end)
{:noreply, topics}
end
def handle_cast(%{action: :stream, topic: "participation", item: participation}, topics) do
user_topic = "direct:#{participation.user_id}"
Logger.debug("Trying to push a conversation participation to #{user_topic}\n\n")
push_to_socket(topics, user_topic, participation)
{:noreply, topics}
end
def handle_cast(%{action: :stream, topic: "list", item: item}, topics) do
# filter the recipient list if the activity is not public, see #270.
recipient_lists =
case Visibility.is_public?(item) do
true ->
Pleroma.List.get_lists_from_activity(item)
_ ->
Pleroma.List.get_lists_from_activity(item)
|> Enum.filter(fn list ->
owner = User.get_cached_by_id(list.user_id)
Visibility.visible_for_user?(item, owner)
end)
end
recipient_topics =
recipient_lists
|> Enum.map(fn %{id: id} -> "list:#{id}" end)
Enum.each(recipient_topics || [], fn list_topic ->
Logger.debug("Trying to push message to #{list_topic}\n\n")
push_to_socket(topics, list_topic, item)
end)
{:noreply, topics}
end
def handle_cast(
%{action: :stream, topic: topic, item: %Notification{} = item},
topics
)
when topic in ["user", "user:notification"] do
topics
|> Map.get("#{topic}:#{item.user_id}", [])
|> Enum.each(fn socket ->
with %User{} = user <- User.get_cached_by_ap_id(socket.assigns[:user].ap_id),
true <- should_send?(user, item) do
send(
socket.transport_pid,
{:text, represent_notification(socket.assigns[:user], item)}
)
end
end)
{:noreply, topics}
end
def handle_cast(%{action: :stream, topic: "user", item: item}, topics) do
Logger.debug("Trying to push to users")
recipient_topics =
User.get_recipients_from_activity(item)
|> Enum.map(fn %{id: id} -> "user:#{id}" end)
Enum.each(recipient_topics, fn topic ->
push_to_socket(topics, topic, item)
end)
{:noreply, topics}
end
def handle_cast(%{action: :stream, topic: topic, item: item}, topics) do
Logger.debug("Trying to push to #{topic}")
Logger.debug("Pushing item to #{topic}")
push_to_socket(topics, topic, item)
{:noreply, topics}
end
def handle_cast(%{action: :add, topic: topic, socket: socket}, sockets) do
topic = internal_topic(topic, socket)
sockets_for_topic = sockets[topic] || []
sockets_for_topic = Enum.uniq([socket | sockets_for_topic])
sockets = Map.put(sockets, topic, sockets_for_topic)
Logger.debug("Got new conn for #{topic}")
{:noreply, sockets}
end
def handle_cast(%{action: :remove, topic: topic, socket: socket}, sockets) do
topic = internal_topic(topic, socket)
sockets_for_topic = sockets[topic] || []
sockets_for_topic = List.delete(sockets_for_topic, socket)
sockets = Map.put(sockets, topic, sockets_for_topic)
Logger.debug("Removed conn for #{topic}")
{:noreply, sockets}
end
def handle_cast(m, state) do
Logger.info("Unknown: #{inspect(m)}, #{inspect(state)}")
{:noreply, state}
end
defp represent_update(%Activity{} = activity, %User{} = user) do
%{
event: "update",
payload:
Pleroma.Web.MastodonAPI.StatusView.render(
"status.json",
activity: activity,
for: user
)
|> Jason.encode!()
}
|> Jason.encode!()
end
defp represent_update(%Activity{} = activity) do
%{
event: "update",
payload:
Pleroma.Web.MastodonAPI.StatusView.render(
"status.json",
activity: activity
)
|> Jason.encode!()
}
|> Jason.encode!()
end
def represent_conversation(%Participation{} = participation) do
%{
event: "conversation",
payload:
Pleroma.Web.MastodonAPI.ConversationView.render("participation.json", %{
participation: participation,
for: participation.user
})
|> Jason.encode!()
}
|> Jason.encode!()
end
@spec represent_notification(User.t(), Notification.t()) :: binary()
defp represent_notification(%User{} = user, %Notification{} = notify) do
%{
event: "notification",
payload:
NotificationView.render(
"show.json",
%{notification: notify, for: user}
)
|> Jason.encode!()
}
|> Jason.encode!()
end
defp should_send?(%User{} = user, %Activity{} = item) do
blocks = user.info.blocks || []
mutes = user.info.mutes || []
reblog_mutes = user.info.muted_reblogs || []
domain_blocks = Pleroma.Web.ActivityPub.MRF.subdomains_regex(user.info.domain_blocks)
with parent when not is_nil(parent) <- Object.normalize(item),
true <- Enum.all?([blocks, mutes, reblog_mutes], &(item.actor not in &1)),
true <- Enum.all?([blocks, mutes], &(parent.data["actor"] not in &1)),
%{host: item_host} <- URI.parse(item.actor),
%{host: parent_host} <- URI.parse(parent.data["actor"]),
false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, item_host),
false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, parent_host),
true <- thread_containment(item, user),
false <- CommonAPI.thread_muted?(user, item) do
true
else
_ -> false
end
end
defp should_send?(%User{} = user, %Notification{activity: activity}) do
should_send?(user, activity)
end
def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = item) do
Enum.each(topics[topic] || [], fn socket ->
# Get the current user so we have up-to-date blocks etc.
if socket.assigns[:user] do
user = User.get_cached_by_ap_id(socket.assigns[:user].ap_id)
if should_send?(user, item) do
send(socket.transport_pid, {:text, represent_update(item, user)})
end
else
send(socket.transport_pid, {:text, represent_update(item)})
end
end)
end
def push_to_socket(topics, topic, %Participation{} = participation) do
Enum.each(topics[topic] || [], fn socket ->
send(socket.transport_pid, {:text, represent_conversation(participation)})
end)
end
def push_to_socket(topics, topic, %Activity{
data: %{"type" => "Delete", "deleted_activity_id" => deleted_activity_id}
}) do
Enum.each(topics[topic] || [], fn socket ->
send(
socket.transport_pid,
{:text, %{event: "delete", payload: to_string(deleted_activity_id)} |> Jason.encode!()}
)
end)
end
def push_to_socket(_topics, _topic, %Activity{data: %{"type" => "Delete"}}), do: :noop
def push_to_socket(topics, topic, item) do
Enum.each(topics[topic] || [], fn socket ->
# Get the current user so we have up-to-date blocks etc.
if socket.assigns[:user] do
user = User.get_cached_by_ap_id(socket.assigns[:user].ap_id)
blocks = user.info.blocks || []
mutes = user.info.mutes || []
with true <- Enum.all?([blocks, mutes], &(item.actor not in &1)),
true <- thread_containment(item, user) do
send(socket.transport_pid, {:text, represent_update(item, user)})
end
else
send(socket.transport_pid, {:text, represent_update(item)})
end
end)
end
defp internal_topic(topic, socket) when topic in ~w[user user:notification direct] do
"#{topic}:#{socket.assigns[:user].id}"
end
defp internal_topic(topic, _), do: topic
@spec thread_containment(Activity.t(), User.t()) :: boolean()
defp thread_containment(_activity, %User{info: %{skip_thread_containment: true}}), do: true
defp thread_containment(activity, user) do
if Config.get([:instance, :skip_thread_containment]) do
true
else
ActivityPub.contain_activity(activity, user)
end
end
end

View File

@ -0,0 +1,33 @@
defmodule Pleroma.Web.Streamer.Ping do
use GenServer
require Logger
alias Pleroma.Web.Streamer.State
alias Pleroma.Web.Streamer.StreamerSocket
@keepalive_interval :timer.seconds(30)
def start_link(opts) do
ping_interval = Keyword.get(opts, :ping_interval, @keepalive_interval)
GenServer.start_link(__MODULE__, %{ping_interval: ping_interval}, name: __MODULE__)
end
def init(%{ping_interval: ping_interval} = args) do
Process.send_after(self(), :ping, ping_interval)
{:ok, args}
end
def handle_info(:ping, %{ping_interval: ping_interval} = state) do
State.get_sockets()
|> Map.values()
|> List.flatten()
|> Enum.each(fn %StreamerSocket{transport_pid: transport_pid} ->
Logger.debug("Sending keepalive ping")
send(transport_pid, {:text, ""})
end)
Process.send_after(self(), :ping, ping_interval)
{:noreply, state}
end
end
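
The optional :ping_interval in start_link/1 above exists mainly so tests can shorten the keepalive; a hedged example of starting Ping on its own, outside the Streamer supervisor:
# Illustrative only; in the application this process is started by
# Pleroma.Web.Streamer.Supervisor with the default 30-second interval.
{:ok, _pid} = Pleroma.Web.Streamer.Ping.start_link(ping_interval: :timer.seconds(5))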

View File

@ -0,0 +1,78 @@
defmodule Pleroma.Web.Streamer.State do
use GenServer
require Logger
alias Pleroma.Web.Streamer.StreamerSocket
@env Mix.env()
def start_link(_) do
GenServer.start_link(__MODULE__, %{sockets: %{}}, name: __MODULE__)
end
def add_socket(topic, socket) do
GenServer.call(__MODULE__, {:add, topic, socket})
end
def remove_socket(topic, socket) do
do_remove_socket(@env, topic, socket)
end
def get_sockets do
%{sockets: stream_sockets} = GenServer.call(__MODULE__, :get_state)
stream_sockets
end
def init(init_arg) do
{:ok, init_arg}
end
def handle_call(:get_state, _from, state) do
{:reply, state, state}
end
def handle_call({:add, topic, socket}, _from, %{sockets: sockets} = state) do
internal_topic = internal_topic(topic, socket)
stream_socket = StreamerSocket.from_socket(socket)
sockets_for_topic =
sockets
|> Map.get(internal_topic, [])
|> List.insert_at(0, stream_socket)
|> Enum.uniq()
state = put_in(state, [:sockets, internal_topic], sockets_for_topic)
Logger.debug("Got new conn for #{topic}")
{:reply, state, state}
end
def handle_call({:remove, topic, socket}, _from, %{sockets: sockets} = state) do
internal_topic = internal_topic(topic, socket)
stream_socket = StreamerSocket.from_socket(socket)
sockets_for_topic =
sockets
|> Map.get(internal_topic, [])
|> List.delete(stream_socket)
state = Kernel.put_in(state, [:sockets, internal_topic], sockets_for_topic)
{:reply, state, state}
end
defp do_remove_socket(:test, _, _) do
:ok
end
defp do_remove_socket(_env, topic, socket) do
GenServer.call(__MODULE__, {:remove, topic, socket})
end
defp internal_topic(topic, socket)
when topic in ~w[user user:notification direct] do
"#{topic}:#{socket.assigns[:user].id}"
end
defp internal_topic(topic, _) do
topic
end
end

View File

@ -0,0 +1,55 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.Streamer do
alias Pleroma.Web.Streamer.State
alias Pleroma.Web.Streamer.Worker
@timeout 60_000
@mix_env Mix.env()
def add_socket(topic, socket) do
State.add_socket(topic, socket)
end
def remove_socket(topic, socket) do
State.remove_socket(topic, socket)
end
def get_sockets do
State.get_sockets()
end
def stream(topics, items) do
if should_send?() do
Task.async(fn ->
:poolboy.transaction(
:streamer_worker,
&Worker.stream(&1, topics, items),
@timeout
)
end)
end
end
def supervisor, do: Pleroma.Web.Streamer.Supervisor
defp should_send? do
handle_should_send(@mix_env)
end
defp handle_should_send(:test) do
case Process.whereis(:streamer_worker) do
nil ->
false
pid ->
Process.alive?(pid)
end
end
defp handle_should_send(_) do
true
end
end
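
A hedged usage sketch of the new facade; socket and activity below are assumed bindings (a websocket state as built by WebsocketHandler and a %Pleroma.Activity{}), not values defined in this commit:
alias Pleroma.Web.Streamer

# `socket` and `activity` are assumed bindings, see note above.
Streamer.add_socket("user:notification", socket)
Streamer.stream("user", activity)
Streamer.stream(["public", "public:local"], activity)
# Each stream/2 call checks a worker out of the :streamer_worker poolboy pool and
# runs Pleroma.Web.Streamer.Worker.stream/3 there, with a 60_000 ms checkout timeout.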

View File

@ -0,0 +1,31 @@
defmodule Pleroma.Web.Streamer.StreamerSocket do
defstruct transport_pid: nil, user: nil
alias Pleroma.User
alias Pleroma.Web.Streamer.StreamerSocket
def from_socket(%{
transport_pid: transport_pid,
assigns: %{user: nil}
}) do
%StreamerSocket{
transport_pid: transport_pid
}
end
def from_socket(%{
transport_pid: transport_pid,
assigns: %{user: %User{} = user}
}) do
%StreamerSocket{
transport_pid: transport_pid,
user: user
}
end
def from_socket(%{transport_pid: transport_pid}) do
%StreamerSocket{
transport_pid: transport_pid
}
end
end
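
A minimal sketch of from_socket/1, using self() as a stand-in transport pid:
alias Pleroma.Web.Streamer.StreamerSocket

StreamerSocket.from_socket(%{transport_pid: self(), assigns: %{user: nil}})
#=> %StreamerSocket{transport_pid: <pid>, user: nil}

StreamerSocket.from_socket(%{transport_pid: self()})
#=> also %StreamerSocket{transport_pid: <pid>, user: nil}, via the catch-all clause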

View File

@ -0,0 +1,33 @@
defmodule Pleroma.Web.Streamer.Supervisor do
use Supervisor
def start_link(opts) do
Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(args) do
children = [
{Pleroma.Web.Streamer.State, args},
{Pleroma.Web.Streamer.Ping, args},
:poolboy.child_spec(:streamer_worker, poolboy_config())
]
opts = [strategy: :one_for_one, name: Pleroma.Web.Streamer.Supervisor]
Supervisor.init(children, opts)
end
defp poolboy_config do
opts =
Pleroma.Config.get(:streamer,
workers: 3,
overflow_workers: 2
)
[
{:name, {:local, :streamer_worker}},
{:worker_module, Pleroma.Web.Streamer.Worker},
{:size, opts[:workers]},
{:max_overflow, opts[:overflow_workers]}
]
end
end

View File

@ -0,0 +1,220 @@
defmodule Pleroma.Web.Streamer.Worker do
use GenServer
require Logger
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Conversation.Participation
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.Streamer.State
alias Pleroma.Web.Streamer.StreamerSocket
alias Pleroma.Web.StreamerView
def start_link(_) do
GenServer.start_link(__MODULE__, %{}, [])
end
def init(init_arg) do
{:ok, init_arg}
end
def stream(pid, topics, items) do
GenServer.call(pid, {:stream, topics, items})
end
def handle_call({:stream, topics, item}, _from, state) when is_list(topics) do
Enum.each(topics, fn t ->
do_stream(%{topic: t, item: item})
end)
{:reply, state, state}
end
def handle_call({:stream, topic, items}, _from, state) when is_list(items) do
Enum.each(items, fn i ->
do_stream(%{topic: topic, item: i})
end)
{:reply, state, state}
end
def handle_call({:stream, topic, item}, _from, state) do
do_stream(%{topic: topic, item: item})
{:reply, state, state}
end
defp do_stream(%{topic: "direct", item: item}) do
recipient_topics =
User.get_recipients_from_activity(item)
|> Enum.map(fn %{id: id} -> "direct:#{id}" end)
Enum.each(recipient_topics, fn user_topic ->
Logger.debug("Trying to push direct message to #{user_topic}\n\n")
push_to_socket(State.get_sockets(), user_topic, item)
end)
end
defp do_stream(%{topic: "participation", item: participation}) do
user_topic = "direct:#{participation.user_id}"
Logger.debug("Trying to push a conversation participation to #{user_topic}\n\n")
push_to_socket(State.get_sockets(), user_topic, participation)
end
defp do_stream(%{topic: "list", item: item}) do
# filter the recipient list if the activity is not public, see #270.
recipient_lists =
case Visibility.is_public?(item) do
true ->
Pleroma.List.get_lists_from_activity(item)
_ ->
Pleroma.List.get_lists_from_activity(item)
|> Enum.filter(fn list ->
owner = User.get_cached_by_id(list.user_id)
Visibility.visible_for_user?(item, owner)
end)
end
recipient_topics =
recipient_lists
|> Enum.map(fn %{id: id} -> "list:#{id}" end)
Enum.each(recipient_topics, fn list_topic ->
Logger.debug("Trying to push message to #{list_topic}\n\n")
push_to_socket(State.get_sockets(), list_topic, item)
end)
end
defp do_stream(%{topic: topic, item: %Notification{} = item})
when topic in ["user", "user:notification"] do
State.get_sockets()
|> Map.get("#{topic}:#{item.user_id}", [])
|> Enum.each(fn %StreamerSocket{transport_pid: transport_pid, user: socket_user} ->
with %User{} = user <- User.get_cached_by_ap_id(socket_user.ap_id),
true <- should_send?(user, item) do
send(transport_pid, {:text, StreamerView.render("notification.json", socket_user, item)})
end
end)
end
defp do_stream(%{topic: "user", item: item}) do
Logger.debug("Trying to push to users")
recipient_topics =
User.get_recipients_from_activity(item)
|> Enum.map(fn %{id: id} -> "user:#{id}" end)
Enum.each(recipient_topics, fn topic ->
push_to_socket(State.get_sockets(), topic, item)
end)
end
defp do_stream(%{topic: topic, item: item}) do
Logger.debug("Trying to push to #{topic}")
Logger.debug("Pushing item to #{topic}")
push_to_socket(State.get_sockets(), topic, item)
end
defp should_send?(%User{} = user, %Activity{} = item) do
blocks = user.info.blocks || []
mutes = user.info.mutes || []
reblog_mutes = user.info.muted_reblogs || []
domain_blocks = Pleroma.Web.ActivityPub.MRF.subdomains_regex(user.info.domain_blocks)
with parent when not is_nil(parent) <- Object.normalize(item),
true <- Enum.all?([blocks, mutes, reblog_mutes], &(item.actor not in &1)),
true <- Enum.all?([blocks, mutes], &(parent.data["actor"] not in &1)),
%{host: item_host} <- URI.parse(item.actor),
%{host: parent_host} <- URI.parse(parent.data["actor"]),
false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, item_host),
false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, parent_host),
true <- thread_containment(item, user),
false <- CommonAPI.thread_muted?(user, item) do
true
else
_ -> false
end
end
defp should_send?(%User{} = user, %Notification{activity: activity}) do
should_send?(user, activity)
end
def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = item) do
Enum.each(topics[topic] || [], fn %StreamerSocket{
transport_pid: transport_pid,
user: socket_user
} ->
# Get the current user so we have up-to-date blocks etc.
if socket_user do
user = User.get_cached_by_ap_id(socket_user.ap_id)
if should_send?(user, item) do
send(transport_pid, {:text, StreamerView.render("update.json", item, user)})
end
else
send(transport_pid, {:text, StreamerView.render("update.json", item)})
end
end)
end
def push_to_socket(topics, topic, %Participation{} = participation) do
Enum.each(topics[topic] || [], fn %StreamerSocket{transport_pid: transport_pid} ->
send(transport_pid, {:text, StreamerView.render("conversation.json", participation)})
end)
end
def push_to_socket(topics, topic, %Activity{
data: %{"type" => "Delete", "deleted_activity_id" => deleted_activity_id}
}) do
Enum.each(topics[topic] || [], fn %StreamerSocket{transport_pid: transport_pid} ->
send(
transport_pid,
{:text, %{event: "delete", payload: to_string(deleted_activity_id)} |> Jason.encode!()}
)
end)
end
def push_to_socket(_topics, _topic, %Activity{data: %{"type" => "Delete"}}), do: :noop
def push_to_socket(topics, topic, item) do
Enum.each(topics[topic] || [], fn %StreamerSocket{
transport_pid: transport_pid,
user: socket_user
} ->
# Get the current user so we have up-to-date blocks etc.
if socket_user do
user = User.get_cached_by_ap_id(socket_user.ap_id)
blocks = user.info.blocks || []
mutes = user.info.mutes || []
with true <- Enum.all?([blocks, mutes], &(item.actor not in &1)),
true <- thread_containment(item, user) do
send(transport_pid, {:text, StreamerView.render("update.json", item, user)})
end
else
send(transport_pid, {:text, StreamerView.render("update.json", item)})
end
end)
end
@spec thread_containment(Activity.t(), User.t()) :: boolean()
defp thread_containment(_activity, %User{info: %{skip_thread_containment: true}}), do: true
defp thread_containment(activity, user) do
if Config.get([:instance, :skip_thread_containment]) do
true
else
ActivityPub.contain_activity(activity, user)
end
end
end

View File

@ -286,12 +286,7 @@ def follow_import(%{assigns: %{user: follower}} = conn, %{"list" => list}) do
String.split(line, ",") |> List.first()
end)
|> List.delete("Account address") do
PleromaJobQueue.enqueue(:background, User, [
:follow_import,
follower,
followed_identifiers
])
User.follow_import(follower, followed_identifiers)
json(conn, "job started")
end
end
@ -302,12 +297,7 @@ def blocks_import(conn, %{"list" => %Plug.Upload{} = listfile}) do
def blocks_import(%{assigns: %{user: blocker}} = conn, %{"list" => list}) do
with blocked_identifiers <- String.split(list) do
PleromaJobQueue.enqueue(:background, User, [
:blocks_import,
blocker,
blocked_identifiers
])
User.blocks_import(blocker, blocked_identifiers)
json(conn, "job started")
end
end

View File

@ -0,0 +1,66 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.StreamerView do
use Pleroma.Web, :view
alias Pleroma.Activity
alias Pleroma.Conversation.Participation
alias Pleroma.Notification
alias Pleroma.User
alias Pleroma.Web.MastodonAPI.NotificationView
def render("update.json", %Activity{} = activity, %User{} = user) do
%{
event: "update",
payload:
Pleroma.Web.MastodonAPI.StatusView.render(
"status.json",
activity: activity,
for: user
)
|> Jason.encode!()
}
|> Jason.encode!()
end
def render("notification.json", %User{} = user, %Notification{} = notify) do
%{
event: "notification",
payload:
NotificationView.render(
"show.json",
%{notification: notify, for: user}
)
|> Jason.encode!()
}
|> Jason.encode!()
end
def render("update.json", %Activity{} = activity) do
%{
event: "update",
payload:
Pleroma.Web.MastodonAPI.StatusView.render(
"status.json",
activity: activity
)
|> Jason.encode!()
}
|> Jason.encode!()
end
def render("conversation.json", %Participation{} = participation) do
%{
event: "conversation",
payload:
Pleroma.Web.MastodonAPI.ConversationView.render("participation.json", %{
participation: participation,
for: participation.user
})
|> Jason.encode!()
}
|> Jason.encode!()
end
end
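
The render clauses above all produce the Mastodon-style streaming envelope: the payload is JSON-encoded once, then wrapped in an event/payload map that is encoded again, so clients receive JSON whose "payload" field is itself a JSON string. A minimal illustration of that double encoding (the inner map is a placeholder, not a real rendered status):
Jason.encode!(%{event: "update", payload: Jason.encode!(%{id: "1"})})
# The outer value is a JSON object whose "payload" field is itself a JSON string.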

View File

@ -0,0 +1,18 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.ActivityExpirationWorker do
use Pleroma.Workers.WorkerHelper, queue: "activity_expiration"
@impl Oban.Worker
def perform(
%{
"op" => "activity_expiration",
"activity_expiration_id" => activity_expiration_id
},
_job
) do
Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, activity_expiration_id)
end
end

View File

@ -0,0 +1,69 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.BackgroundWorker do
alias Pleroma.Activity
alias Pleroma.User
alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy
alias Pleroma.Web.OAuth.Token.CleanWorker
use Pleroma.Workers.WorkerHelper, queue: "background"
@impl Oban.Worker
def perform(%{"op" => "fetch_initial_posts", "user_id" => user_id}, _job) do
user = User.get_cached_by_id(user_id)
User.perform(:fetch_initial_posts, user)
end
def perform(%{"op" => "deactivate_user", "user_id" => user_id, "status" => status}, _job) do
user = User.get_cached_by_id(user_id)
User.perform(:deactivate_async, user, status)
end
def perform(%{"op" => "delete_user", "user_id" => user_id}, _job) do
user = User.get_cached_by_id(user_id)
User.perform(:delete, user)
end
def perform(
%{
"op" => "blocks_import",
"blocker_id" => blocker_id,
"blocked_identifiers" => blocked_identifiers
},
_job
) do
blocker = User.get_cached_by_id(blocker_id)
User.perform(:blocks_import, blocker, blocked_identifiers)
end
def perform(
%{
"op" => "follow_import",
"follower_id" => follower_id,
"followed_identifiers" => followed_identifiers
},
_job
) do
follower = User.get_cached_by_id(follower_id)
User.perform(:follow_import, follower, followed_identifiers)
end
def perform(%{"op" => "clean_expired_tokens"}, _job) do
CleanWorker.perform(:clean)
end
def perform(%{"op" => "media_proxy_preload", "message" => message}, _job) do
MediaProxyWarmingPolicy.perform(:preload, message)
end
def perform(%{"op" => "media_proxy_prefetch", "url" => url}, _job) do
MediaProxyWarmingPolicy.perform(:prefetch, url)
end
def perform(%{"op" => "fetch_data_for_activity", "activity_id" => activity_id}, _job) do
activity = Activity.get_by_id(activity_id)
Pleroma.Web.RichMedia.Helpers.perform(:fetch, activity)
end
end
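
The clauses above dispatch on the string "op" argument; hedged examples of matching enqueue calls, with argument shapes inferred from the clauses (user, blocker and blocked_identifiers are assumed bindings):
alias Pleroma.Workers.BackgroundWorker

# `user`, `blocker`, `blocked_identifiers`: assumed bindings for illustration.
BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})

BackgroundWorker.enqueue("blocks_import", %{
  "blocker_id" => blocker.id,
  "blocked_identifiers" => blocked_identifiers
})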

View File

@ -0,0 +1,16 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.DigestEmailsWorker do
alias Pleroma.User
use Pleroma.Workers.WorkerHelper, queue: "digest_emails"
@impl Oban.Worker
def perform(%{"op" => "digest_email", "user_id" => user_id}, _job) do
user_id
|> User.get_cached_by_id()
|> Pleroma.Daemons.DigestEmailDaemon.perform()
end
end

View File

@ -0,0 +1,15 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.MailerWorker do
use Pleroma.Workers.WorkerHelper, queue: "mailer"
@impl Oban.Worker
def perform(%{"op" => "email", "encoded_email" => encoded_email, "config" => config}, _job) do
encoded_email
|> Base.decode64!()
|> :erlang.binary_to_term()
|> Pleroma.Emails.Mailer.deliver(config)
end
end

View File

@ -0,0 +1,25 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.PublisherWorker do
alias Pleroma.Activity
alias Pleroma.Web.Federator
use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"
def backoff(attempt) when is_integer(attempt) do
Pleroma.Workers.WorkerHelper.sidekiq_backoff(attempt, 5)
end
@impl Oban.Worker
def perform(%{"op" => "publish", "activity_id" => activity_id}, _job) do
activity = Activity.get_by_id(activity_id)
Federator.perform(:publish, activity)
end
def perform(%{"op" => "publish_one", "module" => module_name, "params" => params}, _job) do
params = Map.new(params, fn {k, v} -> {String.to_atom(k), v} end)
Federator.perform(:publish_one, String.to_atom(module_name), params)
end
end
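
Oban stores job arguments as JSON, so the atom-keyed params used when a publish_one job is enqueued come back with string keys; the Map.new/2 call above re-atomizes them before handing off to Federator.perform/3. The re-keying step in isolation:
params = %{"recipient_id" => 42, "unreachable_since" => nil}
Map.new(params, fn {k, v} -> {String.to_atom(k), v} end)
#=> %{recipient_id: 42, unreachable_since: nil}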

View File

@ -0,0 +1,18 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.ReceiverWorker do
alias Pleroma.Web.Federator
use Pleroma.Workers.WorkerHelper, queue: "federator_incoming"
@impl Oban.Worker
def perform(%{"op" => "incoming_doc", "body" => doc}, _job) do
Federator.perform(:incoming_doc, doc)
end
def perform(%{"op" => "incoming_ap_doc", "params" => params}, _job) do
Federator.perform(:incoming_ap_doc, params)
end
end

View File

@ -0,0 +1,12 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.ScheduledActivityWorker do
use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"
@impl Oban.Worker
def perform(%{"op" => "execute", "activity_id" => activity_id}, _job) do
Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, activity_id)
end
end

View File

@ -0,0 +1,26 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.SubscriberWorker do
alias Pleroma.Repo
alias Pleroma.Web.Federator
alias Pleroma.Web.Websub
use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"
@impl Oban.Worker
def perform(%{"op" => "refresh_subscriptions"}, _job) do
Federator.perform(:refresh_subscriptions)
end
def perform(%{"op" => "request_subscription", "websub_id" => websub_id}, _job) do
websub = Repo.get(Websub.WebsubClientSubscription, websub_id)
Federator.perform(:request_subscription, websub)
end
def perform(%{"op" => "verify_websub", "websub_id" => websub_id}, _job) do
websub = Repo.get(Websub.WebsubServerSubscription, websub_id)
Federator.perform(:verify_websub, websub)
end
end

View File

@ -0,0 +1,15 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.TransmogrifierWorker do
alias Pleroma.User
use Pleroma.Workers.WorkerHelper, queue: "transmogrifier"
@impl Oban.Worker
def perform(%{"op" => "user_upgrade", "user_id" => user_id}, _job) do
user = User.get_cached_by_id(user_id)
Pleroma.Web.ActivityPub.Transmogrifier.perform(:user_upgrade, user)
end
end

View File

@ -0,0 +1,20 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.WebPusherWorker do
alias Pleroma.Notification
alias Pleroma.Repo
use Pleroma.Workers.WorkerHelper, queue: "web_push"
@impl Oban.Worker
def perform(%{"op" => "web_push", "notification_id" => notification_id}, _job) do
notification =
Notification
|> Repo.get(notification_id)
|> Repo.preload([:activity])
Pleroma.Web.Push.Impl.perform(notification)
end
end

View File

@ -0,0 +1,46 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.WorkerHelper do
alias Pleroma.Config
alias Pleroma.Workers.WorkerHelper
def worker_args(queue) do
case Config.get([:workers, :retries, queue]) do
nil -> []
max_attempts -> [max_attempts: max_attempts]
end
end
def sidekiq_backoff(attempt, pow \\ 4, base_backoff \\ 15) do
backoff =
:math.pow(attempt, pow) +
base_backoff +
:rand.uniform(2 * base_backoff) * attempt
trunc(backoff)
end
defmacro __using__(opts) do
caller_module = __CALLER__.module
queue = Keyword.fetch!(opts, :queue)
quote do
# Note: `max_attempts` is intended to be overridden in `new/2` call
use Oban.Worker,
queue: unquote(queue),
max_attempts: 1
def enqueue(op, params, worker_args \\ []) do
params = Map.merge(%{"op" => op}, params)
queue_atom = String.to_atom(unquote(queue))
worker_args = worker_args ++ WorkerHelper.worker_args(queue_atom)
unquote(caller_module)
|> apply(:new, [params, worker_args])
|> Pleroma.Repo.insert()
end
end
end
end
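
sidekiq_backoff/3 approximates Sidekiq-style retry backoff: attempt^pow plus a base delay plus random jitter. The deterministic part under the defaults (pow = 4, base_backoff = 15), ignoring the jitter of up to 2 * base_backoff * attempt seconds:
for attempt <- 1..5 do
  trunc(:math.pow(attempt, 4) + 15)
end
#=> [16, 31, 96, 271, 640]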

View File

@ -99,8 +99,10 @@ defp deps do
{:plug_cowboy, "~> 2.0"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_ecto, "~> 4.0"},
{:ecto_sql, "~> 3.1"},
{:ecto_sql, "~> 3.2"},
{:postgrex, ">= 0.13.5"},
{:oban, "~> 0.8.1"},
{:quantum, "~> 2.3"},
{:gettext, "~> 0.15"},
{:comeonin, "~> 4.1.1"},
{:pbkdf2_elixir, "~> 0.12.3"},
@ -131,7 +133,7 @@ defp deps do
{:phoenix_swoosh, "~> 0.2"},
{:gen_smtp, "~> 0.13"},
{:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test},
{:floki, "~> 0.20.0"},
{:floki, "~> 0.23.0"},
{:ex_syslogger, github: "slashmili/ex_syslogger", tag: "1.4.0"},
{:timex, "~> 3.5"},
{:ueberauth, "~> 0.4"},
@ -141,8 +143,8 @@ defp deps do
{:http_signatures,
git: "https://git.pleroma.social/pleroma/http_signatures.git",
ref: "293d77bb6f4a67ac8bde1428735c3b42f22cbb30"},
{:pleroma_job_queue, "~> 0.3"},
{:telemetry, "~> 0.3"},
{:poolboy, "~> 1.5"},
{:prometheus_ex, "~> 3.0"},
{:prometheus_plugs, "~> 1.1"},
{:prometheus_phoenix, "~> 1.3"},

View File

@ -17,12 +17,12 @@
"credo": {:hex, :credo, "0.9.3", "76fa3e9e497ab282e0cf64b98a624aa11da702854c52c82db1bf24e54ab7c97a", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:poison, ">= 0.0.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
"crontab": {:hex, :crontab, "1.1.7", "b9219f0bdc8678b94143655a8f229716c5810c0636a4489f98c0956137e53985", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
"crypt": {:git, "https://github.com/msantos/crypt", "1f2b58927ab57e72910191a7ebaeff984382a1d3", [ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"]},
"db_connection": {:hex, :db_connection, "2.0.6", "bde2f85d047969c5b5800cb8f4b3ed6316c8cb11487afedac4aa5f93fd39abfa", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
"db_connection": {:hex, :db_connection, "2.1.1", "a51e8a2ee54ef2ae6ec41a668c85787ed40cb8944928c191280fe34c15b76ae5", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
"decimal": {:hex, :decimal, "1.8.0", "ca462e0d885f09a1c5a342dbd7c1dcf27ea63548c65a65e67334f4b61803822e", [:mix], [], "hexpm"},
"deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm"},
"earmark": {:hex, :earmark, "1.3.6", "ce1d0675e10a5bb46b007549362bd3f5f08908843957687d8484fe7f37466b19", [:mix], [], "hexpm"},
"ecto": {:hex, :ecto, "3.1.4", "69d852da7a9f04ede725855a35ede48d158ca11a404fe94f8b2fb3b2162cd3c9", [:mix], [{:decimal, "~> 1.6", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
"ecto_sql": {:hex, :ecto_sql, "3.1.3", "2c536139190492d9de33c5fefac7323c5eaaa82e1b9bf93482a14649042f7cd9", [:mix], [{:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.1.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:mariaex, "~> 0.9.1", [hex: :mariaex, repo: "hexpm", optional: true]}, {:myxql, "~> 0.2.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.14.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
"ecto": {:hex, :ecto, "3.2.0", "940e2598813f205223d60c78d66e514afe1db5167ed8075510a59e496619cfb5", [:mix], [{:decimal, "~> 1.6", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
"ecto_sql": {:hex, :ecto_sql, "3.2.0", "751cea597e8deb616084894dd75cbabfdbe7255ff01e8c058ca13f0353a3921b", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.2.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.2.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
"esshd": {:hex, :esshd, "0.1.0", "6f93a2062adb43637edad0ea7357db2702a4b80dd9683482fe00f5134e97f4c1", [:mix], [], "hexpm"},
"eternal": {:hex, :eternal, "1.2.0", "e2a6b6ce3b8c248f7dc31451aefca57e3bdf0e48d73ae5043229380a67614c41", [:mix], [], "hexpm"},
"ex2ms": {:hex, :ex2ms, "1.5.0", "19e27f9212be9a96093fed8cdfbef0a2b56c21237196d26760f11dfcfae58e97", [:mix], [], "hexpm"},
@ -34,8 +34,10 @@
"ex_rated": {:hex, :ex_rated, "1.3.3", "30ecbdabe91f7eaa9d37fa4e81c85ba420f371babeb9d1910adbcd79ec798d27", [:mix], [{:ex2ms, "~> 1.5", [hex: :ex2ms, repo: "hexpm", optional: false]}], "hexpm"},
"ex_syslogger": {:git, "https://github.com/slashmili/ex_syslogger.git", "f3963399047af17e038897c69e20d552e6899e1d", [tag: "1.4.0"]},
"excoveralls": {:hex, :excoveralls, "0.11.1", "dd677fbdd49114fdbdbf445540ec735808250d56b011077798316505064edb2c", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
"floki": {:hex, :floki, "0.20.4", "be42ac911fece24b4c72f3b5846774b6e61b83fe685c2fc9d62093277fb3bc86", [:mix], [{:html_entities, "~> 0.4.0", [hex: :html_entities, repo: "hexpm", optional: false]}, {:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
"floki": {:hex, :floki, "0.23.0", "956ab6dba828c96e732454809fb0bd8d43ce0979b75f34de6322e73d4c917829", [:mix], [{:html_entities, "~> 0.4.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm"},
"gen_smtp": {:hex, :gen_smtp, "0.14.0", "39846a03522456077c6429b4badfd1d55e5e7d0fdfb65e935b7c5e38549d9202", [:rebar3], [], "hexpm"},
"gen_stage": {:hex, :gen_stage, "0.14.2", "6a2a578a510c5bfca8a45e6b27552f613b41cf584b58210f017088d3d17d0b14", [:mix], [], "hexpm"},
"gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"},
"gettext": {:hex, :gettext, "0.17.0", "abe21542c831887a2b16f4c94556db9c421ab301aee417b7c4fbde7fbdbe01ec", [:mix], [], "hexpm"},
"hackney": {:hex, :hackney, "1.15.1", "9f8f471c844b8ce395f7b6d8398139e26ddca9ebc171a8b91342ee15a19963f4", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.4", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"},
"html_entities": {:hex, :html_entities, "0.4.0", "f2fee876858cf6aaa9db608820a3209e45a087c5177332799592142b50e89a6b", [:mix], [], "hexpm"},
@ -46,6 +48,7 @@
"jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"},
"joken": {:hex, :joken, "2.0.1", "ec9ab31bf660f343380da033b3316855197c8d4c6ef597fa3fcb451b326beb14", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm"},
"jose": {:hex, :jose, "1.9.0", "4167c5f6d06ffaebffd15cdb8da61a108445ef5e85ab8f5a7ad926fdf3ada154", [:mix, :rebar3], [{:base64url, "~> 0.0.1", [hex: :base64url, repo: "hexpm", optional: false]}], "hexpm"},
"libring": {:hex, :libring, "1.4.0", "41246ba2f3fbc76b3971f6bce83119dfec1eee17e977a48d8a9cfaaf58c2a8d6", [:mix], [], "hexpm"},
"makeup": {:hex, :makeup, "1.0.0", "671df94cf5a594b739ce03b0d0316aa64312cee2574b6a44becb83cd90fb05dc", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
"makeup_elixir": {:hex, :makeup_elixir, "0.14.0", "cf8b7c66ad1cff4c14679698d532f0b5d45a3968ffbcbfd590339cb57742f1ae", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"},
"meck": {:hex, :meck, "0.8.13", "ffedb39f99b0b99703b8601c6f17c7f76313ee12de6b646e671e3188401f7866", [:rebar3], [], "hexpm"},
@ -57,6 +60,7 @@
"mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
"mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"},
"nimble_parsec": {:hex, :nimble_parsec, "0.5.1", "c90796ecee0289dbb5ad16d3ad06f957b0cd1199769641c961cfe0b97db190e0", [:mix], [], "hexpm"},
"oban": {:hex, :oban, "0.8.1", "4bbf62eb1829f856d69aeb5069ac7036afe07db8221a17de2a9169cc7a58a318", [:mix], [{:ecto_sql, "~> 3.1", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
"parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
"pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
"phoenix": {:hex, :phoenix, "1.4.9", "746d098e10741c334d88143d3c94cab1756435f94387a63441792e66ec0ee974", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.8.1 or ~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
@ -64,22 +68,24 @@
"phoenix_html": {:hex, :phoenix_html, "2.13.1", "fa8f034b5328e2dfa0e4131b5569379003f34bc1fafdaa84985b0b9d2f12e68b", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"phoenix_pubsub": {:hex, :phoenix_pubsub, "1.1.2", "496c303bdf1b2e98a9d26e89af5bba3ab487ba3a3735f74bf1f4064d2a845a3e", [:mix], [], "hexpm"},
"phoenix_swoosh": {:hex, :phoenix_swoosh, "0.2.0", "a7e0b32077cd6d2323ae15198839b05d9caddfa20663fd85787479e81f89520e", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.2", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:swoosh, "~> 0.1", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm"},
"pleroma_job_queue": {:hex, :pleroma_job_queue, "0.3.0", "b84538d621f0c3d6fcc1cff9d5648d3faaf873b8b21b94e6503428a07a48ec47", [:mix], [{:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}], "hexpm"},
"plug": {:hex, :plug, "1.8.2", "0bcce1daa420f189a6491f3940cc77ea7fb1919761175c9c3b59800d897440fc", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"},
"plug_cowboy": {:hex, :plug_cowboy, "2.1.0", "b75768153c3a8a9e8039d4b25bb9b14efbc58e9c4a6e6a270abff1cd30cbe320", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
"plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
"postgrex": {:hex, :postgrex, "0.14.3", "5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
"poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm"},
"postgrex": {:hex, :postgrex, "0.15.1", "23ce3417de70f4c0e9e7419ad85bdabcc6860a6925fe2c6f3b1b5b1e8e47bf2f", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
"prometheus": {:hex, :prometheus, "4.4.1", "1e96073b3ed7788053768fea779cbc896ddc3bdd9ba60687f2ad50b252ac87d6", [:mix, :rebar3], [], "hexpm"},
"prometheus_ecto": {:hex, :prometheus_ecto, "1.4.1", "6c768ea9654de871e5b32fab2eac348467b3021604ebebbcbd8bcbe806a65ed5", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
"prometheus_ex": {:hex, :prometheus_ex, "3.0.5", "fa58cfd983487fc5ead331e9a3e0aa622c67232b3ec71710ced122c4c453a02f", [:mix], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"},
"prometheus_phoenix": {:hex, :prometheus_phoenix, "1.3.0", "c4b527e0b3a9ef1af26bdcfbfad3998f37795b9185d475ca610fe4388fdd3bb5", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
"prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
"quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
"quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
"ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
"recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm"},
"swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm"},
"sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"},
"swoosh": {:hex, :swoosh, "0.23.2", "7dda95ff0bf54a2298328d6899c74dae1223777b43563ccebebb4b5d2b61df38", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm"},
"syslog": {:git, "https://github.com/Vagabond/erlang-syslog.git", "4a6c6f2c996483e86c1320e9553f91d337bcb6aa", [tag: "1.0.5"]},

View File

@ -0,0 +1,6 @@
defmodule Pleroma.Repo.Migrations.AddObanJobsTable do
use Ecto.Migration
defdelegate up, to: Oban.Migrations
defdelegate down, to: Oban.Migrations
end

View File

@ -0,0 +1,12 @@
defmodule Pleroma.Repo.Migrations.CreateDeliveries do
use Ecto.Migration
def change do
create_if_not_exists table(:deliveries) do
add(:object_id, references(:objects, type: :id), null: false)
add(:user_id, references(:users, type: :uuid, on_delete: :delete_all), null: false)
end
create_if_not_exists index(:deliveries, :object_id, name: :deliveries_object_id)
create_if_not_exists(unique_index(:deliveries, [:user_id, :object_id]))
end
end

View File

@ -0,0 +1,11 @@
defmodule Pleroma.Repo.Migrations.UpdateOban do
use Ecto.Migration
def up do
Oban.Migrations.up(version: 4)
end
def down do
Oban.Migrations.down(version: 2)
end
end

View File

@ -0,0 +1,141 @@
defmodule Pleroma.Activity.Ir.TopicsTest do
use Pleroma.DataCase
alias Pleroma.Activity
alias Pleroma.Activity.Ir.Topics
alias Pleroma.Object
require Pleroma.Constants
describe "poll answer" do
test "produce no topics" do
activity = %Activity{object: %Object{data: %{"type" => "Answer"}}}
assert [] == Topics.get_activity_topics(activity)
end
end
describe "non poll answer" do
test "always add user and list topics" do
activity = %Activity{object: %Object{data: %{"type" => "FooBar"}}}
topics = Topics.get_activity_topics(activity)
assert Enum.member?(topics, "user")
assert Enum.member?(topics, "list")
end
end
describe "public visibility" do
setup do
activity = %Activity{
object: %Object{data: %{"type" => "Note"}},
data: %{"to" => [Pleroma.Constants.as_public()]}
}
{:ok, activity: activity}
end
test "produces public topic", %{activity: activity} do
topics = Topics.get_activity_topics(activity)
assert Enum.member?(topics, "public")
end
test "local action produces public:local topic", %{activity: activity} do
activity = %{activity | local: true}
topics = Topics.get_activity_topics(activity)
assert Enum.member?(topics, "public:local")
end
test "non-local action does not produce public:local topic", %{activity: activity} do
activity = %{activity | local: false}
topics = Topics.get_activity_topics(activity)
refute Enum.member?(topics, "public:local")
end
end
describe "public visibility create events" do
setup do
activity = %Activity{
object: %Object{data: %{"type" => "Create", "attachment" => []}},
data: %{"to" => [Pleroma.Constants.as_public()]}
}
{:ok, activity: activity}
end
test "with no attachments doesn't produce public:media topics", %{activity: activity} do
topics = Topics.get_activity_topics(activity)
refute Enum.member?(topics, "public:media")
refute Enum.member?(topics, "public:local:media")
end
test "converts tags to hash tags", %{activity: %{object: %{data: data} = object} = activity} do
tagged_data = Map.put(data, "tag", ["foo", "bar"])
activity = %{activity | object: %{object | data: tagged_data}}
topics = Topics.get_activity_topics(activity)
assert Enum.member?(topics, "hashtag:foo")
assert Enum.member?(topics, "hashtag:bar")
end
test "only converts strinngs to hash tags", %{
activity: %{object: %{data: data} = object} = activity
} do
tagged_data = Map.put(data, "tag", [2])
activity = %{activity | object: %{object | data: tagged_data}}
topics = Topics.get_activity_topics(activity)
refute Enum.member?(topics, "hashtag:2")
end
end
describe "public visibility create events with attachments" do
setup do
activity = %Activity{
object: %Object{data: %{"type" => "Create", "attachment" => ["foo"]}},
data: %{"to" => [Pleroma.Constants.as_public()]}
}
{:ok, activity: activity}
end
test "produce public:media topics", %{activity: activity} do
topics = Topics.get_activity_topics(activity)
assert Enum.member?(topics, "public:media")
end
test "local produces public:local:media topics", %{activity: activity} do
topics = Topics.get_activity_topics(activity)
assert Enum.member?(topics, "public:local:media")
end
test "non-local doesn't produce public:local:media topics", %{activity: activity} do
activity = %{activity | local: false}
topics = Topics.get_activity_topics(activity)
refute Enum.member?(topics, "public:local:media")
end
end
describe "non-public visibility" do
test "produces direct topic" do
activity = %Activity{object: %Object{data: %{"type" => "Note"}}, data: %{"to" => []}}
topics = Topics.get_activity_topics(activity)
assert Enum.member?(topics, "direct")
refute Enum.member?(topics, "public")
refute Enum.member?(topics, "public:local")
refute Enum.member?(topics, "public:media")
refute Enum.member?(topics, "public:local:media")
end
end
end

View File

@ -7,6 +7,7 @@ defmodule Pleroma.ActivityTest do
alias Pleroma.Activity
alias Pleroma.Bookmark
alias Pleroma.Object
alias Pleroma.Tests.ObanHelpers
alias Pleroma.ThreadMute
import Pleroma.Factory
@ -125,7 +126,8 @@ test "when association is not loaded" do
}
{:ok, local_activity} = Pleroma.Web.CommonAPI.post(user, %{"status" => "find me!"})
{:ok, remote_activity} = Pleroma.Web.Federator.incoming_ap_doc(params)
{:ok, job} = Pleroma.Web.Federator.incoming_ap_doc(params)
{:ok, remote_activity} = ObanHelpers.perform(job)
%{local_activity: local_activity, remote_activity: remote_activity, user: user}
end

View File

@ -22,6 +22,8 @@ test "it goes through old direct conversations" do
{:ok, _activity} =
CommonAPI.post(user, %{"visibility" => "direct", "status" => "hey @#{other_user.nickname}"})
Pleroma.Tests.ObanHelpers.perform_all()
Repo.delete_all(Conversation)
Repo.delete_all(Conversation.Participation)

View File

@ -10,7 +10,7 @@ defmodule Pleroma.ActivityExpirationWorkerTest do
test "deletes an activity" do
activity = insert(:note_activity)
expiration = insert(:expiration_in_the_past, %{activity_id: activity.id})
Pleroma.ActivityExpirationWorker.perform(:execute, expiration.id)
Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, expiration.id)
refute Repo.get(Activity, activity.id)
end

View File

@ -2,11 +2,12 @@
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.DigestEmailWorkerTest do
defmodule Pleroma.DigestEmailDaemonTest do
use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.DigestEmailWorker
alias Pleroma.Daemons.DigestEmailDaemon
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.Web.CommonAPI
@ -22,7 +23,10 @@ test "it sends digest emails" do
User.switch_email_notifications(user2, "digest", true)
CommonAPI.post(user, %{"status" => "hey @#{user2.nickname}!"})
DigestEmailWorker.perform()
DigestEmailDaemon.perform()
ObanHelpers.perform_all()
# Performing job(s) enqueued at the previous step
ObanHelpers.perform_all()
assert_received {:email, email}
assert email.to == [{user2.name, user2.email}]

View File

@ -2,7 +2,7 @@
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ScheduledActivityWorkerTest do
defmodule Pleroma.ScheduledActivityDaemonTest do
use Pleroma.DataCase
alias Pleroma.ScheduledActivity
import Pleroma.Factory
@ -10,7 +10,7 @@ defmodule Pleroma.ScheduledActivityWorkerTest do
test "creates a status from the scheduled activity" do
user = insert(:user)
scheduled_activity = insert(:scheduled_activity, user: user, params: %{status: "hi"})
Pleroma.ScheduledActivityWorker.perform(:execute, scheduled_activity.id)
Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, scheduled_activity.id)
refute Repo.get(ScheduledActivity, scheduled_activity.id)
activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))

View File

@ -11,7 +11,6 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
alias Pleroma.Integration.WebsocketClient
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.OAuth
alias Pleroma.Web.Streamer
@path Pleroma.Web.Endpoint.url()
|> URI.parse()
@ -19,14 +18,9 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
|> Map.put(:path, "/api/v1/streaming")
|> URI.to_string()
setup do
GenServer.start(Streamer, %{}, name: Streamer)
on_exit(fn ->
if pid = Process.whereis(Streamer) do
Process.exit(pid, :kill)
end
end)
setup_all do
start_supervised(Pleroma.Web.Streamer.supervisor())
:ok
end
def start_socket(qs \\ nil, headers \\ []) do
@ -43,6 +37,7 @@ test "refuses invalid requests" do
capture_log(fn ->
assert {:error, {400, _}} = start_socket()
assert {:error, {404, _}} = start_socket("?stream=ncjdk")
Process.sleep(30)
end)
end
@ -50,6 +45,7 @@ test "requires authentication and a valid token for protected streams" do
capture_log(fn ->
assert {:error, {403, _}} = start_socket("?stream=user&access_token=aaaaaaaaaaaa")
assert {:error, {403, _}} = start_socket("?stream=user")
Process.sleep(30)
end)
end
@ -108,6 +104,7 @@ test "accepts the 'user' stream", %{token: token} = _state do
assert capture_log(fn ->
assert {:error, {403, "Forbidden"}} = start_socket("?stream=user")
Process.sleep(30)
end) =~ ":badarg"
end
@ -116,6 +113,7 @@ test "accepts the 'user:notification' stream", %{token: token} = _state do
assert capture_log(fn ->
assert {:error, {403, "Forbidden"}} = start_socket("?stream=user:notification")
Process.sleep(30)
end) =~ ":badarg"
end
@ -125,6 +123,8 @@ test "accepts valid token on Sec-WebSocket-Protocol header", %{token: token} do
assert capture_log(fn ->
assert {:error, {403, "Forbidden"}} =
start_socket("?stream=user", [{"Sec-WebSocket-Protocol", "I am a friend"}])
Process.sleep(30)
end) =~ ":badarg"
end
end

View File

@ -8,6 +8,7 @@ defmodule Pleroma.NotificationTest do
import Pleroma.Factory
alias Pleroma.Notification
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.CommonAPI
@ -68,16 +69,7 @@ test "does not create a notification for subscribed users if status is a reply"
end
describe "create_notification" do
setup do
GenServer.start(Streamer, %{}, name: Streamer)
on_exit(fn ->
if pid = Process.whereis(Streamer) do
Process.exit(pid, :kill)
end
end)
end
@tag needs_streamer: true
test "it creates a notification for user and send to the 'user' and the 'user:notification' stream" do
user = insert(:user)
task = Task.async(fn -> assert_receive {:text, _}, 4_000 end)
@ -588,7 +580,8 @@ test "notifications are deleted if a local user is deleted" do
refute Enum.empty?(Notification.for_user(other_user))
User.delete(user)
{:ok, job} = User.delete(user)
ObanHelpers.perform(job)
assert Enum.empty?(Notification.for_user(other_user))
end
@ -633,6 +626,7 @@ test "notifications are deleted if a remote user is deleted" do
}
{:ok, _delete_activity} = Transmogrifier.handle_incoming(delete_user_message)
ObanHelpers.perform_all()
assert Enum.empty?(Notification.for_user(local_user))
end

View File

@ -40,6 +40,10 @@ defmodule Pleroma.Web.ConnCase do
Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, {:shared, self()})
end
if tags[:needs_streamer] do
start_supervised(Pleroma.Web.Streamer.supervisor())
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end

View File

@ -39,6 +39,10 @@ defmodule Pleroma.DataCase do
Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, {:shared, self()})
end
if tags[:needs_streamer] do
start_supervised(Pleroma.Web.Streamer.supervisor())
end
:ok
end
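With the case templates above honoring the needs_streamer tag, individual tests no longer start the Streamer GenServer by hand; they opt in via the tag and the supervisor is started (and torn down) by start_supervised/1. A minimal sketch of the pattern, using a hypothetical test module name (Pleroma.ExampleStreamTest is not part of this change):
defmodule Pleroma.ExampleStreamTest do
  use Pleroma.DataCase

  # The case template starts Pleroma.Web.Streamer.supervisor() for tagged tests.
  @tag needs_streamer: true
  test "the streamer supervisor is available" do
    assert is_map(Pleroma.Web.Streamer.get_sockets())
  end
end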

View File

@ -0,0 +1,42 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Tests.ObanHelpers do
@moduledoc """
Oban test helpers.
"""
alias Pleroma.Repo
def perform_all do
Oban.Job
|> Repo.all()
|> perform()
end
def perform(%Oban.Job{} = job) do
res = apply(String.to_existing_atom("Elixir." <> job.worker), :perform, [job.args, job])
Repo.delete(job)
res
end
def perform(jobs) when is_list(jobs) do
for job <- jobs, do: perform(job)
end
def member?(%{} = job_args, jobs) when is_list(jobs) do
Enum.any?(jobs, fn job ->
member?(job_args, job.args)
end)
end
def member?(%{} = test_attrs, %{} = attrs) do
Enum.all?(
test_attrs,
fn {k, _v} -> member?(test_attrs[k], attrs[k]) end
)
end
def member?(x, y), do: x == y
end
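These helpers run enqueued Oban jobs inline so the tests stay synchronous: perform_all/0 drains every job currently stored by Oban, perform/1 executes a single %Oban.Job{} (or a list, such as the one returned by Oban.Testing.all_enqueued/1), and member?/2 checks whether a job with matching args was enqueued. A minimal usage sketch with a hypothetical test module name (Pleroma.ExampleObanTest is illustrative only):
defmodule Pleroma.ExampleObanTest do
  use Pleroma.DataCase
  use Oban.Testing, repo: Pleroma.Repo

  alias Pleroma.Tests.ObanHelpers

  test "runs enqueued jobs inline" do
    # ... code under test enqueues Oban jobs here ...

    # Drain everything that was enqueued:
    ObanHelpers.perform_all()

    # Or run only the jobs of a specific worker, as several tests below do:
    ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.PublisherWorker))
  end
end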

View File

@ -4,6 +4,7 @@ defmodule Mix.Tasks.Pleroma.DigestTest do
import Pleroma.Factory
import Swoosh.TestAssertions
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Web.CommonAPI
setup_all do
@ -39,6 +40,8 @@ test "Sends digest to the given user" do
:ok = Mix.Tasks.Pleroma.Digest.run(["test", user2.nickname, yesterday_date])
ObanHelpers.perform_all()
assert_receive {:mix_shell, :info, [message]}
assert message =~ "Digest email have been sent"

View File

@ -7,14 +7,16 @@ defmodule Pleroma.UserTest do
alias Pleroma.Builders.UserBuilder
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.CommonAPI
use Pleroma.DataCase
use Oban.Testing, repo: Pleroma.Repo
import Pleroma.Factory
import Mock
import Pleroma.Factory
setup_all do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
@ -709,7 +711,9 @@ test "it imports user followings from list" do
user3.nickname
]
result = User.follow_import(user1, identifiers)
{:ok, job} = User.follow_import(user1, identifiers)
result = ObanHelpers.perform(job)
assert is_list(result)
assert result == [user2, user3]
end
@ -920,7 +924,9 @@ test "it imports user blocks from list" do
user3.nickname
]
result = User.blocks_import(user1, identifiers)
{:ok, job} = User.blocks_import(user1, identifiers)
result = ObanHelpers.perform(job)
assert is_list(result)
assert result == [user2, user3]
end
@ -1037,7 +1043,9 @@ test ".delete_user_activities deletes all create activities", %{user: user} do
test "it deletes deactivated user" do
{:ok, user} = insert(:user, info: %{deactivated: true}) |> User.set_cache()
assert {:ok, _} = User.delete(user)
{:ok, job} = User.delete(user)
{:ok, _user} = ObanHelpers.perform(job)
refute User.get_by_id(user.id)
end
@ -1055,7 +1063,8 @@ test "it deletes a user, all follow relationships and all activities", %{user: u
{:ok, like_two, _} = CommonAPI.favorite(activity.id, follower)
{:ok, repeat, _} = CommonAPI.repeat(activity_two.id, user)
{:ok, _} = User.delete(user)
{:ok, job} = User.delete(user)
{:ok, _user} = ObanHelpers.perform(job)
follower = User.get_cached_by_id(follower.id)
@ -1087,12 +1096,18 @@ test "it deletes a user, all follow relationships and all activities", %{user: u
{:ok, follower} = User.get_or_fetch_by_ap_id("http://mastodon.example.org/users/admin")
{:ok, _} = User.follow(follower, user)
{:ok, _user} = User.delete(user)
{:ok, job} = User.delete(user)
{:ok, _user} = ObanHelpers.perform(job)
assert called(
Pleroma.Web.ActivityPub.Publisher.publish_one(%{
inbox: "http://mastodon.example.org/inbox"
})
assert ObanHelpers.member?(
%{
"op" => "publish_one",
"params" => %{
"inbox" => "http://mastodon.example.org/inbox",
"id" => "pleroma:fakeid"
}
},
all_enqueued(worker: Pleroma.Workers.PublisherWorker)
)
end
end
@ -1186,7 +1201,8 @@ test "invalidate_cache works" do
test "User.delete() plugs any possible zombie objects" do
user = insert(:user)
{:ok, _} = User.delete(user)
{:ok, job} = User.delete(user)
{:ok, _} = ObanHelpers.perform(job)
{:ok, cached_user} = Cachex.get(:user_cache, "ap_id:#{user.ap_id}")

View File

@ -4,16 +4,21 @@
defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
use Pleroma.Web.ConnCase
use Oban.Testing, repo: Pleroma.Repo
import Pleroma.Factory
alias Pleroma.Activity
alias Pleroma.Delivery
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ObjectView
alias Pleroma.Web.ActivityPub.Relay
alias Pleroma.Web.ActivityPub.UserView
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.CommonAPI
alias Pleroma.Workers.ReceiverWorker
setup_all do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
@ -365,7 +370,8 @@ test "it inserts an incoming activity into the database", %{conn: conn} do
|> post("/inbox", data)
assert "ok" == json_response(conn, 200)
:timer.sleep(500)
ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
assert Activity.get_by_ap_id(data["id"])
end
@ -407,7 +413,7 @@ test "it inserts an incoming activity into the database", %{conn: conn, data: da
|> post("/users/#{user.nickname}/inbox", data)
assert "ok" == json_response(conn, 200)
:timer.sleep(500)
ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
assert Activity.get_by_ap_id(data["id"])
end
@ -436,7 +442,7 @@ test "it accepts messages from actors that are followed by the user", %{
|> post("/users/#{recipient.nickname}/inbox", data)
assert "ok" == json_response(conn, 200)
:timer.sleep(500)
ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
assert Activity.get_by_ap_id(data["id"])
end
@ -526,6 +532,8 @@ test "it removes all follower collections but actor's", %{conn: conn} do
|> post("/users/#{recipient.nickname}/inbox", data)
|> json_response(200)
ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
activity = Activity.get_by_ap_id(data["id"])
assert activity.id
@ -601,6 +609,7 @@ test "it inserts an incoming create activity into the database", %{conn: conn} d
|> post("/users/#{user.nickname}/outbox", data)
result = json_response(conn, 201)
assert Activity.get_by_ap_id(result["id"])
end
@ -885,4 +894,86 @@ test "it works for more than 10 users", %{conn: conn} do
assert result["totalItems"] == 15
end
end
describe "delivery tracking" do
test "it tracks a signed object fetch", %{conn: conn} do
user = insert(:user, local: false)
activity = insert(:note_activity)
object = Object.normalize(activity)
object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
conn
|> put_req_header("accept", "application/activity+json")
|> assign(:user, user)
|> get(object_path)
|> json_response(200)
assert Delivery.get(object.id, user.id)
end
test "it tracks a signed activity fetch", %{conn: conn} do
user = insert(:user, local: false)
activity = insert(:note_activity)
object = Object.normalize(activity)
activity_path = String.trim_leading(activity.data["id"], Pleroma.Web.Endpoint.url())
conn
|> put_req_header("accept", "application/activity+json")
|> assign(:user, user)
|> get(activity_path)
|> json_response(200)
assert Delivery.get(object.id, user.id)
end
test "it tracks a signed object fetch when the json is cached", %{conn: conn} do
user = insert(:user, local: false)
other_user = insert(:user, local: false)
activity = insert(:note_activity)
object = Object.normalize(activity)
object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
conn
|> put_req_header("accept", "application/activity+json")
|> assign(:user, user)
|> get(object_path)
|> json_response(200)
build_conn()
|> put_req_header("accept", "application/activity+json")
|> assign(:user, other_user)
|> get(object_path)
|> json_response(200)
assert Delivery.get(object.id, user.id)
assert Delivery.get(object.id, other_user.id)
end
test "it tracks a signed activity fetch when the json is cached", %{conn: conn} do
user = insert(:user, local: false)
other_user = insert(:user, local: false)
activity = insert(:note_activity)
object = Object.normalize(activity)
activity_path = String.trim_leading(activity.data["id"], Pleroma.Web.Endpoint.url())
conn
|> put_req_header("accept", "application/activity+json")
|> assign(:user, user)
|> get(activity_path)
|> json_response(200)
build_conn()
|> put_req_header("accept", "application/activity+json")
|> assign(:user, other_user)
|> get(activity_path)
|> json_response(200)
assert Delivery.get(object.id, user.id)
assert Delivery.get(object.id, other_user.id)
end
end
end

View File

@ -38,9 +38,7 @@ test "it streams them out" do
stream: fn _, _ -> nil end do
ActivityPub.stream_out_participations(conversation.participations)
Enum.each(participations, fn participation ->
assert called(Pleroma.Web.Streamer.stream("participation", participation))
end)
assert called(Pleroma.Web.Streamer.stream("participation", participations))
end
end
end
@ -686,7 +684,7 @@ test "returns reblogs for users for whom reblogs have not been muted" do
user = insert(:user)
{:ok, like_activity, _object} = ActivityPub.like(user, object_activity)
assert called(Pleroma.Web.Federator.publish(like_activity, 5))
assert called(Pleroma.Web.Federator.publish(like_activity))
end
test "returns exist activity if object already liked" do
@ -747,7 +745,7 @@ test "adds a like activity to the db" do
{:ok, unlike_activity, _, object} = ActivityPub.unlike(user, object)
assert object.data["like_count"] == 0
assert called(Pleroma.Web.Federator.publish(unlike_activity, 5))
assert called(Pleroma.Web.Federator.publish(unlike_activity))
end
test "unliking a previously liked object" do

View File

@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
use Pleroma.DataCase
alias Pleroma.HTTP
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy
import Mock
@ -24,6 +25,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
test "it prefetches media proxy URIs" do
with_mock HTTP, get: fn _, _, _ -> {:ok, []} end do
MediaProxyWarmingPolicy.filter(@message)
ObanHelpers.perform_all()
# Performing jobs that have just been enqueued
ObanHelpers.perform_all()
assert called(HTTP.get(:_, :_, :_))
end
end

View File

@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.PublisherTest do
use Pleroma.DataCase
use Pleroma.Web.ConnCase
import ExUnit.CaptureLog
import Pleroma.Factory
@ -12,7 +12,9 @@ defmodule Pleroma.Web.ActivityPub.PublisherTest do
alias Pleroma.Activity
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.Web.ActivityPub.Publisher
alias Pleroma.Web.CommonAPI
@as_public "https://www.w3.org/ns/activitystreams#Public"
@ -263,10 +265,74 @@ test "it returns inbox for messages involving single recipients in total" do
assert called(
Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
inbox: "https://domain.com/users/nick1/inbox",
actor: actor,
actor_id: actor.id,
id: note_activity.data["id"]
})
)
end
test_with_mock "publishes a delete activity to peers who signed fetch requests to the create acitvity/object.",
Pleroma.Web.Federator.Publisher,
[:passthrough],
[] do
fetcher =
insert(:user,
local: false,
info: %{
ap_enabled: true,
source_data: %{"inbox" => "https://domain.com/users/nick1/inbox"}
}
)
another_fetcher =
insert(:user,
local: false,
info: %{
ap_enabled: true,
source_data: %{"inbox" => "https://domain2.com/users/nick1/inbox"}
}
)
actor = insert(:user)
note_activity = insert(:note_activity, user: actor)
object = Object.normalize(note_activity)
activity_path = String.trim_leading(note_activity.data["id"], Pleroma.Web.Endpoint.url())
object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
build_conn()
|> put_req_header("accept", "application/activity+json")
|> assign(:user, fetcher)
|> get(object_path)
|> json_response(200)
build_conn()
|> put_req_header("accept", "application/activity+json")
|> assign(:user, another_fetcher)
|> get(activity_path)
|> json_response(200)
{:ok, delete} = CommonAPI.delete(note_activity.id, actor)
res = Publisher.publish(actor, delete)
assert res == :ok
assert called(
Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
inbox: "https://domain.com/users/nick1/inbox",
actor_id: actor.id,
id: delete.data["id"]
})
)
assert called(
Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
inbox: "https://domain2.com/users/nick1/inbox",
actor_id: actor.id,
id: delete.data["id"]
})
)
end
end
end

View File

@ -99,7 +99,7 @@ test "returns error when object is unknown" do
assert activity.data["type"] == "Announce"
assert activity.data["actor"] == service_actor.ap_id
assert activity.data["object"] == obj.data["id"]
assert called(Pleroma.Web.Federator.publish(activity, 5))
assert called(Pleroma.Web.Federator.publish(activity))
end
test_with_mock "returns announce activity and not publish to federate",
@ -113,7 +113,7 @@ test "returns error when object is unknown" do
assert activity.data["type"] == "Announce"
assert activity.data["actor"] == service_actor.ap_id
assert activity.data["object"] == obj.data["id"]
refute called(Pleroma.Web.Federator.publish(activity, 5))
refute called(Pleroma.Web.Federator.publish(activity))
end
end
end

View File

@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
alias Pleroma.Object
alias Pleroma.Object.Fetcher
alias Pleroma.Repo
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
@ -648,6 +649,7 @@ test "it works for incoming user deletes" do
|> Poison.decode!()
{:ok, _} = Transmogrifier.handle_incoming(data)
ObanHelpers.perform_all()
refute User.get_cached_by_ap_id(ap_id)
end
@ -1210,6 +1212,8 @@ test "it upgrades a user to activitypub" do
assert user.info.note_count == 1
{:ok, user} = Transmogrifier.upgrade_user_from_ap_id("https://niu.moe/users/rye")
ObanHelpers.perform_all()
assert user.info.ap_enabled
assert user.info.note_count == 1
assert user.follower_address == "https://niu.moe/users/rye/followers"

View File

@ -87,6 +87,18 @@ test "works with an object that has only IR tags" do
assert Utils.determine_explicit_mentions(object) == []
end
test "works with an object has tags as map" do
object = %{
"tag" => %{
"type" => "Mention",
"href" => "https://example.com/~alyssa",
"name" => "Alyssa P. Hacker"
}
}
assert Utils.determine_explicit_mentions(object) == ["https://example.com/~alyssa"]
end
end
describe "make_unlike_data/3" do
@ -300,8 +312,8 @@ test "updates the state of all Follow activities with the same actor and object"
{:ok, follow_activity_two} =
Utils.update_follow_state_for_all(follow_activity_two, "accept")
assert Repo.get(Activity, follow_activity.id).data["state"] == "accept"
assert Repo.get(Activity, follow_activity_two.id).data["state"] == "accept"
assert refresh_record(follow_activity).data["state"] == "accept"
assert refresh_record(follow_activity_two).data["state"] == "accept"
end
end
@ -323,8 +335,8 @@ test "updates the state of the given follow activity" do
{:ok, follow_activity_two} = Utils.update_follow_state(follow_activity_two, "reject")
assert Repo.get(Activity, follow_activity.id).data["state"] == "pending"
assert Repo.get(Activity, follow_activity_two.id).data["state"] == "reject"
assert refresh_record(follow_activity).data["state"] == "pending"
assert refresh_record(follow_activity_two).data["state"] == "reject"
end
end
@ -401,4 +413,216 @@ test "fetches existing like" do
assert ^like_activity = Utils.get_existing_like(user.ap_id, object)
end
end
describe "get_get_existing_announce/2" do
test "returns nil if announce not found" do
actor = insert(:user)
refute Utils.get_existing_announce(actor.ap_id, %{data: %{"id" => "test"}})
end
test "fetches existing announce" do
note_activity = insert(:note_activity)
assert object = Object.normalize(note_activity)
actor = insert(:user)
{:ok, announce, _object} = ActivityPub.announce(actor, object)
assert Utils.get_existing_announce(actor.ap_id, object) == announce
end
end
describe "fetch_latest_block/2" do
test "fetches last block activities" do
user1 = insert(:user)
user2 = insert(:user)
assert {:ok, %Activity{} = _} = ActivityPub.block(user1, user2)
assert {:ok, %Activity{} = _} = ActivityPub.block(user1, user2)
assert {:ok, %Activity{} = activity} = ActivityPub.block(user1, user2)
assert Utils.fetch_latest_block(user1, user2) == activity
end
end
describe "recipient_in_message/3" do
test "returns true when recipient in `to`" do
recipient = insert(:user)
actor = insert(:user)
assert Utils.recipient_in_message(recipient, actor, %{"to" => recipient.ap_id})
assert Utils.recipient_in_message(
recipient,
actor,
%{"to" => [recipient.ap_id], "cc" => ""}
)
end
test "returns true when recipient in `cc`" do
recipient = insert(:user)
actor = insert(:user)
assert Utils.recipient_in_message(recipient, actor, %{"cc" => recipient.ap_id})
assert Utils.recipient_in_message(
recipient,
actor,
%{"cc" => [recipient.ap_id], "to" => ""}
)
end
test "returns true when recipient in `bto`" do
recipient = insert(:user)
actor = insert(:user)
assert Utils.recipient_in_message(recipient, actor, %{"bto" => recipient.ap_id})
assert Utils.recipient_in_message(
recipient,
actor,
%{"bcc" => "", "bto" => [recipient.ap_id]}
)
end
test "returns true when recipient in `bcc`" do
recipient = insert(:user)
actor = insert(:user)
assert Utils.recipient_in_message(recipient, actor, %{"bcc" => recipient.ap_id})
assert Utils.recipient_in_message(
recipient,
actor,
%{"bto" => "", "bcc" => [recipient.ap_id]}
)
end
test "returns true when message without addresses fields" do
recipient = insert(:user)
actor = insert(:user)
assert Utils.recipient_in_message(recipient, actor, %{"bccc" => recipient.ap_id})
assert Utils.recipient_in_message(
recipient,
actor,
%{"btod" => "", "bccc" => [recipient.ap_id]}
)
end
test "returns false" do
recipient = insert(:user)
actor = insert(:user)
refute Utils.recipient_in_message(recipient, actor, %{"to" => "ap_id"})
end
end
describe "lazy_put_activity_defaults/2" do
test "returns map with id and published data" do
note_activity = insert(:note_activity)
object = Object.normalize(note_activity)
res = Utils.lazy_put_activity_defaults(%{"context" => object.data["id"]})
assert res["context"] == object.data["id"]
assert res["context_id"] == object.id
assert res["id"]
assert res["published"]
end
test "returns map with fake id and published data" do
assert %{
"context" => "pleroma:fakecontext",
"context_id" => -1,
"id" => "pleroma:fakeid",
"published" => _
} = Utils.lazy_put_activity_defaults(%{}, true)
end
test "returns activity data with object" do
note_activity = insert(:note_activity)
object = Object.normalize(note_activity)
res =
Utils.lazy_put_activity_defaults(%{
"context" => object.data["id"],
"object" => %{}
})
assert res["context"] == object.data["id"]
assert res["context_id"] == object.id
assert res["id"]
assert res["published"]
assert res["object"]["id"]
assert res["object"]["published"]
assert res["object"]["context"] == object.data["id"]
assert res["object"]["context_id"] == object.id
end
end
describe "make_flag_data" do
test "returns empty map when params is invalid" do
assert Utils.make_flag_data(%{}, %{}) == %{}
end
test "returns map with Flag object" do
reporter = insert(:user)
target_account = insert(:user)
{:ok, activity} = CommonAPI.post(target_account, %{"status" => "foobar"})
context = Utils.generate_context_id()
content = "foobar"
target_ap_id = target_account.ap_id
activity_ap_id = activity.data["id"]
res =
Utils.make_flag_data(
%{
actor: reporter,
context: context,
account: target_account,
statuses: [%{"id" => activity.data["id"]}],
content: content
},
%{}
)
assert %{
"type" => "Flag",
"content" => ^content,
"context" => ^context,
"object" => [^target_ap_id, ^activity_ap_id],
"state" => "open"
} = res
end
end
describe "add_announce_to_object/2" do
test "adds actor to announcement" do
user = insert(:user)
object = insert(:note)
activity =
insert(:note_activity,
data: %{
"actor" => user.ap_id,
"cc" => [Pleroma.Constants.as_public()]
}
)
assert {:ok, updated_object} = Utils.add_announce_to_object(activity, object)
assert updated_object.data["announcements"] == [user.ap_id]
assert updated_object.data["announcement_count"] == 1
end
end
describe "remove_announce_from_object/2" do
test "removes actor from announcements" do
user = insert(:user)
user2 = insert(:user)
object =
insert(:note,
data: %{"announcements" => [user.ap_id, user2.ap_id], "announcement_count" => 2}
)
activity = insert(:note_activity, data: %{"actor" => user.ap_id})
assert {:ok, updated_object} = Utils.remove_announce_from_object(activity, object)
assert updated_object.data["announcements"] == [user2.ap_id]
assert updated_object.data["announcement_count"] == 1
end
end
end

View File

@ -1309,6 +1309,7 @@ test "returns empty response when no reports created", %{conn: conn} do
|> json_response(:ok)
assert Enum.empty?(response["reports"])
assert response["total"] == 0
end
test "returns reports", %{conn: conn} do
@ -1331,6 +1332,8 @@ test "returns reports", %{conn: conn} do
assert length(response["reports"]) == 1
assert report["id"] == report_id
assert response["total"] == 1
end
test "returns reports with specified state", %{conn: conn} do
@ -1364,6 +1367,8 @@ test "returns reports with specified state", %{conn: conn} do
assert length(response["reports"]) == 1
assert open_report["id"] == first_report_id
assert response["total"] == 1
response =
conn
|> get("/api/pleroma/admin/reports", %{
@ -1376,6 +1381,8 @@ test "returns reports with specified state", %{conn: conn} do
assert length(response["reports"]) == 1
assert closed_report["id"] == second_report_id
assert response["total"] == 1
response =
conn
|> get("/api/pleroma/admin/reports", %{
@ -1384,6 +1391,7 @@ test "returns reports with specified state", %{conn: conn} do
|> json_response(:ok)
assert Enum.empty?(response["reports"])
assert response["total"] == 0
end
test "returns 403 when requested by a non-admin" do
@ -2096,7 +2104,7 @@ test "queues key as atom", %{conn: conn} do
post(conn, "/api/pleroma/admin/config", %{
configs: [
%{
"group" => "pleroma_job_queue",
"group" => "oban",
"key" => ":queues",
"value" => [
%{"tuple" => [":federator_incoming", 50]},
@ -2114,7 +2122,7 @@ test "queues key as atom", %{conn: conn} do
assert json_response(conn, 200) == %{
"configs" => [
%{
"group" => "pleroma_job_queue",
"group" => "oban",
"key" => ":queues",
"value" => [
%{"tuple" => [":federator_incoming", 50]},

View File

@ -4,9 +4,14 @@
defmodule Pleroma.Web.FederatorTest do
alias Pleroma.Instances
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.Federator
alias Pleroma.Workers.PublisherWorker
use Pleroma.DataCase
use Oban.Testing, repo: Pleroma.Repo
import Pleroma.Factory
import Mock
@ -24,15 +29,6 @@ defmodule Pleroma.Web.FederatorTest do
clear_config([:instance, :rewrite_policy])
clear_config([:mrf_keyword])
describe "Publisher.perform" do
test "call `perform` with unknown task" do
assert {
:error,
"Don't know what to do with this"
} = Pleroma.Web.Federator.Publisher.perform("test", :ok, :ok)
end
end
describe "Publish an activity" do
setup do
user = insert(:user)
@ -53,6 +49,7 @@ test "with relays active, it publishes to the relay", %{
} do
with_mocks([relay_mock]) do
Federator.publish(activity)
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
end
assert_received :relay_publish
@ -66,6 +63,7 @@ test "with relays deactivated, it does not publish to the relay", %{
with_mocks([relay_mock]) do
Federator.publish(activity)
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
end
refute_received :relay_publish
@ -73,10 +71,7 @@ test "with relays deactivated, it does not publish to the relay", %{
end
describe "Targets reachability filtering in `publish`" do
test_with_mock "it federates only to reachable instances via AP",
Pleroma.Web.ActivityPub.Publisher,
[:passthrough],
[] do
test "it federates only to reachable instances via AP" do
user = insert(:user)
{inbox1, inbox2} =
@ -104,20 +99,20 @@ test "with relays deactivated, it does not publish to the relay", %{
{:ok, _activity} =
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
assert called(
Pleroma.Web.ActivityPub.Publisher.publish_one(%{
inbox: inbox1,
unreachable_since: dt
})
)
expected_dt = NaiveDateTime.to_iso8601(dt)
refute called(Pleroma.Web.ActivityPub.Publisher.publish_one(%{inbox: inbox2}))
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
assert ObanHelpers.member?(
%{
"op" => "publish_one",
"params" => %{"inbox" => inbox1, "unreachable_since" => expected_dt}
},
all_enqueued(worker: PublisherWorker)
)
end
test_with_mock "it federates only to reachable instances via Websub",
Pleroma.Web.Websub,
[:passthrough],
[] do
test "it federates only to reachable instances via Websub" do
user = insert(:user)
websub_topic = Pleroma.Web.OStatus.feed_path(user)
@ -142,23 +137,27 @@ test "with relays deactivated, it does not publish to the relay", %{
{:ok, _activity} = CommonAPI.post(user, %{"status" => "HI"})
assert called(
Pleroma.Web.Websub.publish_one(%{
callback: sub2.callback,
unreachable_since: dt
})
)
expected_callback = sub2.callback
expected_dt = NaiveDateTime.to_iso8601(dt)
refute called(Pleroma.Web.Websub.publish_one(%{callback: sub1.callback}))
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
assert ObanHelpers.member?(
%{
"op" => "publish_one",
"params" => %{
"callback" => expected_callback,
"unreachable_since" => expected_dt
}
},
all_enqueued(worker: PublisherWorker)
)
end
test_with_mock "it federates only to reachable instances via Salmon",
Pleroma.Web.Salmon,
[:passthrough],
[] do
test "it federates only to reachable instances via Salmon" do
user = insert(:user)
remote_user1 =
_remote_user1 =
insert(:user, %{
local: false,
nickname: "nick1@domain.com",
@ -174,6 +173,8 @@ test "with relays deactivated, it does not publish to the relay", %{
info: %{salmon: "https://domain2.com/salmon"}
})
remote_user2_id = remote_user2.id
dt = NaiveDateTime.utc_now()
Instances.set_unreachable(remote_user2.ap_id, dt)
@ -182,14 +183,20 @@ test "with relays deactivated, it does not publish to the relay", %{
{:ok, _activity} =
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
assert called(
Pleroma.Web.Salmon.publish_one(%{
recipient: remote_user2,
unreachable_since: dt
})
)
expected_dt = NaiveDateTime.to_iso8601(dt)
refute called(Pleroma.Web.Salmon.publish_one(%{recipient: remote_user1}))
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
assert ObanHelpers.member?(
%{
"op" => "publish_one",
"params" => %{
"recipient_id" => remote_user2_id,
"unreachable_since" => expected_dt
}
},
all_enqueued(worker: PublisherWorker)
)
end
end
@ -209,7 +216,8 @@ test "successfully processes incoming AP docs with correct origin" do
"to" => ["https://www.w3.org/ns/activitystreams#Public"]
}
{:ok, _activity} = Federator.incoming_ap_doc(params)
assert {:ok, job} = Federator.incoming_ap_doc(params)
assert {:ok, _activity} = ObanHelpers.perform(job)
end
test "rejects incoming AP docs with incorrect origin" do
@ -227,7 +235,8 @@ test "rejects incoming AP docs with incorrect origin" do
"to" => ["https://www.w3.org/ns/activitystreams#Public"]
}
:error = Federator.incoming_ap_doc(params)
assert {:ok, job} = Federator.incoming_ap_doc(params)
assert :error = ObanHelpers.perform(job)
end
test "it does not crash if MRF rejects the post" do
@ -242,7 +251,8 @@ test "it does not crash if MRF rejects the post" do
File.read!("test/fixtures/mastodon-post-activity.json")
|> Poison.decode!()
assert Federator.incoming_ap_doc(params) == :error
assert {:ok, job} = Federator.incoming_ap_doc(params)
assert :error = ObanHelpers.perform(job)
end
end
end

View File

@ -16,7 +16,8 @@ defmodule Pleroma.Instances.InstanceTest do
describe "set_reachable/1" do
test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
instance = insert(:instance, unreachable_since: NaiveDateTime.utc_now())
unreachable_since = NaiveDateTime.to_iso8601(NaiveDateTime.utc_now())
instance = insert(:instance, unreachable_since: unreachable_since)
assert {:ok, instance} = Instance.set_reachable(instance.host)
refute instance.unreachable_since

View File

@ -13,6 +13,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIControllerTest do
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.ScheduledActivity
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.CommonAPI
@ -751,7 +752,7 @@ test "get statuses by IDs", %{conn: conn} do
query_string = "ids[]=#{id1}&ids[]=#{id2}"
conn = get(conn, "/api/v1/statuses/?#{query_string}")
assert [%{"id" => ^id1}, %{"id" => ^id2}] = json_response(conn, :ok)
assert [%{"id" => ^id1}, %{"id" => ^id2}] = Enum.sort_by(json_response(conn, :ok), & &1["id"])
end
describe "deleting a status" do
@ -3897,6 +3898,7 @@ test "it creates a PasswordResetToken record for user", %{user: user} do
end
test "it sends an email to user", %{user: user} do
ObanHelpers.perform_all()
token_record = Repo.get_by(Pleroma.PasswordResetToken, user_id: user.id)
email = Pleroma.Emails.UserEmail.password_reset_email(user, token_record.token)
@ -3957,6 +3959,8 @@ test "resend account confirmation email", %{conn: conn, user: user} do
|> post("/api/v1/pleroma/accounts/confirmation_resend?email=#{user.email}")
|> json_response(:no_content)
ObanHelpers.perform_all()
email = Pleroma.Emails.UserEmail.account_confirmation_email(user)
notify_email = Config.get([:instance, :notify_email])
instance_name = Config.get([:instance, :name])

View File

@ -1,48 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule MockActivityPub do
def publish_one({ret, waiter}) do
send(waiter, :complete)
{ret, "success"}
end
end
defmodule Pleroma.Web.Federator.RetryQueueTest do
use Pleroma.DataCase
alias Pleroma.Web.Federator.RetryQueue
@small_retry_count 0
@hopeless_retry_count 10
setup do
RetryQueue.reset_stats()
end
test "RetryQueue responds to stats request" do
assert %{delivered: 0, dropped: 0} == RetryQueue.get_stats()
end
test "failed posts are retried" do
{:retry, _timeout} = RetryQueue.get_retry_params(@small_retry_count)
wait_task =
Task.async(fn ->
receive do
:complete -> :ok
end
end)
RetryQueue.enqueue({:ok, wait_task.pid}, MockActivityPub, @small_retry_count)
Task.await(wait_task)
assert %{delivered: 1, dropped: 0} == RetryQueue.get_stats()
end
test "posts that have been tried too many times are dropped" do
{:drop, _timeout} = RetryQueue.get_retry_params(@hopeless_retry_count)
RetryQueue.enqueue({:ok, nil}, MockActivityPub, @hopeless_retry_count)
assert %{delivered: 0, dropped: 1} == RetryQueue.get_stats()
end
end

View File

@ -96,6 +96,6 @@ test "it gets a magic key" do
Salmon.publish(user, activity)
assert called(Publisher.enqueue_one(Salmon, %{recipient: mentioned_user}))
assert called(Publisher.enqueue_one(Salmon, %{recipient_id: mentioned_user.id}))
end
end

View File

@ -0,0 +1,36 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.PingTest do
use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.Web.Streamer
setup do
start_supervised({Streamer.supervisor(), [ping_interval: 30]})
:ok
end
describe "sockets" do
setup do
user = insert(:user)
{:ok, %{user: user}}
end
test "it sends pings", %{user: user} do
task =
Task.async(fn ->
assert_receive {:text, received_event}, 40
assert_receive {:text, received_event}, 40
assert_receive {:text, received_event}, 40
end)
Streamer.add_socket("public", %{transport_pid: task.pid, assigns: %{user: user}})
Task.await(task)
end
end
end

View File

@ -0,0 +1,54 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.StateTest do
use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.Web.Streamer
alias Pleroma.Web.Streamer.StreamerSocket
@moduletag needs_streamer: true
describe "sockets" do
setup do
user = insert(:user)
user2 = insert(:user)
{:ok, %{user: user, user2: user2}}
end
test "it can add a socket", %{user: user} do
Streamer.add_socket("public", %{transport_pid: 1, assigns: %{user: user}})
assert(%{"public" => [%StreamerSocket{transport_pid: 1}]} = Streamer.get_sockets())
end
test "it can add multiple sockets per user", %{user: user} do
Streamer.add_socket("public", %{transport_pid: 1, assigns: %{user: user}})
Streamer.add_socket("public", %{transport_pid: 2, assigns: %{user: user}})
assert(
%{
"public" => [
%StreamerSocket{transport_pid: 2},
%StreamerSocket{transport_pid: 1}
]
} = Streamer.get_sockets()
)
end
test "it will not add a duplicate socket", %{user: user} do
Streamer.add_socket("activity", %{transport_pid: 1, assigns: %{user: user}})
Streamer.add_socket("activity", %{transport_pid: 1, assigns: %{user: user}})
assert(
%{
"activity" => [
%StreamerSocket{transport_pid: 1}
]
} = Streamer.get_sockets()
)
end
end
end

View File

@ -5,24 +5,20 @@
defmodule Pleroma.Web.StreamerTest do
use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.List
alias Pleroma.User
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.Streamer
import Pleroma.Factory
alias Pleroma.Web.Streamer.StreamerSocket
alias Pleroma.Web.Streamer.Worker
@moduletag needs_streamer: true
clear_config_all([:instance, :skip_thread_containment])
describe "user streams" do
setup do
GenServer.start(Streamer, %{}, name: Streamer)
on_exit(fn ->
if pid = Process.whereis(Streamer) do
Process.exit(pid, :kill)
end
end)
user = insert(:user)
notify = insert(:notification, user: user, activity: build(:note_activity))
{:ok, %{user: user, notify: notify}}
@ -125,12 +121,10 @@ test "it sends to public" do
assert_receive {:text, _}, 4_000
end)
fake_socket = %{
fake_socket = %StreamerSocket{
transport_pid: task.pid,
assigns: %{
user: user
}
}
{:ok, activity} = CommonAPI.post(other_user, %{"status" => "Test"})
@ -138,7 +132,7 @@ test "it sends to public" do
"public" => [fake_socket]
}
Streamer.push_to_socket(topics, "public", activity)
Worker.push_to_socket(topics, "public", activity)
Task.await(task)
@ -155,12 +149,10 @@ test "it sends to public" do
assert received_event == expected_event
end)
fake_socket = %{
fake_socket = %StreamerSocket{
transport_pid: task.pid,
assigns: %{
user: user
}
}
{:ok, activity} = CommonAPI.delete(activity.id, other_user)
@ -168,7 +160,7 @@ test "it sends to public" do
"public" => [fake_socket]
}
Streamer.push_to_socket(topics, "public", activity)
Worker.push_to_socket(topics, "public", activity)
Task.await(task)
end
@ -189,9 +181,9 @@ test "it doesn't send to user if recipients invalid and thread containment is en
)
task = Task.async(fn -> refute_receive {:text, _}, 1_000 end)
fake_socket = %{transport_pid: task.pid, assigns: %{user: user}}
fake_socket = %StreamerSocket{transport_pid: task.pid, user: user}
topics = %{"public" => [fake_socket]}
Streamer.push_to_socket(topics, "public", activity)
Worker.push_to_socket(topics, "public", activity)
Task.await(task)
end
@ -211,9 +203,9 @@ test "it sends message if recipients invalid and thread containment is disabled"
)
task = Task.async(fn -> assert_receive {:text, _}, 1_000 end)
fake_socket = %{transport_pid: task.pid, assigns: %{user: user}}
fake_socket = %StreamerSocket{transport_pid: task.pid, user: user}
topics = %{"public" => [fake_socket]}
Streamer.push_to_socket(topics, "public", activity)
Worker.push_to_socket(topics, "public", activity)
Task.await(task)
end
@ -233,9 +225,9 @@ test "it sends message if recipients invalid and thread containment is enabled b
)
task = Task.async(fn -> assert_receive {:text, _}, 1_000 end)
fake_socket = %{transport_pid: task.pid, assigns: %{user: user}}
fake_socket = %StreamerSocket{transport_pid: task.pid, user: user}
topics = %{"public" => [fake_socket]}
Streamer.push_to_socket(topics, "public", activity)
Worker.push_to_socket(topics, "public", activity)
Task.await(task)
end
@ -251,12 +243,10 @@ test "it doesn't send to blocked users" do
refute_receive {:text, _}, 1_000
end)
fake_socket = %{
fake_socket = %StreamerSocket{
transport_pid: task.pid,
assigns: %{
user: user
}
}
{:ok, activity} = CommonAPI.post(blocked_user, %{"status" => "Test"})
@ -264,7 +254,7 @@ test "it doesn't send to blocked users" do
"public" => [fake_socket]
}
Streamer.push_to_socket(topics, "public", activity)
Worker.push_to_socket(topics, "public", activity)
Task.await(task)
end
@ -284,12 +274,10 @@ test "it doesn't send unwanted DMs to list" do
refute_receive {:text, _}, 1_000
end)
fake_socket = %{
fake_socket = %StreamerSocket{
transport_pid: task.pid,
assigns: %{
user: user_a
}
}
{:ok, activity} =
CommonAPI.post(user_b, %{
@ -301,7 +289,7 @@ test "it doesn't send unwanted DMs to list" do
"list:#{list.id}" => [fake_socket]
}
Streamer.handle_cast(%{action: :stream, topic: "list", item: activity}, topics)
Worker.handle_call({:stream, "list", activity}, self(), topics)
Task.await(task)
end
@ -318,12 +306,10 @@ test "it doesn't send unwanted private posts to list" do
refute_receive {:text, _}, 1_000
end)
fake_socket = %{
fake_socket = %StreamerSocket{
transport_pid: task.pid,
assigns: %{
user: user_a
}
}
{:ok, activity} =
CommonAPI.post(user_b, %{
@ -335,12 +321,12 @@ test "it doesn't send unwanted private posts to list" do
"list:#{list.id}" => [fake_socket]
}
Streamer.handle_cast(%{action: :stream, topic: "list", item: activity}, topics)
Worker.handle_call({:stream, "list", activity}, self(), topics)
Task.await(task)
end
test "it send wanted private posts to list" do
test "it sends wanted private posts to list" do
user_a = insert(:user)
user_b = insert(:user)
@ -354,12 +340,10 @@ test "it send wanted private posts to list" do
assert_receive {:text, _}, 1_000
end)
fake_socket = %{
fake_socket = %StreamerSocket{
transport_pid: task.pid,
assigns: %{
user: user_a
}
}
{:ok, activity} =
CommonAPI.post(user_b, %{
@ -367,11 +351,12 @@ test "it send wanted private posts to list" do
"visibility" => "private"
})
topics = %{
"list:#{list.id}" => [fake_socket]
}
Streamer.add_socket(
"list:#{list.id}",
fake_socket
)
Streamer.handle_cast(%{action: :stream, topic: "list", item: activity}, topics)
Worker.handle_call({:stream, "list", activity}, self(), %{})
Task.await(task)
end
@ -387,12 +372,10 @@ test "it doesn't send muted reblogs" do
refute_receive {:text, _}, 1_000
end)
fake_socket = %{
fake_socket = %StreamerSocket{
transport_pid: task.pid,
assigns: %{
user: user1
}
}
{:ok, create_activity} = CommonAPI.post(user3, %{"status" => "I'm kawen"})
{:ok, announce_activity, _} = CommonAPI.repeat(create_activity.id, user2)
@ -401,7 +384,7 @@ test "it doesn't send muted reblogs" do
"public" => [fake_socket]
}
Streamer.push_to_socket(topics, "public", announce_activity)
Worker.push_to_socket(topics, "public", announce_activity)
Task.await(task)
end
@ -417,6 +400,8 @@ test "it doesn't send posts from muted threads" do
task = Task.async(fn -> refute_receive {:text, _}, 4_000 end)
Process.sleep(4000)
Streamer.add_socket(
"user",
%{transport_pid: task.pid, assigns: %{user: user2}}
@ -428,14 +413,6 @@ test "it doesn't send posts from muted threads" do
describe "direct streams" do
setup do
GenServer.start(Streamer, %{}, name: Streamer)
on_exit(fn ->
if pid = Process.whereis(Streamer) do
Process.exit(pid, :kill)
end
end)
:ok
end
@ -480,6 +457,8 @@ test "it doesn't send conversation update to the 'direct' stream when the last
refute_receive {:text, _}, 4_000
end)
Process.sleep(1000)
Streamer.add_socket(
"direct",
%{transport_pid: task.pid, assigns: %{user: user}}
@ -521,6 +500,8 @@ test "it sends conversation update to the 'direct' stream when a message is dele
assert last_status["id"] == to_string(create_activity.id)
end)
Process.sleep(1000)
Streamer.add_socket(
"direct",
%{transport_pid: task.pid, assigns: %{user: user}}

View File

@ -5,6 +5,7 @@
defmodule Pleroma.Web.TwitterAPI.TwitterAPITest do
use Pleroma.DataCase
alias Pleroma.Repo
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.UserInviteToken
alias Pleroma.Web.MastodonAPI.AccountView
@ -68,6 +69,7 @@ test "it sends confirmation email if :account_activation_required is specified i
}
{:ok, user} = TwitterAPI.register_user(data)
ObanHelpers.perform_all()
assert user.info.confirmation_pending

View File

@ -4,8 +4,10 @@
defmodule Pleroma.Web.TwitterAPI.UtilControllerTest do
use Pleroma.Web.ConnCase
use Oban.Testing, repo: Pleroma.Repo
alias Pleroma.Repo
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
alias Pleroma.Web.CommonAPI
import ExUnit.CaptureLog
@ -43,8 +45,7 @@ test "it imports follow lists from file", %{conn: conn} do
{File, [],
read!: fn "follow_list.txt" ->
"Account address,Show boosts\n#{user2.ap_id},true"
end},
{PleromaJobQueue, [:passthrough], []}
end}
]) do
response =
conn
@ -52,15 +53,16 @@ test "it imports follow lists from file", %{conn: conn} do
|> post("/api/pleroma/follow_import", %{"list" => %Plug.Upload{path: "follow_list.txt"}})
|> json_response(:ok)
assert called(
PleromaJobQueue.enqueue(
:background,
User,
[:follow_import, user1, [user2.ap_id]]
)
)
assert response == "job started"
assert ObanHelpers.member?(
%{
"op" => "follow_import",
"follower_id" => user1.id,
"followed_identifiers" => [user2.ap_id]
},
all_enqueued(worker: Pleroma.Workers.BackgroundWorker)
)
end
end
@ -121,8 +123,7 @@ test "it imports blocks users from file", %{conn: conn} do
user3 = insert(:user)
with_mocks([
{File, [], read!: fn "blocks_list.txt" -> "#{user2.ap_id} #{user3.ap_id}" end},
{PleromaJobQueue, [:passthrough], []}
{File, [], read!: fn "blocks_list.txt" -> "#{user2.ap_id} #{user3.ap_id}" end}
]) do
response =
conn
@ -130,15 +131,16 @@ test "it imports blocks users from file", %{conn: conn} do
|> post("/api/pleroma/blocks_import", %{"list" => %Plug.Upload{path: "blocks_list.txt"}})
|> json_response(:ok)
assert called(
PleromaJobQueue.enqueue(
:background,
User,
[:blocks_import, user1, [user2.ap_id, user3.ap_id]]
)
)
assert response == "job started"
assert ObanHelpers.member?(
%{
"op" => "blocks_import",
"blocker_id" => user1.id,
"blocked_identifiers" => [user2.ap_id, user3.ap_id]
},
all_enqueued(worker: Pleroma.Workers.BackgroundWorker)
)
end
end
end
@ -562,6 +564,7 @@ test "it returns HTTP 200", %{conn: conn} do
|> json_response(:ok)
assert response == %{"status" => "success"}
ObanHelpers.perform_all()
user = User.get_cached_by_id(user.id)

View File

@ -4,11 +4,14 @@
defmodule Pleroma.Web.WebsubTest do
use Pleroma.DataCase
use Oban.Testing, repo: Pleroma.Repo
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Web.Router.Helpers
alias Pleroma.Web.Websub
alias Pleroma.Web.Websub.WebsubClientSubscription
alias Pleroma.Web.Websub.WebsubServerSubscription
alias Pleroma.Workers.SubscriberWorker
import Pleroma.Factory
import Tesla.Mock
@ -224,6 +227,7 @@ test "it renews subscriptions that have less than a day of time left" do
})
_refresh = Websub.refresh_subscriptions()
ObanHelpers.perform(all_enqueued(worker: SubscriberWorker))
assert still_good == Repo.get(WebsubClientSubscription, still_good.id)
refute needs_refresh == Repo.get(WebsubClientSubscription, needs_refresh.id)