Merge branch 'develop' into feature/moderation-log-filters

commit df15ed13d1
@@ -38,7 +38,12 @@ erl_crash.dump
 # Prevent committing docs files
 /priv/static/doc/*
+docs/generated_config.md

 # Code test coverage
 /cover
 /Elixir.*.coverdata

+.idea
+pleroma.iml
@@ -7,6 +7,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ### Security
 - OStatus: eliminate the possibility of a protocol downgrade attack.
 - OStatus: prevent following locked accounts, bypassing the approval process.
+- Mastodon API: respect post privacy in `/api/v1/statuses/:id/{favourited,reblogged}_by`

 ### Removed
 - **Breaking:** GNU Social API with Qvitter extensions support

@@ -20,14 +21,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - **Breaking:** `/api/pleroma/notifications/read` is moved to `/api/v1/pleroma/notifications/read` and now supports `max_id` and responds with Mastodon API entities.
 - Configuration: OpenGraph and TwitterCard providers enabled by default
 - Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
-- Mastodon API: `pleroma.thread_muted` key in the Status entity
+- Configuration: added `config/description.exs`, from which `docs/config.md` is generated
 - Federation: Return 403 errors when trying to request pages from a user's follower/following collections if they have `hide_followers`/`hide_follows` set
 - NodeInfo: Return `skipThreadContainment` in `metadata` for the `skip_thread_containment` option
 - NodeInfo: Return `mailerEnabled` in `metadata`
 - Mastodon API: Unsubscribe followers when they unfollow a user
+- Mastodon API: `pleroma.thread_muted` key in the Status entity
 - AdminAPI: Add "godmode" while fetching user statuses (i.e. admin can see private statuses)
 - Improve digest email template
 – Pagination: (optional) return `total` alongside with `items` when paginating
+- Replaced [pleroma_job_queue](https://git.pleroma.social/pleroma/pleroma_job_queue) and `Pleroma.Web.Federator.RetryQueue` with [Oban](https://github.com/sorentwo/oban) (see [`docs/config.md`](docs/config.md) on migrating customized worker / retry settings)
+- Introduced [quantum](https://github.com/quantum-elixir/quantum-core) job scheduler

 ### Fixed
 - Following from Osada

@@ -105,11 +109,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - ActivityPub: Optional signing of ActivityPub object fetches.
 - Admin API: Endpoint for fetching latest user's statuses
 - Pleroma API: Add `/api/v1/pleroma/accounts/confirmation_resend?email=<email>` for resending account confirmation.
+- Pleroma API: Email change endpoint.
 - Relays: Added a task to list relay subscriptions.
 - Mix Tasks: `mix pleroma.database fix_likes_collections`
 - Federation: Remove `likes` from objects.
 - Admin API: Added moderation log
 - Web response cache (currently, enabled for ActivityPub)
+- Mastodon API: Added an endpoint to get multiple statuses by IDs (`GET /api/v1/statuses/?ids[]=1&ids[]=2`)
 - Admin API: Added moderation log filters (user/start date/end date/search/pagination)

 ### Changed
@@ -51,6 +51,24 @@
   telemetry_event: [Pleroma.Repo.Instrumenter],
   migration_lock: nil

+scheduled_jobs =
+  with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
+       true <- digest_config[:active] do
+    [{digest_config[:schedule], {Pleroma.Daemons.DigestEmailDaemon, :perform, []}}]
+  else
+    _ -> []
+  end
+
+scheduled_jobs =
+  scheduled_jobs ++
+    [{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
+
+config :pleroma, Pleroma.Scheduler,
+  global: true,
+  overlap: true,
+  timezone: :utc,
+  jobs: scheduled_jobs
+
 config :pleroma, Pleroma.Captcha,
   enabled: false,
   seconds_valid: 60,

@@ -258,7 +276,7 @@
   max_account_fields: 10,
   max_remote_account_fields: 20,
   account_field_name_length: 512,
-  account_field_value_length: 512,
+  account_field_value_length: 2048,
   external_user_synchronization: true

 config :pleroma, :markup,

@@ -313,6 +331,10 @@
   follow_handshake_timeout: 500,
   sign_object_fetches: true

+config :pleroma, :streamer,
+  workers: 3,
+  overflow_workers: 2
+
 config :pleroma, :user, deny_follow_blocked: true

 config :pleroma, :mrf_normalize_markup, scrub_policy: Pleroma.HTML.Scrubber.Default

@@ -373,6 +395,8 @@

 config :phoenix, :format_encoders, json: Jason

+config :phoenix, :json_library, Jason
+
 config :pleroma, :gopher,
   enabled: false,
   ip: {0, 0, 0, 0},

@@ -449,13 +473,11 @@
     "web"
   ]

-config :pleroma, Pleroma.Web.Federator.RetryQueue,
-  enabled: false,
-  max_jobs: 20,
-  initial_timeout: 30,
-  max_retries: 5
-
-config :pleroma_job_queue, :queues,
+config :pleroma, Oban,
+  repo: Pleroma.Repo,
+  verbose: false,
+  prune: {:maxlen, 1500},
+  queues: [
   activity_expiration: 10,
   federator_incoming: 50,
   federator_outgoing: 50,

@@ -464,6 +486,13 @@
   transmogrifier: 20,
   scheduled_activities: 10,
   background: 5
+]
+
+config :pleroma, :workers,
+  retries: [
+    federator_incoming: 5,
+    federator_outgoing: 5
+  ]

 config :pleroma, :fetch_initial_posts,
   enabled: false,
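As a worked illustration of the `scheduled_jobs` block added above (the digest cron string below is a made-up placeholder, not a value from this commit), the expression evaluates roughly as follows:

```elixir
# Hypothetical result when the :email_notifications digest is active with
# schedule "0 0 * * * *": the `with` clause matches, so both jobs are scheduled.
scheduled_jobs = [
  {"0 0 * * * *", {Pleroma.Daemons.DigestEmailDaemon, :perform, []}},
  {"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}
]

# With the digest disabled, the `else` branch returns [], leaving only the Websub job.
scheduled_jobs = [
  {"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}
]
```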
(File diff suppressed because it is too large)
@@ -61,7 +61,11 @@

 config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock

-config :pleroma_job_queue, disabled: true
+config :pleroma, Oban,
+  queues: false,
+  prune: :disabled
+
+config :pleroma, Pleroma.Scheduler, jobs: []

 config :pleroma, Pleroma.ScheduledActivity,
   daily_user_limit: 2,
@@ -60,9 +60,13 @@ Authentication is required and the user must be an admin.

 - Method: `POST`
 - Params:
-  - `nickname`
-  - `email`
-  - `password`
+  `users`: [
+    {
+      `nickname`,
+      `email`,
+      `password`
+    }
+  ]
 - Response: User’s nickname

 ## `/api/pleroma/admin/users/follow`
@@ -91,6 +91,20 @@ Additional parameters can be added to the JSON body/Form data:
 - `expires_in`: The number of seconds the posted activity should expire in. When a posted activity expires it will be deleted from the server, and a delete request for it will be federated. This needs to be longer than an hour.
 - `in_reply_to_conversation_id`: Will reply to a given conversation, addressing only the people who are part of the recipient set of that conversation. Sets the visibility to `direct`.

+## GET `/api/v1/statuses`
+
+An endpoint to get multiple statuses by IDs.
+
+Required parameters:
+
+- `ids`: array of activity ids
+
+Usage example: `GET /api/v1/statuses/?ids[]=1&ids[]=2`.
+
+Returns: array of Status.
+
+The maximum number of statuses is limited to 100 per request.
+
 ## PATCH `/api/v1/update_credentials`

 Additional parameters can be added to the JSON body/Form data:
@@ -321,6 +321,16 @@ See [Admin-API](Admin-API.md)
 }
 ```

+## `/api/pleroma/change_email`
+### Change account email
+* Method `POST`
+* Authentication: required
+* Params:
+    * `password`: user's password
+    * `email`: new email
+* Response: JSON. Returns `{"status": "success"}` if the change was successful, `{"error": "[error message]"}` otherwise
+* Note: Currently, Mastodon has no API for changing email. If they add it in future it might be incompatible with Pleroma.
+
 # Pleroma Conversations

 Pleroma Conversations have the same general structure that Mastodon Conversations have. The behavior differs in the following ways when using these endpoints:
@@ -135,7 +135,7 @@ config :pleroma, Pleroma.Emails.Mailer,
 * `max_account_fields`: The maximum number of custom fields in the user profile (default: `10`)
 * `max_remote_account_fields`: The maximum number of custom fields in the remote user profile (default: `20`)
 * `account_field_name_length`: An account field name maximum length (default: `512`)
-* `account_field_value_length`: An account field value maximum length (default: `512`)
+* `account_field_value_length`: An account field value maximum length (default: `2048`)
 * `external_user_synchronization`: Enabling following/followers counters synchronization for external users.

@@ -400,35 +400,71 @@ You can then do
 curl "http://localhost:4000/api/pleroma/admin/invite_token?admin_token=somerandomtoken"
 ```

-## :pleroma_job_queue
+## Oban

-[Pleroma Job Queue](https://git.pleroma.social/pleroma/pleroma_job_queue) configuration: a list of queues with maximum concurrent jobs.
+[Oban](https://github.com/sorentwo/oban) asynchronous job processor configuration.
+
+Configuration options described in [Oban readme](https://github.com/sorentwo/oban#usage):
+* `repo` - app's Ecto repo (`Pleroma.Repo`)
+* `verbose` - logs verbosity
+* `prune` - non-retryable jobs [pruning settings](https://github.com/sorentwo/oban#pruning) (`:disabled` / `{:maxlen, value}` / `{:maxage, value}`)
+* `queues` - job queues (see below)
+
 Pleroma has the following queues:

+* `activity_expiration` - Activity expiration
 * `federator_outgoing` - Outgoing federation
 * `federator_incoming` - Incoming federation
-* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleroma-emails-mailer)
+* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleromaemailsmailer)
 * `transmogrifier` - Transmogrifier
 * `web_push` - Web push notifications
-* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivities`](#pleromascheduledactivity)
+* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivity`](#pleromascheduledactivity)

 Example:

 ```elixir
-config :pleroma_job_queue, :queues,
+config :pleroma, Oban,
+  repo: Pleroma.Repo,
+  verbose: false,
+  prune: {:maxlen, 1500},
+  queues: [
   federator_incoming: 50,
   federator_outgoing: 50
+  ]
 ```

-This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the `max_jobs` set to `50`.
+This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the number of max concurrent jobs set to `50`.

-## Pleroma.Web.Federator.RetryQueue
+### Migrating `pleroma_job_queue` settings

-* `enabled`: If set to `true`, failed federation jobs will be retried
-* `max_jobs`: The maximum amount of parallel federation jobs running at the same time.
-* `initial_timeout`: The initial timeout in seconds
-* `max_retries`: The maximum number of times a federation job is retried
+`config :pleroma_job_queue, :queues` is replaced by `config :pleroma, Oban, :queues` and uses the same format (keys are queues' names, values are max concurrent jobs numbers).
+
+### Note on running with PostgreSQL in silent mode
+
+If you are running PostgreSQL in [`silent_mode`](https://postgresqlco.nf/en/doc/param/silent_mode?version=9.1), it's advised to set [`log_destination`](https://postgresqlco.nf/en/doc/param/log_destination?version=9.1) to `syslog`,
+otherwise `postmaster.log` file may grow because of "you don't own a lock of type ShareLock" warnings (see https://github.com/sorentwo/oban/issues/52).
+
+## :workers
+
+Includes custom worker options not interpretable directly by `Oban`.
+
+* `retries` — keyword lists where keys are `Oban` queues (see above) and values are numbers of max attempts for failed jobs.
+
+Example:
+
+```elixir
+config :pleroma, :workers,
+  retries: [
+    federator_incoming: 5,
+    federator_outgoing: 5
+  ]
+```
+
+### Migrating `Pleroma.Web.Federator.RetryQueue` settings
+
+* `max_retries` is replaced with `config :pleroma, :workers, retries: [federator_outgoing: 5]`
+* `enabled: false` corresponds to `config :pleroma, :workers, retries: [federator_outgoing: 1]`
+* deprecated options: `max_jobs`, `initial_timeout`

 ## Pleroma.Web.Metadata
 * `providers`: a list of metadata providers to enable. Providers available:

@@ -489,6 +525,24 @@ config :auto_linker,
 ]
 ```

+## Pleroma.Scheduler
+
+Configuration for [Quantum](https://github.com/quantum-elixir/quantum-core) jobs scheduler.
+
+See [Quantum readme](https://github.com/quantum-elixir/quantum-core#usage) for the list of supported options.
+
+Example:
+
+```elixir
+config :pleroma, Pleroma.Scheduler,
+  global: true,
+  overlap: true,
+  timezone: :utc,
+  jobs: [{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
+```
+
+The above example defines a single job which invokes `Pleroma.Web.Websub.refresh_subscriptions()` every 6 hours ("0 */6 * * * *", [crontab format](https://en.wikipedia.org/wiki/Cron)).
+
 ## Pleroma.ScheduledActivity

 * `daily_user_limit`: the number of scheduled activities a user is allowed to create in a single day (Default: `25`)
@@ -27,7 +27,7 @@ def run(["tag"]) do
     })
   end

-  def run(["render_timeline", nickname]) do
+  def run(["render_timeline", nickname | _] = args) do
     start_pleroma()
     user = Pleroma.User.get_by_nickname(nickname)

@@ -37,33 +37,37 @@ def run(["render_timeline", nickname]) do
       |> Map.put("blocking_user", user)
       |> Map.put("muting_user", user)
      |> Map.put("user", user)
-      |> Map.put("limit", 80)
+      |> Map.put("limit", 4096)
       |> Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities()
       |> Enum.reverse()

     inputs = %{
-      "One activity" => Enum.take_random(activities, 1),
-      "Ten activities" => Enum.take_random(activities, 10),
-      "Twenty activities" => Enum.take_random(activities, 20),
-      "Forty activities" => Enum.take_random(activities, 40),
-      "Eighty activities" => Enum.take_random(activities, 80)
+      "1 activity" => Enum.take_random(activities, 1),
+      "10 activities" => Enum.take_random(activities, 10),
+      "20 activities" => Enum.take_random(activities, 20),
+      "40 activities" => Enum.take_random(activities, 40),
+      "80 activities" => Enum.take_random(activities, 80)
     }

+    inputs =
+      if Enum.at(args, 2) == "extended" do
+        Map.merge(inputs, %{
+          "200 activities" => Enum.take_random(activities, 200),
+          "500 activities" => Enum.take_random(activities, 500),
+          "2000 activities" => Enum.take_random(activities, 2000),
+          "4096 activities" => Enum.take_random(activities, 4096)
+        })
+      else
+        inputs
+      end
+
     Benchee.run(
       %{
-        "Parallel rendering" => fn activities ->
-          Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
-            activities: activities,
-            for: user,
-            as: :activity
-          })
-        end,
         "Standart rendering" => fn activities ->
           Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
             activities: activities,
             for: user,
-            as: :activity,
-            parallel: false
+            as: :activity
           })
         end
       },
@@ -0,0 +1,42 @@
+defmodule Mix.Tasks.Pleroma.Docs do
+  use Mix.Task
+  import Mix.Pleroma
+
+  @shortdoc "Generates docs from descriptions.exs"
+  @moduledoc """
+  Generates docs from `descriptions.exs`.
+
+  Supports two formats: `markdown` and `json`.
+
+  ## Generate Markdown docs
+
+  `mix pleroma.docs`
+
+  ## Generate JSON docs
+
+  `mix pleroma.docs json`
+  """
+
+  def run(["json"]) do
+    do_run(Pleroma.Docs.JSON)
+  end
+
+  def run(_) do
+    do_run(Pleroma.Docs.Markdown)
+  end
+
+  defp do_run(implementation) do
+    start_pleroma()
+
+    with {descriptions, _paths} <- Mix.Config.eval!("config/description.exs"),
+         {:ok, file_path} <-
+           Pleroma.Docs.Generator.process(
+             implementation,
+             descriptions[:pleroma][:config_description]
+           ) do
+      type = if implementation == Pleroma.Docs.Markdown, do: "Markdown", else: "JSON"
+
+      Mix.shell().info([:green, "#{type} docs successfully generated to #{file_path}."])
+    end
+  end
+end
@@ -6,6 +6,7 @@ defmodule Pleroma.Activity do
   use Ecto.Schema

   alias Pleroma.Activity
+  alias Pleroma.Activity.Queries
   alias Pleroma.ActivityExpiration
   alias Pleroma.Bookmark
   alias Pleroma.Notification

@@ -65,8 +66,8 @@ defmodule Pleroma.Activity do
     timestamps()
   end

-  def with_joined_object(query) do
-    join(query, :inner, [activity], o in Object,
+  def with_joined_object(query, join_type \\ :inner) do
+    join(query, join_type, [activity], o in Object,
       on:
         fragment(
           "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",

@@ -78,10 +79,10 @@ def with_joined_object(query) do
     )
   end

-  def with_preloaded_object(query) do
+  def with_preloaded_object(query, join_type \\ :inner) do
     query
     |> has_named_binding?(:object)
-    |> if(do: query, else: with_joined_object(query))
+    |> if(do: query, else: with_joined_object(query, join_type))
     |> preload([activity, object: object], object: object)
   end

@@ -107,12 +108,9 @@ def with_set_thread_muted_field(query, %User{} = user) do
   def with_set_thread_muted_field(query, _), do: query

   def get_by_ap_id(ap_id) do
-    Repo.one(
-      from(
-        activity in Activity,
-        where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id))
-      )
-    )
+    ap_id
+    |> Queries.by_ap_id()
+    |> Repo.one()
   end

   def get_bookmark(%Activity{} = activity, %User{} = user) do

@@ -133,21 +131,10 @@ def change(struct, params \\ %{}) do
   end

   def get_by_ap_id_with_object(ap_id) do
-    Repo.one(
-      from(
-        activity in Activity,
-        where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id)),
-        left_join: o in Object,
-        on:
-          fragment(
-            "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
-            o.data,
-            activity.data,
-            activity.data
-          ),
-        preload: [object: o]
-      )
-    )
+    ap_id
+    |> Queries.by_ap_id()
+    |> with_preloaded_object(:left)
+    |> Repo.one()
   end

   def get_by_id(id) do

@@ -158,66 +145,34 @@ def get_by_id(id) do
   end

   def get_by_id_with_object(id) do
-    from(activity in Activity,
-      where: activity.id == ^id,
-      inner_join: o in Object,
-      on:
-        fragment(
-          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
-          o.data,
-          activity.data,
-          activity.data
-        ),
-      preload: [object: o]
-    )
+    Activity
+    |> where(id: ^id)
+    |> with_preloaded_object()
     |> Repo.one()
   end

-  def by_object_ap_id(ap_id) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-          activity.data,
-          activity.data,
-          ^to_string(ap_id)
-        )
-    )
+  def all_by_ids_with_object(ids) do
+    Activity
+    |> where([a], a.id in ^ids)
+    |> with_preloaded_object()
+    |> Repo.all()
   end

-  def create_by_object_ap_id(ap_ids) when is_list(ap_ids) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
-          activity.data,
-          activity.data,
-          ^ap_ids
-        ),
-      where: fragment("(?)->>'type' = 'Create'", activity.data)
-    )
+  @doc """
+  Accepts `ap_id` or list of `ap_id`.
+  Returns a query.
+  """
+  @spec create_by_object_ap_id(String.t() | [String.t()]) :: Ecto.Queryable.t()
+  def create_by_object_ap_id(ap_id) do
+    ap_id
+    |> Queries.by_object_id()
+    |> Queries.by_type("Create")
   end

-  def create_by_object_ap_id(ap_id) when is_binary(ap_id) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-          activity.data,
-          activity.data,
-          ^to_string(ap_id)
-        ),
-      where: fragment("(?)->>'type' = 'Create'", activity.data)
-    )
-  end
-
-  def create_by_object_ap_id(_), do: nil
-
   def get_all_create_by_object_ap_id(ap_id) do
-    Repo.all(create_by_object_ap_id(ap_id))
+    ap_id
+    |> create_by_object_ap_id()
+    |> Repo.all()
   end

   def get_create_by_object_ap_id(ap_id) when is_binary(ap_id) do

@@ -228,54 +183,17 @@ def get_create_by_object_ap_id(ap_id) when is_binary(ap_id) do

   def get_create_by_object_ap_id(_), do: nil

-  def create_by_object_ap_id_with_object(ap_ids) when is_list(ap_ids) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
-          activity.data,
-          activity.data,
-          ^ap_ids
-        ),
-      where: fragment("(?)->>'type' = 'Create'", activity.data),
-      inner_join: o in Object,
-      on:
-        fragment(
-          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
-          o.data,
-          activity.data,
-          activity.data
-        ),
-      preload: [object: o]
-    )
+  @doc """
+  Accepts `ap_id` or list of `ap_id`.
+  Returns a query.
+  """
+  @spec create_by_object_ap_id_with_object(String.t() | [String.t()]) :: Ecto.Queryable.t()
+  def create_by_object_ap_id_with_object(ap_id) do
+    ap_id
+    |> create_by_object_ap_id()
+    |> with_preloaded_object()
   end

-  def create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-          activity.data,
-          activity.data,
-          ^to_string(ap_id)
-        ),
-      where: fragment("(?)->>'type' = 'Create'", activity.data),
-      inner_join: o in Object,
-      on:
-        fragment(
-          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
-          o.data,
-          activity.data,
-          activity.data
-        ),
-      preload: [object: o]
-    )
-  end
-
-  def create_by_object_ap_id_with_object(_), do: nil
-
   def get_create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
     ap_id
     |> create_by_object_ap_id_with_object()

@@ -299,7 +217,8 @@ def normalize(ap_id) when is_binary(ap_id), do: get_by_ap_id_with_object(ap_id)
   def normalize(_), do: nil

   def delete_by_ap_id(id) when is_binary(id) do
-    by_object_ap_id(id)
+    id
+    |> Queries.by_object_id()
     |> select([u], u)
     |> Repo.delete_all()
     |> elem(1)

@@ -343,31 +262,10 @@ def all_by_actor_and_id(actor, status_ids) do
   end

   def follow_requests_for_actor(%Pleroma.User{ap_id: ap_id}) do
-    from(
-      a in Activity,
-      where:
-        fragment(
-          "? ->> 'type' = 'Follow'",
-          a.data
-        ),
-      where:
-        fragment(
-          "? ->> 'state' = 'pending'",
-          a.data
-        ),
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-          a.data,
-          a.data,
-          ^ap_id
-        )
-    )
-  end
-
-  @spec query_by_actor(actor()) :: Ecto.Query.t()
-  def query_by_actor(actor) do
-    from(a in Activity, where: a.actor == ^actor)
+    ap_id
+    |> Queries.by_object_id()
+    |> Queries.by_type("Follow")
+    |> where([a], fragment("? ->> 'state' = 'pending'", a.data))
   end

   def restrict_deactivated_users(query) do
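For illustration, a minimal sketch of how the reworked `Pleroma.Activity` helpers compose; the IDs and AP id below are placeholders, not values from this commit:

```elixir
# Fetch several activities with their objects preloaded, e.g. for the new
# "get multiple statuses by IDs" endpoint mentioned in the changelog above.
activities = Pleroma.Activity.all_by_ids_with_object(["someflakeid1", "someflakeid2"])

# create_by_object_ap_id/1 now builds one query for either a single AP id or a list:
creates =
  "https://example.com/objects/1234"
  |> Pleroma.Activity.create_by_object_ap_id()
  |> Pleroma.Repo.all()
```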
@@ -0,0 +1,63 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Activity.Ir.Topics do
+  alias Pleroma.Object
+  alias Pleroma.Web.ActivityPub.Visibility
+
+  def get_activity_topics(activity) do
+    activity
+    |> Object.normalize()
+    |> generate_topics(activity)
+    |> List.flatten()
+  end
+
+  defp generate_topics(%{data: %{"type" => "Answer"}}, _) do
+    []
+  end
+
+  defp generate_topics(object, activity) do
+    ["user", "list"] ++ visibility_tags(object, activity)
+  end
+
+  defp visibility_tags(object, activity) do
+    case Visibility.get_visibility(activity) do
+      "public" ->
+        if activity.local do
+          ["public", "public:local"]
+        else
+          ["public"]
+        end
+        |> item_creation_tags(object, activity)
+
+      "direct" ->
+        ["direct"]
+
+      _ ->
+        []
+    end
+  end
+
+  defp item_creation_tags(tags, %{data: %{"type" => "Create"}} = object, activity) do
+    tags ++ hashtags_to_topics(object) ++ attachment_topics(object, activity)
+  end
+
+  defp item_creation_tags(tags, _, _) do
+    tags
+  end
+
+  defp hashtags_to_topics(%{data: %{"tag" => tags}}) do
+    tags
+    |> Enum.filter(&is_bitstring(&1))
+    |> Enum.map(fn tag -> "hashtag:" <> tag end)
+  end
+
+  defp hashtags_to_topics(_), do: []
+
+  defp attachment_topics(%{data: %{"attachment" => []}}, _act), do: []
+
+  defp attachment_topics(_object, %{local: true}), do: ["public:media", "public:local:media"]
+
+  defp attachment_topics(_object, _act), do: ["public:media"]
+end
@@ -13,6 +13,14 @@ defmodule Pleroma.Activity.Queries do

   alias Pleroma.Activity

+  @spec by_ap_id(query, String.t()) :: query
+  def by_ap_id(query \\ Activity, ap_id) do
+    from(
+      activity in query,
+      where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id))
+    )
+  end
+
   @spec by_actor(query, String.t()) :: query
   def by_actor(query \\ Activity, actor) do
     from(

@@ -21,8 +29,23 @@ def by_actor(query \\ Activity, actor) do
     )
   end

-  @spec by_object_id(query, String.t()) :: query
-  def by_object_id(query \\ Activity, object_id) do
+  @spec by_object_id(query, String.t() | [String.t()]) :: query
+  def by_object_id(query \\ Activity, object_id)
+
+  def by_object_id(query, object_ids) when is_list(object_ids) do
+    from(
+      activity in query,
+      where:
+        fragment(
+          "coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
+          activity.data,
+          activity.data,
+          ^object_ids
+        )
+    )
+  end
+
+  def by_object_id(query, object_id) when is_binary(object_id) do
     from(activity in query,
       where:
         fragment(

@@ -41,9 +64,4 @@ def by_type(query \\ Activity, activity_type) do
       where: fragment("(?)->>'type' = ?", activity.data, ^activity_type)
     )
   end
-
-  @spec limit(query, pos_integer()) :: query
-  def limit(query \\ Activity, limit) do
-    from(activity in query, limit: ^limit)
-  end
 end
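A minimal usage sketch of the new query builders, mirroring how `Pleroma.Activity.follow_requests_for_actor/1` composes them elsewhere in this commit (the actor AP id is a placeholder):

```elixir
alias Pleroma.Activity.Queries

# All "Follow" activities whose object is the given actor, with preloading and
# extra filters layered on by the caller as needed.
"https://example.com/users/alice"
|> Queries.by_object_id()
|> Queries.by_type("Follow")
|> Pleroma.Repo.all()
```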
@@ -31,18 +31,19 @@ def start(_type, _args) do
     children =
       [
         Pleroma.Repo,
+        Pleroma.Scheduler,
         Pleroma.Config.TransferTask,
         Pleroma.Emoji,
         Pleroma.Captcha,
         Pleroma.FlakeId,
-        Pleroma.ScheduledActivityWorker,
-        Pleroma.ActivityExpirationWorker
+        Pleroma.Daemons.ScheduledActivityDaemon,
+        Pleroma.Daemons.ActivityExpirationDaemon
       ] ++
         cachex_children() ++
         hackney_pool_children() ++
         [
-          Pleroma.Web.Federator.RetryQueue,
           Pleroma.Stats,
+          {Oban, Pleroma.Config.get(Oban)},
           %{
             id: :web_push_init,
             start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},

@@ -70,9 +71,7 @@ def start(_type, _args) do
     # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
     # for other strategies and supported options
     opts = [strategy: :one_for_one, name: Pleroma.Supervisor]
-    result = Supervisor.start_link(children, opts)
-    :ok = after_supervisor_start()
-    result
+    Supervisor.start_link(children, opts)
   end

   defp setup_instrumenters do

@@ -142,7 +141,7 @@ defp oauth_cleanup_enabled?,
   defp streamer_child(:test), do: []

   defp streamer_child(_) do
-    [Pleroma.Web.Streamer]
+    [Pleroma.Web.Streamer.supervisor()]
   end

   defp oauth_cleanup_child(true),

@@ -164,17 +163,4 @@ defp hackney_pool_children do
       :hackney_pool.child_spec(pool, options)
     end
   end
-
-  defp after_supervisor_start do
-    with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
-         true <- digest_config[:active] do
-      PleromaJobQueue.schedule(
-        digest_config[:schedule],
-        :digest_emails,
-        Pleroma.DigestEmailWorker
-      )
-    end
-
-    :ok
-  end
 end
@@ -2,13 +2,14 @@
 # Copyright © 2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.ActivityExpirationWorker do
+defmodule Pleroma.Daemons.ActivityExpirationDaemon do
   alias Pleroma.Activity
   alias Pleroma.ActivityExpiration
   alias Pleroma.Config
   alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI
+
   require Logger
   use GenServer
   import Ecto.Query

@@ -49,7 +50,10 @@ def perform(:execute, expiration_id) do
   def handle_info(:perform, state) do
     ActivityExpiration.due_expirations(@schedule_interval)
     |> Enum.each(fn expiration ->
-      PleromaJobQueue.enqueue(:activity_expiration, __MODULE__, [:execute, expiration.id])
+      Pleroma.Workers.ActivityExpirationWorker.enqueue(
+        "activity_expiration",
+        %{"activity_expiration_id" => expiration.id}
+      )
     end)

     schedule_next()
@@ -2,10 +2,11 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.DigestEmailWorker do
-  import Ecto.Query
+defmodule Pleroma.Daemons.DigestEmailDaemon do
+  alias Pleroma.Repo
+  alias Pleroma.Workers.DigestEmailsWorker

-  @queue_name :digest_emails
+  import Ecto.Query

   def perform do
     config = Pleroma.Config.get([:email_notifications, :digest])

@@ -20,8 +21,10 @@ def perform do
       where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"),
       select: u
     )
-    |> Pleroma.Repo.all()
-    |> Enum.each(&PleromaJobQueue.enqueue(@queue_name, __MODULE__, [&1]))
+    |> Repo.all()
+    |> Enum.each(fn user ->
+      DigestEmailsWorker.enqueue("digest_email", %{"user_id" => user.id})
+    end)
   end

   @doc """
@@ -2,7 +2,7 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.ScheduledActivityWorker do
+defmodule Pleroma.Daemons.ScheduledActivityDaemon do
   @moduledoc """
   Sends scheduled activities to the job queue.
   """

@@ -11,6 +11,7 @@ defmodule Pleroma.ScheduledActivityWorker do
   alias Pleroma.ScheduledActivity
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI

   use GenServer
   require Logger
+

@@ -45,7 +46,10 @@ def perform(:execute, scheduled_activity_id) do
   def handle_info(:perform, state) do
     ScheduledActivity.due_activities(@schedule_interval)
     |> Enum.each(fn scheduled_activity ->
-      PleromaJobQueue.enqueue(:scheduled_activities, __MODULE__, [:execute, scheduled_activity.id])
+      Pleroma.Workers.ScheduledActivityWorker.enqueue(
+        "execute",
+        %{"activity_id" => scheduled_activity.id}
+      )
     end)

     schedule_next()
@@ -0,0 +1,51 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Delivery do
+  use Ecto.Schema
+
+  alias Pleroma.Delivery
+  alias Pleroma.FlakeId
+  alias Pleroma.Object
+  alias Pleroma.Repo
+  alias Pleroma.User
+  alias Pleroma.User
+
+  import Ecto.Changeset
+  import Ecto.Query
+
+  schema "deliveries" do
+    belongs_to(:user, User, type: FlakeId)
+    belongs_to(:object, Object)
+  end
+
+  def changeset(delivery, params \\ %{}) do
+    delivery
+    |> cast(params, [:user_id, :object_id])
+    |> validate_required([:user_id, :object_id])
+    |> foreign_key_constraint(:object_id)
+    |> foreign_key_constraint(:user_id)
+    |> unique_constraint(:user_id, name: :deliveries_user_id_object_id_index)
+  end
+
+  def create(object_id, user_id) do
+    %Delivery{}
+    |> changeset(%{user_id: user_id, object_id: object_id})
+    |> Repo.insert(on_conflict: :nothing)
+  end
+
+  def get(object_id, user_id) do
+    from(d in Delivery, where: d.user_id == ^user_id and d.object_id == ^object_id)
+    |> Repo.one()
+  end
+
+  # A hack because user delete activities have a fake id for whatever reason
+  # TODO: Get rid of this
+  def delete_all_by_object_id("pleroma:fake_object_id"), do: {0, []}
+
+  def delete_all_by_object_id(object_id) do
+    from(d in Delivery, where: d.object_id == ^object_id)
+    |> Repo.delete_all()
+  end
+end
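A short sketch of how the new `Pleroma.Delivery` API might be called; `object` and `user` are assumed to be already-loaded structs and the calling code is not part of this excerpt:

```elixir
# Record that an object was delivered to a user; the unique index plus
# on_conflict: :nothing makes repeated calls effectively idempotent.
{:ok, _delivery} = Pleroma.Delivery.create(object.id, user.id)

# Look the record back up later.
delivery = Pleroma.Delivery.get(object.id, user.id)

# Drop all delivery rows for an object, e.g. when the object is deleted.
Pleroma.Delivery.delete_all_by_object_id(object.id)
```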
@@ -0,0 +1,73 @@
+defmodule Pleroma.Docs.Generator do
+  @callback process(keyword()) :: {:ok, String.t()}
+
+  @spec process(module(), keyword()) :: {:ok, String.t()}
+  def process(implementation, descriptions) do
+    implementation.process(descriptions)
+  end
+
+  @spec uploaders_list() :: [module()]
+  def uploaders_list do
+    {:ok, modules} = :application.get_key(:pleroma, :modules)
+
+    Enum.filter(modules, fn module ->
+      name_as_list = Module.split(module)
+
+      List.starts_with?(name_as_list, ["Pleroma", "Uploaders"]) and
+        List.last(name_as_list) != "Uploader"
+    end)
+  end
+
+  @spec filters_list() :: [module()]
+  def filters_list do
+    {:ok, modules} = :application.get_key(:pleroma, :modules)
+
+    Enum.filter(modules, fn module ->
+      name_as_list = Module.split(module)
+
+      List.starts_with?(name_as_list, ["Pleroma", "Upload", "Filter"])
+    end)
+  end
+
+  @spec mrf_list() :: [module()]
+  def mrf_list do
+    {:ok, modules} = :application.get_key(:pleroma, :modules)
+
+    Enum.filter(modules, fn module ->
+      name_as_list = Module.split(module)
+
+      List.starts_with?(name_as_list, ["Pleroma", "Web", "ActivityPub", "MRF"]) and
+        length(name_as_list) > 4
+    end)
+  end
+
+  @spec richmedia_parsers() :: [module()]
+  def richmedia_parsers do
+    {:ok, modules} = :application.get_key(:pleroma, :modules)
+
+    Enum.filter(modules, fn module ->
+      name_as_list = Module.split(module)
+
+      List.starts_with?(name_as_list, ["Pleroma", "Web", "RichMedia", "Parsers"]) and
+        length(name_as_list) == 5
+    end)
+  end
+end
+
+defimpl Jason.Encoder, for: Tuple do
+  def encode(tuple, opts) do
+    Jason.Encode.list(Tuple.to_list(tuple), opts)
+  end
+end
+
+defimpl Jason.Encoder, for: [Regex, Function] do
+  def encode(term, opts) do
+    Jason.Encode.string(inspect(term), opts)
+  end
+end
+
+defimpl String.Chars, for: Regex do
+  def to_string(term) do
+    inspect(term)
+  end
+end
@@ -0,0 +1,20 @@
+defmodule Pleroma.Docs.JSON do
+  @behaviour Pleroma.Docs.Generator
+
+  @spec process(keyword()) :: {:ok, String.t()}
+  def process(descriptions) do
+    config_path = "docs/generate_config.json"
+
+    with {:ok, file} <- File.open(config_path, [:write]),
+         json <- generate_json(descriptions),
+         :ok <- IO.write(file, json),
+         :ok <- File.close(file) do
+      {:ok, config_path}
+    end
+  end
+
+  @spec generate_json([keyword()]) :: String.t()
+  def generate_json(descriptions) do
+    Jason.encode!(descriptions)
+  end
+end
@@ -0,0 +1,78 @@
+defmodule Pleroma.Docs.Markdown do
+  @behaviour Pleroma.Docs.Generator
+
+  @spec process(keyword()) :: {:ok, String.t()}
+  def process(descriptions) do
+    config_path = "docs/generated_config.md"
+    {:ok, file} = File.open(config_path, [:utf8, :write])
+    IO.write(file, "# Generated configuration\n")
+    IO.write(file, "Date of generation: #{Date.utc_today()}\n\n")
+
+    IO.write(
+      file,
+      "This file describe the configuration, it is recommended to edit the relevant `*.secret.exs` file instead of the others founds in the ``config`` directory.\n\n" <>
+        "If you run Pleroma with ``MIX_ENV=prod`` the file is ``prod.secret.exs``, otherwise it is ``dev.secret.exs``.\n\n"
+    )
+
+    for group <- descriptions do
+      if is_nil(group[:key]) do
+        IO.write(file, "## #{inspect(group[:group])}\n")
+      else
+        IO.write(file, "## #{inspect(group[:key])}\n")
+      end
+
+      IO.write(file, "#{group[:description]}\n")
+
+      for child <- group[:children] do
+        print_child_header(file, child)
+
+        print_suggestions(file, child[:suggestions])
+
+        if child[:children] do
+          for subchild <- child[:children] do
+            print_child_header(file, subchild)
+
+            print_suggestions(file, subchild[:suggestions])
+          end
+        end
+      end
+
+      IO.write(file, "\n")
+    end
+
+    :ok = File.close(file)
+    {:ok, config_path}
+  end
+
+  defp print_suggestion(file, suggestion) when is_list(suggestion) do
+    IO.write(file, "  `#{inspect(suggestion)}`\n")
+  end
+
+  defp print_suggestion(file, suggestion) when is_function(suggestion) do
+    IO.write(file, "  `#{inspect(suggestion.())}`\n")
+  end
+
+  defp print_suggestion(file, suggestion, as_list \\ false) do
+    list_mark = if as_list, do: "- ", else: ""
+    IO.write(file, "  #{list_mark}`#{inspect(suggestion)}`\n")
+  end
+
+  defp print_suggestions(_file, nil), do: nil
+
+  defp print_suggestions(file, suggestions) do
+    IO.write(file, "Suggestions:\n")
+
+    if length(suggestions) > 1 do
+      for suggestion <- suggestions do
+        print_suggestion(file, suggestion, true)
+      end
+    else
+      print_suggestion(file, List.first(suggestions))
+    end
+  end
+
+  defp print_child_header(file, child) do
+    IO.write(file, "- `#{inspect(child[:key])}` -`#{inspect(child[:type])}` \n")
+    IO.write(file, "#{child[:description]} \n")
+  end
+end
@@ -9,6 +9,7 @@ defmodule Pleroma.Emails.Mailer do
   The module contains functions to delivery email using Swoosh.Mailer.
   """

+  alias Pleroma.Workers.MailerWorker
   alias Swoosh.DeliveryError

   @otp_app :pleroma

@@ -19,7 +20,12 @@ def enabled?, do: Pleroma.Config.get([__MODULE__, :enabled])

   @doc "add email to queue"
   def deliver_async(email, config \\ []) do
-    PleromaJobQueue.enqueue(:mailer, __MODULE__, [:deliver_async, email, config])
+    encoded_email =
+      email
+      |> :erlang.term_to_binary()
+      |> Base.encode64()
+
+    MailerWorker.enqueue("email", %{"encoded_email" => encoded_email, "config" => config})
   end

   @doc "callback to perform send email from queue"
@@ -9,6 +9,7 @@ defmodule Pleroma.Healthcheck do
   alias Pleroma.Healthcheck
   alias Pleroma.Repo

+  @derive Jason.Encoder
   defstruct pool_size: 0,
             active: 0,
             idle: 0,
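Since the struct now derives `Jason.Encoder`, it can be serialized directly; a minimal sketch:

```elixir
# With @derive Jason.Encoder, the healthcheck struct encodes straight to JSON;
# the fields visible in this hunk (pool_size, active, idle) default to 0.
json = Jason.encode!(%Pleroma.Healthcheck{})
```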
@@ -90,7 +90,7 @@ def set_reachable(_), do: {:error, nil}
   def set_unreachable(url_or_host, unreachable_since \\ nil)

   def set_unreachable(url_or_host, unreachable_since) when is_binary(url_or_host) do
-    unreachable_since = unreachable_since || DateTime.utc_now()
+    unreachable_since = parse_datetime(unreachable_since) || NaiveDateTime.utc_now()
     host = host(url_or_host)
     existing_record = Repo.get_by(Instance, %{host: host})

@@ -114,4 +114,10 @@ def set_unreachable(url_or_host, unreachable_since) when is_binary(url_or_host)
   end

   def set_unreachable(_, _), do: {:error, nil}
+
+  defp parse_datetime(datetime) when is_binary(datetime) do
+    NaiveDateTime.from_iso8601(datetime)
+  end
+
+  defp parse_datetime(datetime), do: datetime
 end
@@ -210,8 +210,10 @@ def create_notification(%Activity{} = activity, %User{} = user) do
     unless skip?(activity, user) do
       notification = %Notification{user_id: user.id, activity: activity}
       {:ok, notification} = Repo.insert(notification)
-      Streamer.stream("user", notification)
-      Streamer.stream("user:notification", notification)
+
+      ["user", "user:notification"]
+      |> Streamer.stream(notification)
+
       Push.send(notification)
       notification
     end
@@ -20,6 +20,7 @@ defmodule Pleroma.Plugs.Cache do

   - `ttl`: An expiration time (time-to-live). This value should be in milliseconds or `nil` to disable expiration. Defaults to `nil`.
   - `query_params`: Take URL query string into account (`true`), ignore it (`false`) or limit to specific params only (list). Defaults to `true`.
+  - `tracking_fun`: A function that is called on successful responses, regardless of whether the response was served from the cache. It should accept a conn as the first argument and the value assigned to `tracking_fun_data` as the second.

   Additionally, you can overwrite the TTL inside a controller action by assigning `cache_ttl` to the connection struct:

@@ -56,6 +57,11 @@ def call(%{method: "GET"} = conn, opts) do
       {:ok, nil} ->
         cache_resp(conn, opts)

+      {:ok, {content_type, body, tracking_fun_data}} ->
+        conn = opts.tracking_fun.(conn, tracking_fun_data)
+
+        send_cached(conn, {content_type, body})
+
       {:ok, record} ->
         send_cached(conn, record)

@@ -88,9 +94,17 @@ defp cache_resp(conn, opts) do
         ttl = Map.get(conn.assigns, :cache_ttl, opts.ttl)
         key = cache_key(conn, opts)
         content_type = content_type(conn)
-        record = {content_type, body}
-
-        Cachex.put(:web_resp_cache, key, record, ttl: ttl)
+
+        conn =
+          unless opts[:tracking_fun] do
+            Cachex.put(:web_resp_cache, key, {content_type, body}, ttl: ttl)
+            conn
+          else
+            tracking_fun_data = Map.get(conn.assigns, :tracking_fun_data, nil)
+            Cachex.put(:web_resp_cache, key, {content_type, body, tracking_fun_data}, ttl: ttl)
+
+            opts.tracking_fun.(conn, tracking_fun_data)
+          end
+
         put_resp_header(conn, "x-cache", "MISS from Pleroma")
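For illustration, a minimal sketch of how the options documented above could be wired together; only `Pleroma.Plugs.Cache`, the option names and the `:tracking_fun_data` assign come from the diff, while the module and action names are hypothetical:

# In a Phoenix controller (sketch):
plug(
  Pleroma.Plugs.Cache,
  # cache for five minutes, ignore the query string, report fetches to a tracker
  [ttl: :timer.minutes(5), query_params: false, tracking_fun: &MyApp.Tracker.track/2]
  when action in [:show]
)

# Inside the action, stash the value the tracking function should receive later:
conn = assign(conn, :tracking_fun_data, object.id)

# MyApp.Tracker.track/2 (hypothetical) then receives the conn and that value on
# both cache hits and cache misses.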
@@ -15,7 +15,8 @@ def call(%{assigns: %{valid_signature: true}} = conn, _opts) do
   end

   def call(conn, _opts) do
-    [signature | _] = get_req_header(conn, "signature")
+    headers = get_req_header(conn, "signature")
+    signature = Enum.at(headers, 0)

     if signature do
       # set (request-target) header to the appropriate value
@@ -0,0 +1,7 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Scheduler do
+  use Quantum.Scheduler, otp_app: :pleroma
+end
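Quantum schedulers are driven entirely by configuration. For illustration, a sketch of what a matching entry in `config/config.exs` could look like; the cron expression and the job target (`Pleroma.SomeModule`) are hypothetical examples, not settings from this commit:

config :pleroma, Pleroma.Scheduler,
  jobs: [
    # run an example maintenance task at the top of every hour
    {"0 * * * *", {Pleroma.SomeModule, :purge, []}}
  ]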
@@ -11,6 +11,7 @@ defmodule Pleroma.User do
   alias Comeonin.Pbkdf2
   alias Ecto.Multi
   alias Pleroma.Activity
+  alias Pleroma.Delivery
   alias Pleroma.Keys
   alias Pleroma.Notification
   alias Pleroma.Object
@@ -27,6 +28,7 @@ defmodule Pleroma.User do
   alias Pleroma.Web.OStatus
   alias Pleroma.Web.RelMe
   alias Pleroma.Web.Websub
+  alias Pleroma.Workers.BackgroundWorker

   require Logger

@@ -61,6 +63,7 @@ defmodule Pleroma.User do
     field(:last_digest_emailed_at, :naive_datetime)
     has_many(:notifications, Notification)
     has_many(:registrations, Registration)
+    has_many(:deliveries, Delivery)
     embeds_one(:info, User.Info)

     timestamps()
@@ -174,11 +177,25 @@ def following_count(%User{} = user) do
     |> Repo.aggregate(:count, :id)
   end

+  defp truncate_if_exists(params, key, max_length) do
+    if Map.has_key?(params, key) and is_binary(params[key]) do
+      {value, _chopped} = String.split_at(params[key], max_length)
+      Map.put(params, key, value)
+    else
+      params
+    end
+  end
+
   def remote_user_creation(params) do
     bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
     name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)

-    params = Map.put(params, :info, params[:info] || %{})
+    params =
+      params
+      |> Map.put(:info, params[:info] || %{})
+      |> truncate_if_exists(:name, name_limit)
+      |> truncate_if_exists(:bio, bio_limit)

     info_cng = User.Info.remote_user_creation(%User.Info{}, params[:info])

     changes =
@@ -633,8 +650,9 @@ def get_or_fetch_by_nickname(nickname) do
   end

   @doc "Fetch some posts when the user has just been federated with"
-  def fetch_initial_posts(user),
-    do: PleromaJobQueue.enqueue(:background, __MODULE__, [:fetch_initial_posts, user])
+  def fetch_initial_posts(user) do
+    BackgroundWorker.enqueue("fetch_initial_posts", %{"user_id" => user.id})
+  end

   @spec get_followers_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
   def get_followers_query(%User{} = user, nil) do
@@ -1064,7 +1082,7 @@ def unblock_domain(user, domain) do
   end

   def deactivate_async(user, status \\ true) do
-    PleromaJobQueue.enqueue(:background, __MODULE__, [:deactivate_async, user, status])
+    BackgroundWorker.enqueue("deactivate_user", %{"user_id" => user.id, "status" => status})
   end

   def deactivate(%User{} = user, status \\ true) do
@@ -1092,9 +1110,9 @@ def update_notification_settings(%User{} = user, settings \\ %{}) do
     |> update_and_set_cache()
   end

-  @spec delete(User.t()) :: :ok
-  def delete(%User{} = user),
-    do: PleromaJobQueue.enqueue(:background, __MODULE__, [:delete, user])
+  def delete(%User{} = user) do
+    BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
+  end

   @spec perform(atom(), User.t()) :: {:ok, User.t()}
   def perform(:delete, %User{} = user) do
@@ -1201,25 +1219,24 @@ def external_users(opts \\ []) do
     Repo.all(query)
   end

-  def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
-    do:
-      PleromaJobQueue.enqueue(:background, __MODULE__, [
-        :blocks_import,
-        blocker,
-        blocked_identifiers
-      ])
+  def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do
+    BackgroundWorker.enqueue("blocks_import", %{
+      "blocker_id" => blocker.id,
+      "blocked_identifiers" => blocked_identifiers
+    })
+  end

-  def follow_import(%User{} = follower, followed_identifiers) when is_list(followed_identifiers),
-    do:
-      PleromaJobQueue.enqueue(:background, __MODULE__, [
-        :follow_import,
-        follower,
-        followed_identifiers
-      ])
+  def follow_import(%User{} = follower, followed_identifiers)
+      when is_list(followed_identifiers) do
+    BackgroundWorker.enqueue("follow_import", %{
+      "follower_id" => follower.id,
+      "followed_identifiers" => followed_identifiers
+    })
+  end

   def delete_user_activities(%User{ap_id: ap_id} = user) do
     ap_id
-    |> Activity.query_by_actor()
+    |> Activity.Queries.by_actor()
     |> RepoStreamer.chunk_stream(50)
     |> Stream.each(fn activities ->
       Enum.each(activities, &delete_activity(&1))
@@ -1624,4 +1641,25 @@ defp put_password_hash(changeset), do: changeset
   def is_internal_user?(%User{nickname: nil}), do: true
   def is_internal_user?(%User{local: true, nickname: "internal." <> _}), do: true
   def is_internal_user?(_), do: false
+
+  # A hack because user delete activities have a fake id for whatever reason
+  # TODO: Get rid of this
+  def get_delivered_users_by_object_id("pleroma:fake_object_id"), do: []
+
+  def get_delivered_users_by_object_id(object_id) do
+    from(u in User,
+      inner_join: delivery in assoc(u, :deliveries),
+      where: delivery.object_id == ^object_id
+    )
+    |> Repo.all()
+  end
+
+  def change_email(user, email) do
+    user
+    |> cast(%{email: email}, [:email])
+    |> validate_required([:email])
+    |> unique_constraint(:email)
+    |> validate_format(:email, @email_regex)
+    |> update_and_set_cache()
+  end
 end
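The `BackgroundWorker.enqueue/2` calls above replace direct `PleromaJobQueue` invocations with a named job type plus JSON-friendly arguments (ids rather than structs). A minimal sketch of what such a worker could look like under current Oban conventions — the real `Pleroma.Workers.BackgroundWorker` may differ in callback signature, queue names and helper functions, so treat this as an assumption:

defmodule ExampleBackgroundWorker do
  use Oban.Worker, queue: :background

  # Convenience wrapper mirroring the BackgroundWorker.enqueue/2 calls above (assumption).
  def enqueue(op, args) when is_map(args) do
    args |> Map.put("op", op) |> new() |> Oban.insert()
  end

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"op" => "deactivate_user", "user_id" => user_id, "status" => status}}) do
    # Re-hydrate the struct from the serialised id before doing the actual work.
    user_id |> Pleroma.User.get_cached_by_id() |> Pleroma.User.deactivate(status)
    :ok
  end
end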
@@ -242,6 +242,13 @@ def set_keys(info, keys) do
   end

   def remote_user_creation(info, params) do
+    params =
+      if Map.has_key?(params, :fields) do
+        Map.put(params, :fields, Enum.map(params[:fields], &truncate_field/1))
+      else
+        params
+      end
+
     info
     |> cast(params, [
       :ap_enabled,
@@ -324,6 +331,16 @@ defp valid_field?(%{"name" => name, "value" => value}) do

   defp valid_field?(_), do: false

+  defp truncate_field(%{"name" => name, "value" => value}) do
+    {name, _chopped} =
+      String.split_at(name, Pleroma.Config.get([:instance, :account_field_name_length], 255))
+
+    {value, _chopped} =
+      String.split_at(value, Pleroma.Config.get([:instance, :account_field_value_length], 255))
+
+    %{"name" => name, "value" => value}
+  end
+
   @spec confirmation_changeset(Info.t(), keyword()) :: Changeset.t()
   def confirmation_changeset(info, opts) do
     need_confirmation? = Keyword.get(opts, :need_confirmation)
@@ -4,6 +4,7 @@
 defmodule Pleroma.Web.ActivityPub.ActivityPub do
   alias Pleroma.Activity
+  alias Pleroma.Activity.Ir.Topics
   alias Pleroma.Config
   alias Pleroma.Conversation
   alias Pleroma.Notification
@@ -16,7 +17,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.MRF
   alias Pleroma.Web.ActivityPub.Transmogrifier
+  alias Pleroma.Web.Streamer
   alias Pleroma.Web.WebFinger
+  alias Pleroma.Workers.BackgroundWorker

   import Ecto.Query
   import Pleroma.Web.ActivityPub.Utils
@@ -145,7 +148,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
         activity
       end

-    PleromaJobQueue.enqueue(:background, Pleroma.Web.RichMedia.Helpers, [:fetch, activity])
+    BackgroundWorker.enqueue("fetch_data_for_activity", %{"activity_id" => activity.id})

     Notification.create_notifications(activity)

@@ -186,9 +189,7 @@ def stream_out_participations(participations) do
     participations
     |> Repo.preload(:user)

-    Enum.each(participations, fn participation ->
-      Pleroma.Web.Streamer.stream("participation", participation)
-    end)
+    Streamer.stream("participation", participations)
   end

   def stream_out_participations(%Object{data: %{"context" => context}}, user) do
@@ -207,41 +208,15 @@ def stream_out_participations(%Object{data: %{"context" => context}}, user) do

   def stream_out_participations(_, _), do: :noop

-  def stream_out(activity) do
-    if activity.data["type"] in ["Create", "Announce", "Delete"] do
-      object = Object.normalize(activity)
-      # Do not stream out poll replies
-      unless object.data["type"] == "Answer" do
-        Pleroma.Web.Streamer.stream("user", activity)
-        Pleroma.Web.Streamer.stream("list", activity)
-
-        if get_visibility(activity) == "public" do
-          Pleroma.Web.Streamer.stream("public", activity)
-
-          if activity.local do
-            Pleroma.Web.Streamer.stream("public:local", activity)
-          end
-
-          if activity.data["type"] in ["Create"] do
-            object.data
-            |> Map.get("tag", [])
-            |> Enum.filter(fn tag -> is_bitstring(tag) end)
-            |> Enum.each(fn tag -> Pleroma.Web.Streamer.stream("hashtag:" <> tag, activity) end)
-
-            if object.data["attachment"] != [] do
-              Pleroma.Web.Streamer.stream("public:media", activity)
-
-              if activity.local do
-                Pleroma.Web.Streamer.stream("public:local:media", activity)
-              end
-            end
-          end
-        else
-          if get_visibility(activity) == "direct",
-            do: Pleroma.Web.Streamer.stream("direct", activity)
-        end
-      end
-    end
-  end
+  def stream_out(%Activity{data: %{"type" => data_type}} = activity)
+      when data_type in ["Create", "Announce", "Delete"] do
+    activity
+    |> Topics.get_activity_topics()
+    |> Streamer.stream(activity)
+  end
+
+  def stream_out(_activity) do
+    :noop
+  end

   def create(%{to: to, actor: actor, context: context, object: object} = params, fake \\ false) do
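For illustration, the new flow reads roughly as follows; the exact topic list is computed by `Pleroma.Activity.Ir.Topics.get_activity_topics/1` and depends on the activity's type, visibility, tags and attachments, so the concrete topics in the comment are only an assumed example:

topics = Pleroma.Activity.Ir.Topics.get_activity_topics(activity)
# e.g. something like ["user", "list", "public", "public:local", "hashtag:pleroma"]

Pleroma.Web.Streamer.stream(topics, activity)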
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
   use Pleroma.Web, :controller

   alias Pleroma.Activity
+  alias Pleroma.Delivery
   alias Pleroma.Object
   alias Pleroma.Object.Fetcher
   alias Pleroma.User
@@ -23,7 +24,12 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do

   action_fallback(:errors)

-  plug(Pleroma.Plugs.Cache, [query_params: false] when action in [:activity, :object])
+  plug(
+    Pleroma.Plugs.Cache,
+    [query_params: false, tracking_fun: &__MODULE__.track_object_fetch/2]
+    when action in [:activity, :object]
+  )
+
   plug(Pleroma.Web.FederatingPlug when action in [:inbox, :relay])
   plug(:set_requester_reachable when action in [:inbox])
   plug(:relay_active? when action in [:relay])
@@ -54,6 +60,7 @@ def object(conn, %{"uuid" => uuid}) do
          %Object{} = object <- Object.get_cached_by_ap_id(ap_id),
          {_, true} <- {:public?, Visibility.is_public?(object)} do
       conn
+      |> assign(:tracking_fun_data, object.id)
       |> set_cache_ttl_for(object)
       |> put_resp_content_type("application/activity+json")
       |> put_view(ObjectView)
@@ -64,6 +71,16 @@ def object(conn, %{"uuid" => uuid}) do
     end
   end

+  def track_object_fetch(conn, nil), do: conn
+
+  def track_object_fetch(conn, object_id) do
+    with %{assigns: %{user: %User{id: user_id}}} <- conn do
+      Delivery.create(object_id, user_id)
+    end
+
+    conn
+  end
+
   def object_likes(conn, %{"uuid" => uuid, "page" => page}) do
     with ap_id <- o_status_url(conn, :object, uuid),
          %Object{} = object <- Object.get_cached_by_ap_id(ap_id),
@@ -99,6 +116,7 @@ def activity(conn, %{"uuid" => uuid}) do
          %Activity{} = activity <- Activity.normalize(ap_id),
          {_, true} <- {:public?, Visibility.is_public?(activity)} do
       conn
+      |> maybe_set_tracking_data(activity)
       |> set_cache_ttl_for(activity)
       |> put_resp_content_type("application/activity+json")
       |> put_view(ObjectView)
@@ -109,6 +127,13 @@ def activity(conn, %{"uuid" => uuid}) do
     end
   end

+  defp maybe_set_tracking_data(conn, %Activity{data: %{"type" => "Create"}} = activity) do
+    object_id = Object.normalize(activity).id
+    assign(conn, :tracking_fun_data, object_id)
+  end
+
+  defp maybe_set_tracking_data(conn, _activity), do: conn
+
   defp set_cache_ttl_for(conn, %Activity{object: object}) do
     set_cache_ttl_for(conn, object)
   end
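Taken together with the cache-plug changes earlier, the intended round trip is: the controller assigns `:tracking_fun_data` (an object id), the cache plug stores that value next to the cached body and hands it back on later hits, and `track_object_fetch/2` records a `Delivery` for the fetching user. A condensed sketch of that flow, using names from the diff with illustrative values:

# First (uncached) request: the action records which object was served.
conn = Plug.Conn.assign(conn, :tracking_fun_data, object.id)

# On every successful response, cached or not, the plug invokes the tracking_fun:
conn = Pleroma.Web.ActivityPub.ActivityPubController.track_object_fetch(conn, object.id)
# ...which calls Delivery.create(object_id, user_id) when an identified user made the fetch.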
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do

   alias Pleroma.HTTP
   alias Pleroma.Web.MediaProxy
+  alias Pleroma.Workers.BackgroundWorker

   require Logger

@@ -30,7 +31,7 @@ def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message)
     url
     |> Enum.each(fn
       %{"href" => href} ->
-        PleromaJobQueue.enqueue(:background, __MODULE__, [:prefetch, href])
+        BackgroundWorker.enqueue("media_proxy_prefetch", %{"url" => href})

       x ->
         Logger.debug("Unhandled attachment URL object #{inspect(x)}")
@@ -46,7 +47,7 @@ def filter(
         %{"type" => "Create", "object" => %{"attachment" => attachments} = _object} = message
       )
       when is_list(attachments) and length(attachments) > 0 do
-    PleromaJobQueue.enqueue(:background, __MODULE__, [:preload, message])
+    BackgroundWorker.enqueue("media_proxy_preload", %{"message" => message})

     {:ok, message}
   end
@@ -5,8 +5,10 @@
 defmodule Pleroma.Web.ActivityPub.Publisher do
   alias Pleroma.Activity
   alias Pleroma.Config
+  alias Pleroma.Delivery
   alias Pleroma.HTTP
   alias Pleroma.Instances
+  alias Pleroma.Object
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.Relay
   alias Pleroma.Web.ActivityPub.Transmogrifier
@@ -84,6 +86,15 @@ def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = pa
     end
   end

+  def publish_one(%{actor_id: actor_id} = params) do
+    actor = User.get_cached_by_id(actor_id)
+
+    params
+    |> Map.delete(:actor_id)
+    |> Map.put(:actor, actor)
+    |> publish_one()
+  end
+
   defp should_federate?(inbox, public) do
     if public do
       true
@@ -107,7 +118,18 @@ defp recipients(actor, activity) do
         {:ok, []}
       end

-    Pleroma.Web.Salmon.remote_users(actor, activity) ++ followers
+    fetchers =
+      with %Activity{data: %{"type" => "Delete"}} <- activity,
+           %Object{id: object_id} <- Object.normalize(activity),
+           fetchers <- User.get_delivered_users_by_object_id(object_id),
+           _ <- Delivery.delete_all_by_object_id(object_id) do
+        fetchers
+      else
+        _ ->
+          []
+      end
+
+    Pleroma.Web.Salmon.remote_users(actor, activity) ++ followers ++ fetchers
   end

   defp get_cc_ap_ids(ap_id, recipients) do
@@ -159,7 +181,8 @@ def determine_inbox(
   Publishes an activity with BCC to all relevant peers.
   """

-  def publish(actor, %{data: %{"bcc" => bcc}} = activity) when is_list(bcc) and bcc != [] do
+  def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
+      when is_list(bcc) and bcc != [] do
     public = is_public?(activity)
     {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)

@@ -186,7 +209,7 @@ def publish(actor, %{data: %{"bcc" => bcc}} = activity) when is_list(bcc) and bc
       Pleroma.Web.Federator.Publisher.enqueue_one(__MODULE__, %{
         inbox: inbox,
         json: json,
-        actor: actor,
+        actor_id: actor.id,
         id: activity.data["id"],
         unreachable_since: unreachable_since
       })
@@ -221,7 +244,7 @@ def publish(%User{} = actor, %Activity{} = activity) do
       %{
         inbox: inbox,
         json: json,
-        actor: actor,
+        actor_id: actor.id,
         id: activity.data["id"],
         unreachable_since: unreachable_since
       }
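Switching the job payload from `actor: actor` to `actor_id: actor.id` keeps the enqueued parameters serialisable, and the new `publish_one/1` clause re-hydrates the struct before delegating to the existing clause. A minimal sketch of that round trip (values illustrative):

params = %{inbox: inbox, json: json, actor_id: actor.id, id: activity.data["id"]}

# Inside the worker, the clause added above effectively does:
actor = Pleroma.User.get_cached_by_id(params.actor_id)
params |> Map.delete(:actor_id) |> Map.put(:actor, actor) |> publish_one()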
@@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
   alias Pleroma.Web.Federator
+  alias Pleroma.Workers.TransmogrifierWorker

   import Ecto.Query

@@ -185,12 +186,12 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
         |> Map.put("context", replied_object.data["context"] || object["conversation"])
       else
         e ->
-          Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
+          Logger.error("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
           object
       end

     e ->
-      Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
+      Logger.error("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
       object
   end
 else
@@ -1051,7 +1052,7 @@ def upgrade_user_from_ap_id(ap_id) do
          already_ap <- User.ap_enabled?(user),
          {:ok, user} <- user |> User.upgrade_changeset(data) |> User.update_and_set_cache() do
       unless already_ap do
-        PleromaJobQueue.enqueue(:transmogrifier, __MODULE__, [:user_upgrade, user])
+        TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
       end

       {:ok, user}
@@ -85,15 +85,13 @@ defp extract_list(lst) when is_list(lst), do: lst
   defp extract_list(_), do: []

   def maybe_splice_recipient(ap_id, params) do
-    need_splice =
+    need_splice? =
       !recipient_in_collection(ap_id, params["to"]) &&
         !recipient_in_collection(ap_id, params["cc"])

-    cc_list = extract_list(params["cc"])
-
-    if need_splice do
-      params
-      |> Map.put("cc", [ap_id | cc_list])
+    if need_splice? do
+      cc_list = extract_list(params["cc"])
+      Map.put(params, "cc", [ap_id | cc_list])
     else
       params
     end
@@ -139,7 +137,7 @@ def get_notified_from_object(%{"type" => type} = object) when type in @supported
       "object" => object
     }

-    Notification.get_notified_from_activity(%Activity{data: fake_create_activity}, false)
+    get_notified_from_object(fake_create_activity)
   end

   def get_notified_from_object(object) do
@@ -169,14 +167,7 @@ def create_context(context) do
   @spec maybe_federate(any()) :: :ok
   def maybe_federate(%Activity{local: true} = activity) do
     if Pleroma.Config.get!([:instance, :federating]) do
-      priority =
-        case activity.data["type"] do
-          "Delete" -> 10
-          "Create" -> 1
-          _ -> 5
-        end
-
-      Pleroma.Web.Federator.publish(activity, priority)
+      Pleroma.Web.Federator.publish(activity)
     end

     :ok
@@ -188,9 +179,9 @@ def maybe_federate(_), do: :ok
   Adds an id and a published data if they aren't there,
   also adds it to an included object
   """
-  def lazy_put_activity_defaults(map, fake \\ false) do
+  def lazy_put_activity_defaults(map, fake? \\ false) do
     map =
-      unless fake do
+      if not fake? do
         %{data: %{"id" => context}, id: context_id} = create_context(map["context"])

         map
@@ -207,7 +198,7 @@ def lazy_put_activity_defaults(map, fake \\ false) do
       end

     if is_map(map["object"]) do
-      object = lazy_put_object_defaults(map["object"], map, fake)
+      object = lazy_put_object_defaults(map["object"], map, fake?)
       %{map | "object" => object}
     else
       map
@@ -217,9 +208,9 @@ def lazy_put_activity_defaults(map, fake \\ false) do
   @doc """
   Adds an id and published date if they aren't there.
   """
-  def lazy_put_object_defaults(map, activity \\ %{}, fake)
+  def lazy_put_object_defaults(map, activity \\ %{}, fake?)

-  def lazy_put_object_defaults(map, activity, true = _fake) do
+  def lazy_put_object_defaults(map, activity, true = _fake?) do
     map
     |> Map.put_new_lazy("published", &make_date/0)
     |> Map.put_new("id", "pleroma:fake_object_id")
@@ -228,7 +219,7 @@ def lazy_put_object_defaults(map, activity, true = _fake) do
     |> Map.put_new("context_id", activity["context_id"])
   end

-  def lazy_put_object_defaults(map, activity, _fake) do
+  def lazy_put_object_defaults(map, activity, _fake?) do
     map
     |> Map.put_new_lazy("id", &generate_object_id/0)
     |> Map.put_new_lazy("published", &make_date/0)
@@ -242,9 +233,7 @@ def lazy_put_object_defaults(map, activity, _fake) do
   def insert_full_object(%{"object" => %{"type" => type} = object_data} = map)
       when is_map(object_data) and type in @supported_object_types do
     with {:ok, object} <- Object.create(object_data) do
-      map =
-        map
-        |> Map.put("object", object.data["id"])
+      map = Map.put(map, "object", object.data["id"])

       {:ok, map, object}
     end
@@ -263,7 +252,7 @@ def get_existing_like(actor, %{data: %{"id" => id}}) do
     |> Activity.Queries.by_actor()
     |> Activity.Queries.by_object_id(id)
     |> Activity.Queries.by_type("Like")
-    |> Activity.Queries.limit(1)
+    |> limit(1)
     |> Repo.one()
   end

@@ -380,12 +369,11 @@ def update_follow_state(
         %Activity{data: %{"actor" => actor, "object" => object}} = activity,
         state
       ) do
-    with new_data <-
-           activity.data
-           |> Map.put("state", state),
-         changeset <- Changeset.change(activity, data: new_data),
-         {:ok, activity} <- Repo.update(changeset),
-         _ <- User.set_follow_state_cache(actor, object, state) do
+    new_data = Map.put(activity.data, "state", state)
+    changeset = Changeset.change(activity, data: new_data)
+
+    with {:ok, activity} <- Repo.update(changeset) do
+      User.set_follow_state_cache(actor, object, state)
       {:ok, activity}
     end
   end
@@ -410,28 +398,14 @@ def make_follow_data(
   end

   def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
-    query =
-      from(
-        activity in Activity,
-        where:
-          fragment(
-            "? ->> 'type' = 'Follow'",
-            activity.data
-          ),
-        where: activity.actor == ^follower_id,
-        # this is to use the index
-        where:
-          fragment(
-            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-            activity.data,
-            activity.data,
-            ^followed_id
-          ),
-        order_by: [fragment("? desc nulls last", activity.id)],
-        limit: 1
-      )
-
-    Repo.one(query)
+    "Follow"
+    |> Activity.Queries.by_type()
+    |> where(actor: ^follower_id)
+    # this is to use the index
+    |> Activity.Queries.by_object_id(followed_id)
+    |> order_by([activity], fragment("? desc nulls last", activity.id))
+    |> limit(1)
+    |> Repo.one()
   end

   #### Announce-related helpers
@@ -439,23 +413,13 @@ def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
   @doc """
   Retruns an existing announce activity if the notice has already been announced
   """
-  def get_existing_announce(actor, %{data: %{"id" => id}}) do
-    query =
-      from(
-        activity in Activity,
-        where: activity.actor == ^actor,
-        # this is to use the index
-        where:
-          fragment(
-            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-            activity.data,
-            activity.data,
-            ^id
-          ),
-        where: fragment("(?)->>'type' = 'Announce'", activity.data)
-      )
-
-    Repo.one(query)
+  def get_existing_announce(actor, %{data: %{"id" => ap_id}}) do
+    "Announce"
+    |> Activity.Queries.by_type()
+    |> where(actor: ^actor)
+    # this is to use the index
+    |> Activity.Queries.by_object_id(ap_id)
+    |> Repo.one()
   end

   @doc """
@@ -538,11 +502,13 @@ def add_announce_to_object(
         object
       ) do
     announcements =
-      if is_list(object.data["announcements"]), do: object.data["announcements"], else: []
-
-    with announcements <- [actor | announcements] |> Enum.uniq() do
-      update_element_in_object("announcement", announcements, object)
-    end
+      if is_list(object.data["announcements"]) do
+        Enum.uniq([actor | object.data["announcements"]])
+      else
+        [actor]
+      end
+
+    update_element_in_object("announcement", announcements, object)
   end

   def add_announce_to_object(_, object), do: {:ok, object}
@@ -570,28 +536,14 @@ def make_unfollow_data(follower, followed, follow_activity, activity_id) do

   #### Block-related helpers
   def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
-    query =
-      from(
-        activity in Activity,
-        where:
-          fragment(
-            "? ->> 'type' = 'Block'",
-            activity.data
-          ),
-        where: activity.actor == ^blocker_id,
-        # this is to use the index
-        where:
-          fragment(
-            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-            activity.data,
-            activity.data,
-            ^blocked_id
-          ),
-        order_by: [fragment("? desc nulls last", activity.id)],
-        limit: 1
-      )
-
-    Repo.one(query)
+    "Block"
+    |> Activity.Queries.by_type()
+    |> where(actor: ^blocker_id)
+    # this is to use the index
+    |> Activity.Queries.by_object_id(blocked_id)
+    |> order_by([activity], fragment("? desc nulls last", activity.id))
+    |> limit(1)
+    |> Repo.one()
   end

   def make_block_data(blocker, blocked, activity_id) do
@@ -695,11 +647,11 @@ def fetch_ordered_collection(from, pages_left, acc \\ []) do
   #### Report-related helpers

   def update_report_state(%Activity{} = activity, state) when state in @supported_report_states do
-    with new_data <- Map.put(activity.data, "state", state),
-         changeset <- Changeset.change(activity, data: new_data),
-         {:ok, activity} <- Repo.update(changeset) do
-      {:ok, activity}
-    end
+    new_data = Map.put(activity.data, "state", state)
+
+    activity
+    |> Changeset.change(data: new_data)
+    |> Repo.update()
   end

   def update_report_state(_, _), do: {:error, "Unsupported state"}
@@ -766,21 +718,13 @@ defp get_updated_targets(
   end

   def get_existing_votes(actor, %{data: %{"id" => id}}) do
-    query =
-      from(
-        [activity, object: object] in Activity.with_preloaded_object(Activity),
-        where: fragment("(?)->>'type' = 'Create'", activity.data),
-        where: fragment("(?)->>'actor' = ?", activity.data, ^actor),
-        where:
-          fragment(
-            "(?)->>'inReplyTo' = ?",
-            object.data,
-            ^to_string(id)
-          ),
-        where: fragment("(?)->>'type' = 'Answer'", object.data)
-      )
-
-    Repo.all(query)
+    actor
+    |> Activity.Queries.by_actor()
+    |> Activity.Queries.by_type("Create")
+    |> Activity.with_preloaded_object()
+    |> where([a, object: o], fragment("(?)->>'inReplyTo' = ?", o.data, ^to_string(id)))
+    |> where([a, object: o], fragment("(?)->>'type' = 'Answer'", o.data))
+    |> Repo.all()
   end

   defp maybe_put(map, _key, nil), do: map
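Throughout this module, hand-written `from/2` expressions with raw SQL fragments are replaced by composable helpers from `Activity.Queries`. For illustration, the same pattern applied to a hypothetical "latest Like by an actor on an object" query; the helper names come from the diff, but this particular combination is an example rather than code from the commit (`actor_ap_id` and `object_ap_id` are placeholder variables):

import Ecto.Query

"Like"
|> Pleroma.Activity.Queries.by_type()
|> where(actor: ^actor_ap_id)
|> Pleroma.Activity.Queries.by_object_id(object_ap_id)
|> order_by([activity], fragment("? desc nulls last", activity.id))
|> limit(1)
|> Pleroma.Repo.one()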
@@ -90,6 +90,8 @@ defp do_convert(entity) when is_list(entity) do
     for v <- entity, into: [], do: do_convert(v)
   end

+  defp do_convert(%Regex{} = entity), do: inspect(entity)
+
   defp do_convert(entity) when is_map(entity) do
     for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
   end
@@ -122,7 +124,7 @@ def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity

   def transform(entity), do: :erlang.term_to_binary(entity)

-  defp do_transform(%Regex{} = entity) when is_map(entity), do: entity
+  defp do_transform(%Regex{} = entity), do: entity

   defp do_transform(%{"tuple" => [":dispatch", [entity]]}) do
     {dispatch_settings, []} = do_eval(entity)
@@ -154,8 +156,15 @@ defp do_transform(entity) when is_binary(entity) do
   defp do_transform(entity), do: entity

   defp do_transform_string("~r/" <> pattern) do
-    pattern = String.trim_trailing(pattern, "/")
-    ~r/#{pattern}/
+    modificator = String.split(pattern, "/") |> List.last()
+    pattern = String.trim_trailing(pattern, "/" <> modificator)
+
+    case modificator do
+      "" -> ~r/#{pattern}/
+      "i" -> ~r/#{pattern}/i
+      "u" -> ~r/#{pattern}/u
+      "s" -> ~r/#{pattern}/s
+    end
   end

   defp do_transform_string(":" <> atom), do: String.to_atom(atom)
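The rewritten `do_transform_string/1` now preserves a trailing regex modifier instead of silently dropping it. For illustration, the behaviour this implies for a few stored strings (inferred from the code above):

# "~r/foo/"   -> ~r/foo/
# "~r/foo/i"  -> ~r/foo/i   (case-insensitive)
# "~r/foo/u"  -> ~r/foo/u   (unicode)
# "~r/foo/s"  -> ~r/foo/s   (dotall)
Regex.match?(~r/foo/i, "FOO")
#=> true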
@@ -34,27 +34,14 @@ defp param_to_integer(val, default) when is_binary(val) do

   defp param_to_integer(_, default), do: default

-  def add_link_headers(
-        conn,
-        method,
-        activities,
-        param \\ nil,
-        params \\ %{},
-        func3 \\ nil,
-        func4 \\ nil
-      ) do
-    params =
-      conn.params
-      |> Map.drop(["since_id", "max_id", "min_id"])
-      |> Map.merge(params)
-
-    last = List.last(activities)
-
-    func3 = func3 || (&mastodon_api_url/3)
-    func4 = func4 || (&mastodon_api_url/4)
-
-    if last do
-      max_id = last.id
-
-      limit =
-        params
+  def add_link_headers(conn, activities, extra_params \\ %{}) do
+    case List.last(activities) do
+      %{id: max_id} ->
+        params =
+          conn.params
+          |> Map.drop(Map.keys(conn.path_params))
+          |> Map.drop(["since_id", "max_id", "min_id"])
+          |> Map.merge(extra_params)
+
+        limit =
+          params
@@ -72,40 +59,12 @@ def add_link_headers(
         |> Map.get(:id)
       end

-      {next_url, prev_url} =
-        if param do
-          {
-            func4.(
-              Pleroma.Web.Endpoint,
-              method,
-              param,
-              Map.merge(params, %{max_id: max_id})
-            ),
-            func4.(
-              Pleroma.Web.Endpoint,
-              method,
-              param,
-              Map.merge(params, %{min_id: min_id})
-            )
-          }
-        else
-          {
-            func3.(
-              Pleroma.Web.Endpoint,
-              method,
-              Map.merge(params, %{max_id: max_id})
-            ),
-            func3.(
-              Pleroma.Web.Endpoint,
-              method,
-              Map.merge(params, %{min_id: min_id})
-            )
-          }
-        end
-
-      conn
-      |> put_resp_header("link", "<#{next_url}>; rel=\"next\", <#{prev_url}>; rel=\"prev\"")
-    else
+        next_url = current_url(conn, Map.merge(params, %{max_id: max_id}))
+        prev_url = current_url(conn, Map.merge(params, %{min_id: min_id}))
+
+        put_resp_header(conn, "link", "<#{next_url}>; rel=\"next\", <#{prev_url}>; rel=\"prev\"")
+
+      _ ->
         conn
     end
   end
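With the simplified `add_link_headers/3`, callers no longer pass route helpers: the pagination URLs are derived from the current request via `current_url/2`. A usage sketch, assuming `activities` is a list of structs with an `:id` field (which is what the new code pattern-matches on) and an illustrative extra query parameter:

# In a Mastodon API controller action:
conn
|> add_link_headers(activities, %{with_muted: true})
|> render("index.json", activities: activities)

# The emitted header has the shape:
#   link: <https://example.tld/api/v1/timelines/home?max_id=...>; rel="next",
#         <https://example.tld/api/v1/timelines/home?min_id=...>; rel="prev"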
@@ -10,16 +10,17 @@ defmodule Pleroma.Web.Federator do
   alias Pleroma.Web.ActivityPub.Transmogrifier
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.Federator.Publisher
-  alias Pleroma.Web.Federator.RetryQueue
   alias Pleroma.Web.OStatus
   alias Pleroma.Web.Websub
+  alias Pleroma.Workers.PublisherWorker
+  alias Pleroma.Workers.ReceiverWorker
+  alias Pleroma.Workers.SubscriberWorker

   require Logger

   def init do
-    # 1 minute
-    Process.sleep(1000 * 60)
-    refresh_subscriptions()
+    # To do: consider removing this call in favor of scheduled execution (`quantum`-based)
+    refresh_subscriptions(schedule_in: 60)
   end

   @doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
@@ -37,50 +38,38 @@ def allowed_incoming_reply_depth?(depth) do
   # Client API

   def incoming_doc(doc) do
-    PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc])
+    ReceiverWorker.enqueue("incoming_doc", %{"body" => doc})
   end

   def incoming_ap_doc(params) do
-    PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params])
+    ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params})
   end

-  def publish(activity, priority \\ 1) do
-    PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority)
+  def publish(%{id: "pleroma:fakeid"} = activity) do
+    perform(:publish, activity)
+  end
+
+  def publish(activity) do
+    PublisherWorker.enqueue("publish", %{"activity_id" => activity.id})
   end

   def verify_websub(websub) do
-    PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub])
+    SubscriberWorker.enqueue("verify_websub", %{"websub_id" => websub.id})
   end

-  def request_subscription(sub) do
-    PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub])
+  def request_subscription(websub) do
+    SubscriberWorker.enqueue("request_subscription", %{"websub_id" => websub.id})
   end

-  def refresh_subscriptions do
-    PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions])
+  def refresh_subscriptions(worker_args \\ []) do
+    SubscriberWorker.enqueue("refresh_subscriptions", %{}, worker_args ++ [max_attempts: 1])
   end

   # Job Worker Callbacks

-  def perform(:refresh_subscriptions) do
-    Logger.debug("Federator running refresh subscriptions")
-    Websub.refresh_subscriptions()
-
-    spawn(fn ->
-      # 6 hours
-      Process.sleep(1000 * 60 * 60 * 6)
-      refresh_subscriptions()
-    end)
-  end
-
-  def perform(:request_subscription, websub) do
-    Logger.debug("Refreshing #{websub.topic}")
-
-    with {:ok, websub} <- Websub.request_subscription(websub) do
-      Logger.debug("Successfully refreshed #{websub.topic}")
-    else
-      _e -> Logger.debug("Couldn't refresh #{websub.topic}")
-    end
+  @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
+  def perform(:publish_one, module, params) do
+    apply(module, :publish_one, [params])
   end

   def perform(:publish, activity) do
@@ -92,14 +81,6 @@ def perform(:publish, activity) do
     end
   end

-  def perform(:verify_websub, websub) do
-    Logger.debug(fn ->
-      "Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
-    end)
-
-    Websub.verify(websub)
-  end
-
   def perform(:incoming_doc, doc) do
     Logger.info("Got document, trying to parse")
     OStatus.handle_incoming(doc)
@@ -130,22 +111,27 @@ def perform(:incoming_ap_doc, params) do
     end
   end

-  def perform(
-        :publish_single_websub,
-        %{xml: _xml, topic: _topic, callback: _callback, secret: _secret} = params
-      ) do
-    case Websub.publish_one(params) do
-      {:ok, _} ->
-        :ok
-
-      {:error, _} ->
-        RetryQueue.enqueue(params, Websub)
-    end
-  end
-
-  def perform(type, _) do
-    Logger.debug(fn -> "Unknown task: #{type}" end)
-    {:error, "Don't know what to do with this"}
+  def perform(:request_subscription, websub) do
+    Logger.debug("Refreshing #{websub.topic}")
+
+    with {:ok, websub} <- Websub.request_subscription(websub) do
+      Logger.debug("Successfully refreshed #{websub.topic}")
+    else
+      _e -> Logger.debug("Couldn't refresh #{websub.topic}")
+    end
+  end
+
+  def perform(:verify_websub, websub) do
+    Logger.debug(fn ->
+      "Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
+    end)
+
+    Websub.verify(websub)
+  end
+
+  def perform(:refresh_subscriptions) do
+    Logger.debug("Federator running refresh subscriptions")
+    Websub.refresh_subscriptions()
   end

   def ap_enabled_actor(id) do
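The `worker_args` that `refresh_subscriptions/1` now accepts are forwarded to the job (the diff passes `schedule_in: 60` from `init/0` and appends `max_attempts: 1`), which matches standard Oban job options. A sketch of how a delayed, single-attempt job could be enqueued under that convention; `SubscriberWorker.enqueue/3` is the helper from the diff, while the assumption is that it forwards these options to the underlying job:

# run once, roughly a minute from now, and do not retry on failure
SubscriberWorker.enqueue("refresh_subscriptions", %{}, schedule_in: 60, max_attempts: 1)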
@@ -6,7 +6,7 @@ defmodule Pleroma.Web.Federator.Publisher do
   alias Pleroma.Activity
   alias Pleroma.Config
   alias Pleroma.User
-  alias Pleroma.Web.Federator.RetryQueue
+  alias Pleroma.Workers.PublisherWorker

   require Logger

@@ -30,23 +30,11 @@ defmodule Pleroma.Web.Federator.Publisher do
   Enqueue publishing a single activity.
   """
   @spec enqueue_one(module(), Map.t()) :: :ok
-  def enqueue_one(module, %{} = params),
-    do: PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish_one, module, params])
-
-  @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
-  def perform(:publish_one, module, params) do
-    case apply(module, :publish_one, [params]) do
-      {:ok, _} ->
-        :ok
-
-      {:error, _e} ->
-        RetryQueue.enqueue(params, module)
-    end
-  end
-
-  def perform(type, _, _) do
-    Logger.debug("Unknown task: #{type}")
-    {:error, "Don't know what to do with this"}
+  def enqueue_one(module, %{} = params) do
+    PublisherWorker.enqueue(
+      "publish_one",
+      %{"module" => to_string(module), "params" => params}
+    )
   end

   @doc """
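Because job arguments must survive a JSON round trip, the publisher module is stored as a string and resolved again before use. A sketch of the two ends of that hand-off; the enqueue side comes from the diff, while the worker-side resolution is an assumption about how `PublisherWorker` feeds the `Federator.perform(:publish_one, module, params)` callback shown earlier:

# Enqueue side (from the diff):
PublisherWorker.enqueue("publish_one", %{"module" => to_string(module), "params" => params})

# Worker side (sketch): turn the stored string back into the module atom.
module = String.to_existing_atom("Elixir.Pleroma.Web.ActivityPub.Publisher")
Pleroma.Web.Federator.perform(:publish_one, module, params)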
@@ -1,239 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.Federator.RetryQueue do
-  use GenServer
-
-  require Logger
-
-  def init(args) do
-    queue_table = :ets.new(:pleroma_retry_queue, [:bag, :protected])
-
-    {:ok, %{args | queue_table: queue_table, running_jobs: :sets.new()}}
-  end
-
-  def start_link(_) do
-    enabled =
-      if Pleroma.Config.get(:env) == :test,
-        do: true,
-        else: Pleroma.Config.get([__MODULE__, :enabled], false)
-
-    if enabled do
-      Logger.info("Starting retry queue")
-
-      linkres =
-        GenServer.start_link(
-          __MODULE__,
-          %{delivered: 0, dropped: 0, queue_table: nil, running_jobs: nil},
-          name: __MODULE__
-        )
-
-      maybe_kickoff_timer()
-      linkres
-    else
-      Logger.info("Retry queue disabled")
-      :ignore
-    end
-  end
-
-  def enqueue(data, transport, retries \\ 0) do
-    GenServer.cast(__MODULE__, {:maybe_enqueue, data, transport, retries + 1})
-  end
-
-  def get_stats do
-    GenServer.call(__MODULE__, :get_stats)
-  end
-
-  def reset_stats do
-    GenServer.call(__MODULE__, :reset_stats)
-  end
-
-  def get_retry_params(retries) do
-    if retries > Pleroma.Config.get([__MODULE__, :max_retries]) do
-      {:drop, "Max retries reached"}
-    else
-      {:retry, growth_function(retries)}
-    end
-  end
-
-  def get_retry_timer_interval do
-    Pleroma.Config.get([:retry_queue, :interval], 1000)
-  end
-
-  defp ets_count_expires(table, current_time) do
-    :ets.select_count(
-      table,
-      [
-        {
-          {:"$1", :"$2"},
-          [{:"=<", :"$1", {:const, current_time}}],
-          [true]
-        }
-      ]
-    )
-  end
-
-  defp ets_pop_n_expired(table, current_time, desired) do
-    {popped, _continuation} =
-      :ets.select(
-        table,
-        [
-          {
-            {:"$1", :"$2"},
-            [{:"=<", :"$1", {:const, current_time}}],
-            [:"$_"]
-          }
-        ],
-        desired
-      )
-
-    popped
-    |> Enum.each(fn e ->
-      :ets.delete_object(table, e)
-    end)
-
-    popped
-  end
-
-  def maybe_start_job(running_jobs, queue_table) do
-    # we don't want to hit the ets or the DateTime more times than we have to
-    # could optimize slightly further by not using the count, and instead grabbing
-    # up to N objects early...
-    current_time = DateTime.to_unix(DateTime.utc_now())
-    n_running_jobs = :sets.size(running_jobs)
-
-    if n_running_jobs < Pleroma.Config.get([__MODULE__, :max_jobs]) do
-      n_ready_jobs = ets_count_expires(queue_table, current_time)
-
-      if n_ready_jobs > 0 do
-        # figure out how many we could start
-        available_job_slots = Pleroma.Config.get([__MODULE__, :max_jobs]) - n_running_jobs
-        start_n_jobs(running_jobs, queue_table, current_time, available_job_slots)
-      else
-        running_jobs
-      end
-    else
-      running_jobs
-    end
-  end
-
-  defp start_n_jobs(running_jobs, _queue_table, _current_time, 0) do
-    running_jobs
-  end
-
-  defp start_n_jobs(running_jobs, queue_table, current_time, available_job_slots)
-       when available_job_slots > 0 do
-    candidates = ets_pop_n_expired(queue_table, current_time, available_job_slots)
-
-    candidates
-    |> List.foldl(running_jobs, fn {_, e}, rj ->
-      {:ok, pid} = Task.start(fn -> worker(e) end)
-      mref = Process.monitor(pid)
-      :sets.add_element(mref, rj)
-    end)
-  end
-
-  def worker({:send, data, transport, retries}) do
-    case transport.publish_one(data) do
-      {:ok, _} ->
-        GenServer.cast(__MODULE__, :inc_delivered)
-        :delivered
-
-      {:error, _reason} ->
-        enqueue(data, transport, retries)
-        :retry
-    end
-  end
-
-  def handle_call(:get_stats, _from, %{delivered: delivery_count, dropped: drop_count} = state) do
-    {:reply, %{delivered: delivery_count, dropped: drop_count}, state}
-  end
-
-  def handle_call(:reset_stats, _from, %{delivered: delivery_count, dropped: drop_count} = state) do
-    {:reply, %{delivered: delivery_count, dropped: drop_count},
-     %{state | delivered: 0, dropped: 0}}
-  end
-
-  def handle_cast(:reset_stats, state) do
-    {:noreply, %{state | delivered: 0, dropped: 0}}
-  end
-
-  def handle_cast(
-        {:maybe_enqueue, data, transport, retries},
-        %{dropped: drop_count, queue_table: queue_table, running_jobs: running_jobs} = state
-      ) do
-    case get_retry_params(retries) do
-      {:retry, timeout} ->
-        :ets.insert(queue_table, {timeout, {:send, data, transport, retries}})
-        running_jobs = maybe_start_job(running_jobs, queue_table)
-        {:noreply, %{state | running_jobs: running_jobs}}
-
-      {:drop, message} ->
-        Logger.debug(message)
-        {:noreply, %{state | dropped: drop_count + 1}}
-    end
-  end
-
-  def handle_cast(:kickoff_timer, state) do
-    retry_interval = get_retry_timer_interval()
-    Process.send_after(__MODULE__, :retry_timer_run, retry_interval)
-    {:noreply, state}
-  end
-
-  def handle_cast(:inc_delivered, %{delivered: delivery_count} = state) do
-    {:noreply, %{state | delivered: delivery_count + 1}}
-  end
-
-  def handle_cast(:inc_dropped, %{dropped: drop_count} = state) do
-    {:noreply, %{state | dropped: drop_count + 1}}
-  end
-
-  def handle_info({:send, data, transport, retries}, %{delivered: delivery_count} = state) do
-    case transport.publish_one(data) do
-      {:ok, _} ->
-        {:noreply, %{state | delivered: delivery_count + 1}}
-
-      {:error, _reason} ->
-        enqueue(data, transport, retries)
-        {:noreply, state}
-    end
-  end
-
-  def handle_info(
-        :retry_timer_run,
-        %{queue_table: queue_table, running_jobs: running_jobs} = state
-      ) do
-    maybe_kickoff_timer()
-    running_jobs = maybe_start_job(running_jobs, queue_table)
-    {:noreply, %{state | running_jobs: running_jobs}}
-  end
-
-  def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do
-    %{running_jobs: running_jobs, queue_table: queue_table} = state
-    running_jobs = :sets.del_element(ref, running_jobs)
-    running_jobs = maybe_start_job(running_jobs, queue_table)
-    {:noreply, %{state | running_jobs: running_jobs}}
-  end
-
-  def handle_info(unknown, state) do
-    Logger.debug("RetryQueue: don't know what to do with #{inspect(unknown)}, ignoring")
-    {:noreply, state}
-  end
-
-  if Pleroma.Config.get(:env) == :test do
-    defp growth_function(_retries) do
-      _shutit = Pleroma.Config.get([__MODULE__, :initial_timeout])
-      DateTime.to_unix(DateTime.utc_now()) - 1
-    end
-  else
-    defp growth_function(retries) do
-      round(Pleroma.Config.get([__MODULE__, :initial_timeout]) * :math.pow(retries, 3)) +
-        DateTime.to_unix(DateTime.utc_now())
-    end
-  end
-
-  defp maybe_kickoff_timer do
-    GenServer.cast(__MODULE__, :kickoff_timer)
-  end
-end
@@ -6,7 +6,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do
   use Pleroma.Web, :controller
 
   import Pleroma.Web.ControllerHelper,
-    only: [json_response: 3, add_link_headers: 5, add_link_headers: 4, add_link_headers: 3]
+    only: [json_response: 3, add_link_headers: 2, add_link_headers: 3]
 
   alias Ecto.Changeset
   alias Pleroma.Activity
@@ -365,7 +365,7 @@ def home_timeline(%{assigns: %{user: user}} = conn, params) do
       |> Enum.reverse()
 
     conn
-    |> add_link_headers(:home_timeline, activities)
+    |> add_link_headers(activities)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -384,7 +384,7 @@ def public_timeline(%{assigns: %{user: user}} = conn, params) do
       |> Enum.reverse()
 
     conn
-    |> add_link_headers(:public_timeline, activities, false, %{"local" => local_only})
+    |> add_link_headers(activities, %{"local" => local_only})
    |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -398,7 +398,7 @@ def user_statuses(%{assigns: %{user: reading_user}} = conn, params) do
       activities = ActivityPub.fetch_user_activities(user, reading_user, params)
 
       conn
-      |> add_link_headers(:user_statuses, activities, params["id"])
+      |> add_link_headers(activities)
       |> put_view(StatusView)
       |> render("index.json", %{
        activities: activities,
@@ -422,11 +422,25 @@ def dm_timeline(%{assigns: %{user: user}} = conn, params) do
       |> Pagination.fetch_paginated(params)
 
     conn
-    |> add_link_headers(:dm_timeline, activities)
+    |> add_link_headers(activities)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
 
+  def get_statuses(%{assigns: %{user: user}} = conn, %{"ids" => ids}) do
+    limit = 100
+
+    activities =
+      ids
+      |> Enum.take(limit)
+      |> Activity.all_by_ids_with_object()
+      |> Enum.filter(&Visibility.visible_for_user?(&1, user))
+
+    conn
+    |> put_view(StatusView)
+    |> render("index.json", activities: activities, for: user, as: :activity)
+  end
+
   def get_status(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     with %Activity{} = activity <- Activity.get_by_id_with_object(id),
          true <- Visibility.visible_for_user?(activity, user) do
@@ -523,7 +537,7 @@ def poll_vote(%{assigns: %{user: user}} = conn, %{"id" => id, "choices" => choic
   def scheduled_statuses(%{assigns: %{user: user}} = conn, params) do
     with scheduled_activities <- MastodonAPI.get_scheduled_activities(user, params) do
       conn
-      |> add_link_headers(:scheduled_statuses, scheduled_activities)
+      |> add_link_headers(scheduled_activities)
       |> put_view(ScheduledActivityView)
       |> render("index.json", %{scheduled_activities: scheduled_activities})
     end
@@ -706,7 +720,7 @@ def notifications(%{assigns: %{user: user}} = conn, params) do
     notifications = MastodonAPI.get_notifications(user, params)
 
     conn
-    |> add_link_headers(:notifications, notifications)
+    |> add_link_headers(notifications)
     |> put_view(NotificationView)
     |> render("index.json", %{notifications: notifications, for: user})
   end
@@ -828,6 +842,7 @@ def get_mascot(%{assigns: %{user: user}} = conn, _params) do
 
   def favourited_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     with %Activity{} = activity <- Activity.get_by_id_with_object(id),
+         {:visible, true} <- {:visible, Visibility.visible_for_user?(activity, user)},
          %Object{data: %{"likes" => likes}} <- Object.normalize(activity) do
       q = from(u in User, where: u.ap_id in ^likes)
 
@@ -839,12 +854,14 @@ def favourited_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do
       |> put_view(AccountView)
       |> render("accounts.json", %{for: user, users: users, as: :user})
     else
+      {:visible, false} -> {:error, :not_found}
       _ -> json(conn, [])
     end
   end
 
   def reblogged_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     with %Activity{} = activity <- Activity.get_by_id_with_object(id),
+         {:visible, true} <- {:visible, Visibility.visible_for_user?(activity, user)},
          %Object{data: %{"announcements" => announces}} <- Object.normalize(activity) do
       q = from(u in User, where: u.ap_id in ^announces)
 
@@ -856,6 +873,7 @@ def reblogged_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do
       |> put_view(AccountView)
       |> render("accounts.json", %{for: user, users: users, as: :user})
     else
+      {:visible, false} -> {:error, :not_found}
       _ -> json(conn, [])
     end
   end
@@ -894,7 +912,7 @@ def hashtag_timeline(%{assigns: %{user: user}} = conn, params) do
       |> Enum.reverse()
 
     conn
-    |> add_link_headers(:hashtag_timeline, activities, params["tag"], %{"local" => local_only})
+    |> add_link_headers(activities, %{"local" => local_only})
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -910,7 +928,7 @@ def followers(%{assigns: %{user: for_user}} = conn, %{"id" => id} = params) do
       end
 
     conn
-    |> add_link_headers(:followers, followers, user)
+    |> add_link_headers(followers)
     |> put_view(AccountView)
     |> render("accounts.json", %{for: for_user, users: followers, as: :user})
   end
@@ -927,7 +945,7 @@ def following(%{assigns: %{user: for_user}} = conn, %{"id" => id} = params) do
      end
 
     conn
-    |> add_link_headers(:following, followers, user)
+    |> add_link_headers(followers)
    |> put_view(AccountView)
     |> render("accounts.json", %{for: for_user, users: followers, as: :user})
   end
@@ -1152,7 +1170,7 @@ def favourites(%{assigns: %{user: user}} = conn, params) do
       |> Enum.reverse()
 
     conn
-    |> add_link_headers(:favourites, activities)
+    |> add_link_headers(activities)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -1179,7 +1197,7 @@ def user_favourites(%{assigns: %{user: for_user}} = conn, %{"id" => id} = params
       |> Enum.reverse()
 
       conn
-      |> add_link_headers(:favourites, activities)
+      |> add_link_headers(activities)
      |> put_view(StatusView)
       |> render("index.json", %{activities: activities, for: for_user, as: :activity})
     else
@@ -1200,7 +1218,7 @@ def bookmarks(%{assigns: %{user: user}} = conn, params) do
       |> Enum.map(fn b -> Map.put(b.activity, :bookmark, Map.delete(b, :activity)) end)
 
     conn
-    |> add_link_headers(:bookmarks, bookmarks)
+    |> add_link_headers(bookmarks)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -1640,7 +1658,7 @@ def conversations(%{assigns: %{user: user}} = conn, params) do
       end)
 
     conn
-    |> add_link_headers(:conversations, participations)
+    |> add_link_headers(participations)
     |> json(conversations)
   end
@@ -73,14 +73,12 @@ defp reblogged?(activity, user) do
 
   def render("index.json", opts) do
     replied_to_activities = get_replied_to_activities(opts.activities)
-    parallel = unless is_nil(opts[:parallel]), do: opts[:parallel], else: true
 
     opts.activities
     |> safe_render_many(
       StatusView,
       "status.json",
-      Map.put(opts, :replied_to_activities, replied_to_activities),
-      parallel
+      Map.put(opts, :replied_to_activities, replied_to_activities)
     )
   end
 
@@ -499,7 +497,7 @@ def build_tags(object_tags) when is_list(object_tags) do
     object_tags = for tag when is_binary(tag) <- object_tags, do: tag
 
     Enum.reduce(object_tags, [], fn tag, tags ->
-      tags ++ [%{name: tag, url: "/tag/#{tag}"}]
+      tags ++ [%{name: tag, url: "/tag/#{URI.encode(tag)}"}]
     end)
   end
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
   alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web.OAuth.Token
+  alias Pleroma.Web.Streamer
 
   @behaviour :cowboy_websocket
 
@@ -24,7 +25,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
   ]
   @anonymous_streams ["public", "public:local", "hashtag"]
 
-  # Handled by periodic keepalive in Pleroma.Web.Streamer.
+  # Handled by periodic keepalive in Pleroma.Web.Streamer.Ping.
   @timeout :infinity
 
   def init(%{qs: qs} = req, state) do
@@ -65,7 +66,7 @@ def websocket_info(:subscribe, state) do
       }, topic #{state.topic}"
     )
 
-    Pleroma.Web.Streamer.add_socket(state.topic, streamer_socket(state))
+    Streamer.add_socket(state.topic, streamer_socket(state))
     {:ok, state}
   end
 
@@ -80,7 +81,7 @@ def terminate(reason, _req, state) do
       }, topic #{state.topic || "?"}: #{inspect(reason)}"
     )
 
-    Pleroma.Web.Streamer.remove_socket(state.topic, streamer_socket(state))
+    Streamer.remove_socket(state.topic, streamer_socket(state))
     :ok
   end
@@ -17,6 +17,7 @@ defmodule Pleroma.Web.OAuth.Token.CleanWorker do
   )
 
   alias Pleroma.Web.OAuth.Token
+  alias Pleroma.Workers.BackgroundWorker
 
   def start_link(_), do: GenServer.start_link(__MODULE__, %{})
 
@@ -27,9 +28,11 @@ def init(_) do
 
   @doc false
   def handle_info(:perform, state) do
-    Token.delete_expired_tokens()
+    BackgroundWorker.enqueue("clean_expired_tokens", %{})
 
     Process.send_after(self(), :perform, @interval)
     {:noreply, state}
   end
+
+  def perform(:clean), do: Token.delete_expired_tokens()
 end
@@ -5,7 +5,7 @@
 defmodule Pleroma.Web.PleromaAPI.PleromaAPIController do
   use Pleroma.Web, :controller
 
-  import Pleroma.Web.ControllerHelper, only: [add_link_headers: 7]
+  import Pleroma.Web.ControllerHelper, only: [add_link_headers: 2]
 
   alias Pleroma.Conversation.Participation
   alias Pleroma.Notification
@@ -27,31 +27,22 @@ def conversation_statuses(
         %{assigns: %{user: user}} = conn,
         %{"id" => participation_id} = params
       ) do
+    participation = Participation.get(participation_id, preload: [:conversation])
+
+    if user.id == participation.user_id do
       params =
         params
         |> Map.put("blocking_user", user)
        |> Map.put("muting_user", user)
         |> Map.put("user", user)
 
-    participation =
-      participation_id
-      |> Participation.get(preload: [:conversation])
-
-    if user.id == participation.user_id do
       activities =
         participation.conversation.ap_id
        |> ActivityPub.fetch_activities_for_context(params)
         |> Enum.reverse()
 
       conn
-      |> add_link_headers(
-        :conversation_statuses,
-        activities,
-        participation_id,
-        params,
-        nil,
-        &pleroma_api_url/4
-      )
+      |> add_link_headers(activities)
       |> put_view(StatusView)
       |> render("index.json", %{activities: activities, for: user, as: :activity})
     end
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.Push do
-  alias Pleroma.Web.Push.Impl
+  alias Pleroma.Workers.WebPusherWorker
 
   require Logger
 
@@ -31,6 +31,7 @@ def enabled do
     end
   end
 
-  def send(notification),
-    do: PleromaJobQueue.enqueue(:web_push, Impl, [notification])
+  def send(notification) do
+    WebPusherWorker.enqueue("web_push", %{"notification_id" => notification.id})
+  end
 end
@@ -81,6 +81,7 @@ defp parse_url(url) do
     {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)
 
     html
+    |> parse_html
     |> maybe_parse()
     |> Map.put(:url, url)
     |> clean_parsed_data()
@@ -91,6 +92,8 @@ defp parse_url(url) do
     end
   end
 
+  defp parse_html(html), do: Floki.parse(html)
+
   defp maybe_parse(html) do
     Enum.reduce_while(parsers(), %{}, fn parser, acc ->
       case parser.parse(html, acc) do
@@ -100,7 +103,8 @@ defp maybe_parse(html) do
     end)
   end
 
-  defp check_parsed_data(%{title: title} = data) when is_binary(title) and byte_size(title) > 0 do
+  defp check_parsed_data(%{title: title} = data)
+       when is_binary(title) and byte_size(title) > 0 do
     {:ok, data}
   end
@@ -135,6 +135,7 @@ defmodule Pleroma.Web.Router do
 
   pipeline :http_signature do
     plug(Pleroma.Web.Plugs.HTTPSignaturePlug)
+    plug(Pleroma.Web.Plugs.MappedSignatureToIdentityPlug)
   end
 
   scope "/api/pleroma", Pleroma.Web.TwitterAPI do
@@ -224,6 +225,7 @@ defmodule Pleroma.Web.Router do
     scope [] do
       pipe_through(:oauth_write)
 
+      post("/change_email", UtilController, :change_email)
       post("/change_password", UtilController, :change_password)
       post("/delete_account", UtilController, :delete_account)
       put("/notification_settings", UtilController, :update_notificaton_settings)
@@ -443,6 +445,7 @@ defmodule Pleroma.Web.Router do
      get("/timelines/tag/:tag", MastodonAPIController, :hashtag_timeline)
      get("/timelines/list/:list_id", MastodonAPIController, :list_timeline)
 
+      get("/statuses", MastodonAPIController, :get_statuses)
      get("/statuses/:id", MastodonAPIController, :get_status)
      get("/statuses/:id/context", MastodonAPIController, :get_context)
 
@@ -512,6 +515,7 @@ defmodule Pleroma.Web.Router do
 
   scope "/", Pleroma.Web do
     pipe_through(:ostatus)
+    pipe_through(:http_signature)
 
     get("/objects/:uuid", OStatus.OStatusController, :object)
     get("/activities/:uuid", OStatus.OStatusController, :activity)
@@ -170,6 +170,15 @@ def publish_one(%{recipient: url, feed: feed} = params) when is_binary(url) do
     end
   end
 
+  def publish_one(%{recipient_id: recipient_id} = params) do
+    recipient = User.get_cached_by_id(recipient_id)
+
+    params
+    |> Map.delete(:recipient_id)
+    |> Map.put(:recipient, recipient)
+    |> publish_one()
+  end
+
   def publish_one(_), do: :noop
 
   @supported_activities [
@@ -218,7 +227,7 @@ def publish(%{info: %{keys: keys}} = user, %{data: %{"type" => type}} = activity
     Logger.debug(fn -> "Sending Salmon to #{remote_user.ap_id}" end)
 
     Publisher.enqueue_one(__MODULE__, %{
-      recipient: remote_user,
+      recipient_id: remote_user.id,
       feed: feed,
       unreachable_since: reachable_urls_metadata[remote_user.info.salmon]
     })
@@ -1,318 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.Streamer do
-  use GenServer
-  require Logger
-  alias Pleroma.Activity
-  alias Pleroma.Config
-  alias Pleroma.Conversation.Participation
-  alias Pleroma.Notification
-  alias Pleroma.Object
-  alias Pleroma.User
-  alias Pleroma.Web.ActivityPub.ActivityPub
-  alias Pleroma.Web.ActivityPub.Visibility
-  alias Pleroma.Web.CommonAPI
-  alias Pleroma.Web.MastodonAPI.NotificationView
-
-  @keepalive_interval :timer.seconds(30)
-
-  def start_link(_) do
-    GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
-  end
-
-  def add_socket(topic, socket) do
-    GenServer.cast(__MODULE__, %{action: :add, socket: socket, topic: topic})
-  end
-
-  def remove_socket(topic, socket) do
-    GenServer.cast(__MODULE__, %{action: :remove, socket: socket, topic: topic})
-  end
-
-  def stream(topic, item) do
-    GenServer.cast(__MODULE__, %{action: :stream, topic: topic, item: item})
-  end
-
-  def init(args) do
-    Process.send_after(self(), %{action: :ping}, @keepalive_interval)
-
-    {:ok, args}
-  end
-
-  def handle_info(%{action: :ping}, topics) do
-    topics
-    |> Map.values()
-    |> List.flatten()
-    |> Enum.each(fn socket ->
-      Logger.debug("Sending keepalive ping")
-      send(socket.transport_pid, {:text, ""})
-    end)
-
-    Process.send_after(self(), %{action: :ping}, @keepalive_interval)
-
-    {:noreply, topics}
-  end
-
-  def handle_cast(%{action: :stream, topic: "direct", item: item}, topics) do
-    recipient_topics =
-      User.get_recipients_from_activity(item)
-      |> Enum.map(fn %{id: id} -> "direct:#{id}" end)
-
-    Enum.each(recipient_topics || [], fn user_topic ->
-      Logger.debug("Trying to push direct message to #{user_topic}\n\n")
-      push_to_socket(topics, user_topic, item)
-    end)
-
-    {:noreply, topics}
-  end
-
-  def handle_cast(%{action: :stream, topic: "participation", item: participation}, topics) do
-    user_topic = "direct:#{participation.user_id}"
-    Logger.debug("Trying to push a conversation participation to #{user_topic}\n\n")
-
-    push_to_socket(topics, user_topic, participation)
-
-    {:noreply, topics}
-  end
-
-  def handle_cast(%{action: :stream, topic: "list", item: item}, topics) do
-    # filter the recipient list if the activity is not public, see #270.
-    recipient_lists =
-      case Visibility.is_public?(item) do
-        true ->
-          Pleroma.List.get_lists_from_activity(item)
-
-        _ ->
-          Pleroma.List.get_lists_from_activity(item)
-          |> Enum.filter(fn list ->
-            owner = User.get_cached_by_id(list.user_id)
-
-            Visibility.visible_for_user?(item, owner)
-          end)
-      end
-
-    recipient_topics =
-      recipient_lists
-      |> Enum.map(fn %{id: id} -> "list:#{id}" end)
-
-    Enum.each(recipient_topics || [], fn list_topic ->
-      Logger.debug("Trying to push message to #{list_topic}\n\n")
-      push_to_socket(topics, list_topic, item)
-    end)
-
-    {:noreply, topics}
-  end
-
-  def handle_cast(
-        %{action: :stream, topic: topic, item: %Notification{} = item},
-        topics
-      )
-      when topic in ["user", "user:notification"] do
-    topics
-    |> Map.get("#{topic}:#{item.user_id}", [])
-    |> Enum.each(fn socket ->
-      with %User{} = user <- User.get_cached_by_ap_id(socket.assigns[:user].ap_id),
-           true <- should_send?(user, item) do
-        send(
-          socket.transport_pid,
-          {:text, represent_notification(socket.assigns[:user], item)}
-        )
-      end
-    end)
-
-    {:noreply, topics}
-  end
-
-  def handle_cast(%{action: :stream, topic: "user", item: item}, topics) do
-    Logger.debug("Trying to push to users")
-
-    recipient_topics =
-      User.get_recipients_from_activity(item)
-      |> Enum.map(fn %{id: id} -> "user:#{id}" end)
-
-    Enum.each(recipient_topics, fn topic ->
-      push_to_socket(topics, topic, item)
-    end)
-
-    {:noreply, topics}
-  end
-
-  def handle_cast(%{action: :stream, topic: topic, item: item}, topics) do
-    Logger.debug("Trying to push to #{topic}")
-    Logger.debug("Pushing item to #{topic}")
-    push_to_socket(topics, topic, item)
-    {:noreply, topics}
-  end
-
-  def handle_cast(%{action: :add, topic: topic, socket: socket}, sockets) do
-    topic = internal_topic(topic, socket)
-    sockets_for_topic = sockets[topic] || []
-    sockets_for_topic = Enum.uniq([socket | sockets_for_topic])
-    sockets = Map.put(sockets, topic, sockets_for_topic)
-    Logger.debug("Got new conn for #{topic}")
-    {:noreply, sockets}
-  end
-
-  def handle_cast(%{action: :remove, topic: topic, socket: socket}, sockets) do
-    topic = internal_topic(topic, socket)
-    sockets_for_topic = sockets[topic] || []
-    sockets_for_topic = List.delete(sockets_for_topic, socket)
-    sockets = Map.put(sockets, topic, sockets_for_topic)
-    Logger.debug("Removed conn for #{topic}")
-    {:noreply, sockets}
-  end
-
-  def handle_cast(m, state) do
-    Logger.info("Unknown: #{inspect(m)}, #{inspect(state)}")
-    {:noreply, state}
-  end
-
-  defp represent_update(%Activity{} = activity, %User{} = user) do
-    %{
-      event: "update",
-      payload:
-        Pleroma.Web.MastodonAPI.StatusView.render(
-          "status.json",
-          activity: activity,
-          for: user
-        )
-        |> Jason.encode!()
-    }
-    |> Jason.encode!()
-  end
-
-  defp represent_update(%Activity{} = activity) do
-    %{
-      event: "update",
-      payload:
-        Pleroma.Web.MastodonAPI.StatusView.render(
-          "status.json",
-          activity: activity
-        )
-        |> Jason.encode!()
-    }
-    |> Jason.encode!()
-  end
-
-  def represent_conversation(%Participation{} = participation) do
-    %{
-      event: "conversation",
-      payload:
-        Pleroma.Web.MastodonAPI.ConversationView.render("participation.json", %{
-          participation: participation,
-          for: participation.user
-        })
-        |> Jason.encode!()
-    }
-    |> Jason.encode!()
-  end
-
-  @spec represent_notification(User.t(), Notification.t()) :: binary()
-  defp represent_notification(%User{} = user, %Notification{} = notify) do
-    %{
-      event: "notification",
-      payload:
-        NotificationView.render(
-          "show.json",
-          %{notification: notify, for: user}
-        )
-        |> Jason.encode!()
-    }
-    |> Jason.encode!()
-  end
-
-  defp should_send?(%User{} = user, %Activity{} = item) do
-    blocks = user.info.blocks || []
-    mutes = user.info.mutes || []
-    reblog_mutes = user.info.muted_reblogs || []
-    domain_blocks = Pleroma.Web.ActivityPub.MRF.subdomains_regex(user.info.domain_blocks)
-
-    with parent when not is_nil(parent) <- Object.normalize(item),
-         true <- Enum.all?([blocks, mutes, reblog_mutes], &(item.actor not in &1)),
-         true <- Enum.all?([blocks, mutes], &(parent.data["actor"] not in &1)),
-         %{host: item_host} <- URI.parse(item.actor),
-         %{host: parent_host} <- URI.parse(parent.data["actor"]),
-         false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, item_host),
-         false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, parent_host),
-         true <- thread_containment(item, user),
-         false <- CommonAPI.thread_muted?(user, item) do
-      true
-    else
-      _ -> false
-    end
-  end
-
-  defp should_send?(%User{} = user, %Notification{activity: activity}) do
-    should_send?(user, activity)
-  end
-
-  def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = item) do
-    Enum.each(topics[topic] || [], fn socket ->
-      # Get the current user so we have up-to-date blocks etc.
-      if socket.assigns[:user] do
-        user = User.get_cached_by_ap_id(socket.assigns[:user].ap_id)
-
-        if should_send?(user, item) do
-          send(socket.transport_pid, {:text, represent_update(item, user)})
-        end
-      else
-        send(socket.transport_pid, {:text, represent_update(item)})
-      end
-    end)
-  end
-
-  def push_to_socket(topics, topic, %Participation{} = participation) do
-    Enum.each(topics[topic] || [], fn socket ->
-      send(socket.transport_pid, {:text, represent_conversation(participation)})
-    end)
-  end
-
-  def push_to_socket(topics, topic, %Activity{
-        data: %{"type" => "Delete", "deleted_activity_id" => deleted_activity_id}
-      }) do
-    Enum.each(topics[topic] || [], fn socket ->
-      send(
-        socket.transport_pid,
-        {:text, %{event: "delete", payload: to_string(deleted_activity_id)} |> Jason.encode!()}
-      )
-    end)
-  end
-
-  def push_to_socket(_topics, _topic, %Activity{data: %{"type" => "Delete"}}), do: :noop
-
-  def push_to_socket(topics, topic, item) do
-    Enum.each(topics[topic] || [], fn socket ->
-      # Get the current user so we have up-to-date blocks etc.
-      if socket.assigns[:user] do
-        user = User.get_cached_by_ap_id(socket.assigns[:user].ap_id)
-        blocks = user.info.blocks || []
-        mutes = user.info.mutes || []
-
-        with true <- Enum.all?([blocks, mutes], &(item.actor not in &1)),
-             true <- thread_containment(item, user) do
-          send(socket.transport_pid, {:text, represent_update(item, user)})
-        end
-      else
-        send(socket.transport_pid, {:text, represent_update(item)})
-      end
-    end)
-  end
-
-  defp internal_topic(topic, socket) when topic in ~w[user user:notification direct] do
-    "#{topic}:#{socket.assigns[:user].id}"
-  end
-
-  defp internal_topic(topic, _), do: topic
-
-  @spec thread_containment(Activity.t(), User.t()) :: boolean()
-  defp thread_containment(_activity, %User{info: %{skip_thread_containment: true}}), do: true
-
-  defp thread_containment(activity, user) do
-    if Config.get([:instance, :skip_thread_containment]) do
-      true
-    else
-      ActivityPub.contain_activity(activity, user)
-    end
-  end
-end
@@ -0,0 +1,33 @@
+defmodule Pleroma.Web.Streamer.Ping do
+  use GenServer
+  require Logger
+
+  alias Pleroma.Web.Streamer.State
+  alias Pleroma.Web.Streamer.StreamerSocket
+
+  @keepalive_interval :timer.seconds(30)
+
+  def start_link(opts) do
+    ping_interval = Keyword.get(opts, :ping_interval, @keepalive_interval)
+    GenServer.start_link(__MODULE__, %{ping_interval: ping_interval}, name: __MODULE__)
+  end
+
+  def init(%{ping_interval: ping_interval} = args) do
+    Process.send_after(self(), :ping, ping_interval)
+    {:ok, args}
+  end
+
+  def handle_info(:ping, %{ping_interval: ping_interval} = state) do
+    State.get_sockets()
+    |> Map.values()
+    |> List.flatten()
+    |> Enum.each(fn %StreamerSocket{transport_pid: transport_pid} ->
+      Logger.debug("Sending keepalive ping")
+      send(transport_pid, {:text, ""})
+    end)
+
+    Process.send_after(self(), :ping, ping_interval)
+
+    {:noreply, state}
+  end
+end
@@ -0,0 +1,68 @@
+defmodule Pleroma.Web.Streamer.State do
+  use GenServer
+  require Logger
+
+  alias Pleroma.Web.Streamer.StreamerSocket
+
+  def start_link(_) do
+    GenServer.start_link(__MODULE__, %{sockets: %{}}, name: __MODULE__)
+  end
+
+  def add_socket(topic, socket) do
+    GenServer.call(__MODULE__, {:add, socket, topic})
+  end
+
+  def remove_socket(topic, socket) do
+    GenServer.call(__MODULE__, {:remove, socket, topic})
+  end
+
+  def get_sockets do
+    %{sockets: stream_sockets} = GenServer.call(__MODULE__, :get_state)
+    stream_sockets
+  end
+
+  def init(init_arg) do
+    {:ok, init_arg}
+  end
+
+  def handle_call(:get_state, _from, state) do
+    {:reply, state, state}
+  end
+
+  def handle_call({:add, socket, topic}, _from, %{sockets: sockets} = state) do
+    internal_topic = internal_topic(topic, socket)
+    stream_socket = StreamerSocket.from_socket(socket)
+
+    sockets_for_topic =
+      sockets
+      |> Map.get(internal_topic, [])
+      |> List.insert_at(0, stream_socket)
+      |> Enum.uniq()
+
+    state = put_in(state, [:sockets, internal_topic], sockets_for_topic)
+    Logger.debug("Got new conn for #{topic}")
+    {:reply, state, state}
+  end
+
+  def handle_call({:remove, socket, topic}, _from, %{sockets: sockets} = state) do
+    internal_topic = internal_topic(topic, socket)
+    stream_socket = StreamerSocket.from_socket(socket)
+
+    sockets_for_topic =
+      sockets
+      |> Map.get(internal_topic, [])
+      |> List.delete(stream_socket)
+
+    state = Kernel.put_in(state, [:sockets, internal_topic], sockets_for_topic)
+    {:reply, state, state}
+  end
+
+  defp internal_topic(topic, socket)
+       when topic in ~w[user user:notification direct] do
+    "#{topic}:#{socket.assigns[:user].id}"
+  end
+
+  defp internal_topic(topic, _) do
+    topic
+  end
+end
@@ -0,0 +1,55 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.Streamer do
+  alias Pleroma.Web.Streamer.State
+  alias Pleroma.Web.Streamer.Worker
+
+  @timeout 60_000
+  @mix_env Mix.env()
+
+  def add_socket(topic, socket) do
+    State.add_socket(topic, socket)
+  end
+
+  def remove_socket(topic, socket) do
+    State.remove_socket(topic, socket)
+  end
+
+  def get_sockets do
+    State.get_sockets()
+  end
+
+  def stream(topics, items) do
+    if should_send?() do
+      Task.async(fn ->
+        :poolboy.transaction(
+          :streamer_worker,
+          &Worker.stream(&1, topics, items),
+          @timeout
+        )
+      end)
+    end
+  end
+
+  def supervisor, do: Pleroma.Web.Streamer.Supervisor
+
+  defp should_send? do
+    handle_should_send(@mix_env)
+  end
+
+  defp handle_should_send(:test) do
+    case Process.whereis(:streamer_worker) do
+      nil ->
+        false
+
+      pid ->
+        Process.alive?(pid)
+    end
+  end
+
+  defp handle_should_send(_) do
+    true
+  end
+end
@@ -0,0 +1,31 @@
+defmodule Pleroma.Web.Streamer.StreamerSocket do
+  defstruct transport_pid: nil, user: nil
+
+  alias Pleroma.User
+  alias Pleroma.Web.Streamer.StreamerSocket
+
+  def from_socket(%{
+        transport_pid: transport_pid,
+        assigns: %{user: nil}
+      }) do
+    %StreamerSocket{
+      transport_pid: transport_pid
+    }
+  end
+
+  def from_socket(%{
+        transport_pid: transport_pid,
+        assigns: %{user: %User{} = user}
+      }) do
+    %StreamerSocket{
+      transport_pid: transport_pid,
+      user: user
+    }
+  end
+
+  def from_socket(%{transport_pid: transport_pid}) do
+    %StreamerSocket{
+      transport_pid: transport_pid
+    }
+  end
+end
@@ -0,0 +1,33 @@
+defmodule Pleroma.Web.Streamer.Supervisor do
+  use Supervisor
+
+  def start_link(opts) do
+    Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
+  end
+
+  def init(args) do
+    children = [
+      {Pleroma.Web.Streamer.State, args},
+      {Pleroma.Web.Streamer.Ping, args},
+      :poolboy.child_spec(:streamer_worker, poolboy_config())
+    ]
+
+    opts = [strategy: :one_for_one, name: Pleroma.Web.Streamer.Supervisor]
+    Supervisor.init(children, opts)
+  end
+
+  defp poolboy_config do
+    opts =
+      Pleroma.Config.get(:streamer,
+        workers: 3,
+        overflow_workers: 2
+      )
+
+    [
+      {:name, {:local, :streamer_worker}},
+      {:worker_module, Pleroma.Web.Streamer.Worker},
+      {:size, opts[:workers]},
+      {:max_overflow, opts[:overflow_workers]}
+    ]
+  end
+end
@@ -0,0 +1,220 @@
+defmodule Pleroma.Web.Streamer.Worker do
+  use GenServer
+
+  require Logger
+
+  alias Pleroma.Activity
+  alias Pleroma.Config
+  alias Pleroma.Conversation.Participation
+  alias Pleroma.Notification
+  alias Pleroma.Object
+  alias Pleroma.User
+  alias Pleroma.Web.ActivityPub.ActivityPub
+  alias Pleroma.Web.ActivityPub.Visibility
+  alias Pleroma.Web.CommonAPI
+  alias Pleroma.Web.Streamer.State
+  alias Pleroma.Web.Streamer.StreamerSocket
+  alias Pleroma.Web.StreamerView
+
+  def start_link(_) do
+    GenServer.start_link(__MODULE__, %{}, [])
+  end
+
+  def init(init_arg) do
+    {:ok, init_arg}
+  end
+
+  def stream(pid, topics, items) do
+    GenServer.call(pid, {:stream, topics, items})
+  end
+
+  def handle_call({:stream, topics, item}, _from, state) when is_list(topics) do
+    Enum.each(topics, fn t ->
+      do_stream(%{topic: t, item: item})
+    end)
+
+    {:reply, state, state}
+  end
+
+  def handle_call({:stream, topic, items}, _from, state) when is_list(items) do
+    Enum.each(items, fn i ->
+      do_stream(%{topic: topic, item: i})
+    end)
+
+    {:reply, state, state}
+  end
+
+  def handle_call({:stream, topic, item}, _from, state) do
+    do_stream(%{topic: topic, item: item})
+
+    {:reply, state, state}
+  end
+
+  defp do_stream(%{topic: "direct", item: item}) do
+    recipient_topics =
+      User.get_recipients_from_activity(item)
+      |> Enum.map(fn %{id: id} -> "direct:#{id}" end)
+
+    Enum.each(recipient_topics, fn user_topic ->
+      Logger.debug("Trying to push direct message to #{user_topic}\n\n")
+      push_to_socket(State.get_sockets(), user_topic, item)
+    end)
+  end
+
+  defp do_stream(%{topic: "participation", item: participation}) do
+    user_topic = "direct:#{participation.user_id}"
+    Logger.debug("Trying to push a conversation participation to #{user_topic}\n\n")
+
+    push_to_socket(State.get_sockets(), user_topic, participation)
+  end
+
+  defp do_stream(%{topic: "list", item: item}) do
+    # filter the recipient list if the activity is not public, see #270.
+    recipient_lists =
+      case Visibility.is_public?(item) do
+        true ->
+          Pleroma.List.get_lists_from_activity(item)
+
+        _ ->
+          Pleroma.List.get_lists_from_activity(item)
+          |> Enum.filter(fn list ->
+            owner = User.get_cached_by_id(list.user_id)
+
+            Visibility.visible_for_user?(item, owner)
+          end)
+      end
+
+    recipient_topics =
+      recipient_lists
+      |> Enum.map(fn %{id: id} -> "list:#{id}" end)
+
+    Enum.each(recipient_topics, fn list_topic ->
+      Logger.debug("Trying to push message to #{list_topic}\n\n")
+      push_to_socket(State.get_sockets(), list_topic, item)
+    end)
+  end
+
+  defp do_stream(%{topic: topic, item: %Notification{} = item})
+       when topic in ["user", "user:notification"] do
+    State.get_sockets()
+    |> Map.get("#{topic}:#{item.user_id}", [])
+    |> Enum.each(fn %StreamerSocket{transport_pid: transport_pid, user: socket_user} ->
+      with %User{} = user <- User.get_cached_by_ap_id(socket_user.ap_id),
+           true <- should_send?(user, item) do
+        send(transport_pid, {:text, StreamerView.render("notification.json", socket_user, item)})
+      end
+    end)
+  end
+
+  defp do_stream(%{topic: "user", item: item}) do
+    Logger.debug("Trying to push to users")
+
+    recipient_topics =
+      User.get_recipients_from_activity(item)
+      |> Enum.map(fn %{id: id} -> "user:#{id}" end)
+
+    Enum.each(recipient_topics, fn topic ->
+      push_to_socket(State.get_sockets(), topic, item)
+    end)
+  end
+
+  defp do_stream(%{topic: topic, item: item}) do
+    Logger.debug("Trying to push to #{topic}")
+    Logger.debug("Pushing item to #{topic}")
+    push_to_socket(State.get_sockets(), topic, item)
+  end
+
+  defp should_send?(%User{} = user, %Activity{} = item) do
+    blocks = user.info.blocks || []
+    mutes = user.info.mutes || []
+    reblog_mutes = user.info.muted_reblogs || []
+    domain_blocks = Pleroma.Web.ActivityPub.MRF.subdomains_regex(user.info.domain_blocks)
+
+    with parent when not is_nil(parent) <- Object.normalize(item),
+         true <- Enum.all?([blocks, mutes, reblog_mutes], &(item.actor not in &1)),
+         true <- Enum.all?([blocks, mutes], &(parent.data["actor"] not in &1)),
+         %{host: item_host} <- URI.parse(item.actor),
+         %{host: parent_host} <- URI.parse(parent.data["actor"]),
+         false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, item_host),
+         false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, parent_host),
+         true <- thread_containment(item, user),
+         false <- CommonAPI.thread_muted?(user, item) do
+      true
+    else
+      _ -> false
+    end
+  end
+
+  defp should_send?(%User{} = user, %Notification{activity: activity}) do
+    should_send?(user, activity)
+  end
+
+  def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = item) do
+    Enum.each(topics[topic] || [], fn %StreamerSocket{
+                                        transport_pid: transport_pid,
+                                        user: socket_user
+                                      } ->
+      # Get the current user so we have up-to-date blocks etc.
+      if socket_user do
+        user = User.get_cached_by_ap_id(socket_user.ap_id)
+
+        if should_send?(user, item) do
+          send(transport_pid, {:text, StreamerView.render("update.json", item, user)})
+        end
+      else
+        send(transport_pid, {:text, StreamerView.render("update.json", item)})
+      end
+    end)
+  end
+
+  def push_to_socket(topics, topic, %Participation{} = participation) do
+    Enum.each(topics[topic] || [], fn %StreamerSocket{transport_pid: transport_pid} ->
+      send(transport_pid, {:text, StreamerView.render("conversation.json", participation)})
+    end)
+  end
+
+  def push_to_socket(topics, topic, %Activity{
+        data: %{"type" => "Delete", "deleted_activity_id" => deleted_activity_id}
+      }) do
+    Enum.each(topics[topic] || [], fn %StreamerSocket{transport_pid: transport_pid} ->
+      send(
+        transport_pid,
+        {:text, %{event: "delete", payload: to_string(deleted_activity_id)} |> Jason.encode!()}
+      )
+    end)
+  end
+
+  def push_to_socket(_topics, _topic, %Activity{data: %{"type" => "Delete"}}), do: :noop
+
+  def push_to_socket(topics, topic, item) do
+    Enum.each(topics[topic] || [], fn %StreamerSocket{
+                                        transport_pid: transport_pid,
+                                        user: socket_user
+                                      } ->
+      # Get the current user so we have up-to-date blocks etc.
+      if socket_user do
+        user = User.get_cached_by_ap_id(socket_user.ap_id)
+        blocks = user.info.blocks || []
+        mutes = user.info.mutes || []
+
+        with true <- Enum.all?([blocks, mutes], &(item.actor not in &1)),
+             true <- thread_containment(item, user) do
+          send(transport_pid, {:text, StreamerView.render("update.json", item, user)})
+        end
+      else
+        send(transport_pid, {:text, StreamerView.render("update.json", item)})
+      end
+    end)
+  end
+
+  @spec thread_containment(Activity.t(), User.t()) :: boolean()
+  defp thread_containment(_activity, %User{info: %{skip_thread_containment: true}}), do: true
+
+  defp thread_containment(activity, user) do
+    if Config.get([:instance, :skip_thread_containment]) do
+      true
+    else
+      ActivityPub.contain_activity(activity, user)
+    end
+  end
+end
@@ -265,12 +265,7 @@ def follow_import(%{assigns: %{user: follower}} = conn, %{"list" => list}) do
             String.split(line, ",") |> List.first()
           end)
           |> List.delete("Account address") do
-      PleromaJobQueue.enqueue(:background, User, [
-        :follow_import,
-        follower,
-        followed_identifiers
-      ])
-
+      User.follow_import(follower, followed_identifiers)
       json(conn, "job started")
     end
   end
@@ -281,12 +276,7 @@ def blocks_import(conn, %{"list" => %Plug.Upload{} = listfile}) do
 
   def blocks_import(%{assigns: %{user: blocker}} = conn, %{"list" => list}) do
     with blocked_identifiers <- String.split(list) do
-      PleromaJobQueue.enqueue(:background, User, [
-        :blocks_import,
-        blocker,
-        blocked_identifiers
-      ])
-
+      User.blocks_import(blocker, blocked_identifiers)
       json(conn, "job started")
     end
   end
@@ -314,6 +304,25 @@ def change_password(%{assigns: %{user: user}} = conn, params) do
     end
   end
 
+  def change_email(%{assigns: %{user: user}} = conn, params) do
+    case CommonAPI.Utils.confirm_current_password(user, params["password"]) do
+      {:ok, user} ->
+        with {:ok, _user} <- User.change_email(user, params["email"]) do
+          json(conn, %{status: "success"})
+        else
+          {:error, changeset} ->
+            {_, {error, _}} = Enum.at(changeset.errors, 0)
+            json(conn, %{error: "Email #{error}."})
+
+          _ ->
+            json(conn, %{error: "Unable to change email."})
+        end
+
+      {:error, msg} ->
+        json(conn, %{error: msg})
+    end
+  end
+
   def delete_account(%{assigns: %{user: user}} = conn, params) do
     case CommonAPI.Utils.confirm_current_password(user, params["password"]) do
       {:ok, user} ->
@@ -0,0 +1,66 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.StreamerView do
+  use Pleroma.Web, :view
+
+  alias Pleroma.Activity
+  alias Pleroma.Conversation.Participation
+  alias Pleroma.Notification
+  alias Pleroma.User
+  alias Pleroma.Web.MastodonAPI.NotificationView
+
+  def render("update.json", %Activity{} = activity, %User{} = user) do
+    %{
+      event: "update",
+      payload:
+        Pleroma.Web.MastodonAPI.StatusView.render(
+          "status.json",
+          activity: activity,
+          for: user
+        )
+        |> Jason.encode!()
+    }
+    |> Jason.encode!()
+  end
+
+  def render("notification.json", %User{} = user, %Notification{} = notify) do
+    %{
+      event: "notification",
+      payload:
+        NotificationView.render(
+          "show.json",
+          %{notification: notify, for: user}
+        )
+        |> Jason.encode!()
+    }
+    |> Jason.encode!()
+  end
+
+  def render("update.json", %Activity{} = activity) do
+    %{
+      event: "update",
+      payload:
+        Pleroma.Web.MastodonAPI.StatusView.render(
+          "status.json",
+          activity: activity
+        )
+        |> Jason.encode!()
+    }
+    |> Jason.encode!()
+  end
+
+  def render("conversation.json", %Participation{} = participation) do
+    %{
+      event: "conversation",
+      payload:
+        Pleroma.Web.MastodonAPI.ConversationView.render("participation.json", %{
+          participation: participation,
+          for: participation.user
+        })
+        |> Jason.encode!()
+    }
+    |> Jason.encode!()
+  end
+end
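`StreamerView` gathers the JSON frames pushed over the streaming API into one view module. A usage sketch based only on the render clauses above (the calling code in the streamer is not shown here):

# Sketch: building an "update" frame for a specific user.
frame = Pleroma.Web.StreamerView.render("update.json", activity, user)
# frame is a JSON string shaped like
# {"event": "update", "payload": "<JSON-encoded status.json rendering>"}
# The payload is encoded separately, so consumers decode twice.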
@@ -66,23 +66,9 @@ def safe_render(view, template, assigns \\ %{}) do
   end

   @doc """
-  Same as `render_many/4` but wrapped in rescue block and parallelized (unless disabled by passing false as a fifth argument).
+  Same as `render_many/4` but wrapped in rescue block.
   """
-  def safe_render_many(collection, view, template, assigns \\ %{}, parallel \\ true)
-
-  def safe_render_many(collection, view, template, assigns, true) do
-    Enum.map(collection, fn resource ->
-      Task.async(fn ->
-        as = Map.get(assigns, :as) || view.__resource__
-        assigns = Map.put(assigns, as, resource)
-        safe_render(view, template, assigns)
-      end)
-    end)
-    |> Enum.map(&Task.await(&1, :infinity))
-    |> Enum.filter(& &1)
-  end
-
-  def safe_render_many(collection, view, template, assigns, false) do
+  def safe_render_many(collection, view, template, assigns \\ %{}) do
     Enum.map(collection, fn resource ->
       as = Map.get(assigns, :as) || view.__resource__
       assigns = Map.put(assigns, as, resource)
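`safe_render_many/4` is now always sequential: the `Task.async`/`Task.await` clause and the trailing `parallel` argument are gone, so existing call sites keep the same shape. An illustrative call (view module and assigns are hypothetical, not taken from this diff):

safe_render_many(activities, StatusView, "status.json", %{as: :activity, for: user})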
@@ -0,0 +1,18 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.ActivityExpirationWorker do
+  use Pleroma.Workers.WorkerHelper, queue: "activity_expiration"
+
+  @impl Oban.Worker
+  def perform(
+        %{
+          "op" => "activity_expiration",
+          "activity_expiration_id" => activity_expiration_id
+        },
+        _job
+      ) do
+    Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, activity_expiration_id)
+  end
+end
@@ -0,0 +1,69 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.BackgroundWorker do
+  alias Pleroma.Activity
+  alias Pleroma.User
+  alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy
+  alias Pleroma.Web.OAuth.Token.CleanWorker
+
+  use Pleroma.Workers.WorkerHelper, queue: "background"
+
+  @impl Oban.Worker
+  def perform(%{"op" => "fetch_initial_posts", "user_id" => user_id}, _job) do
+    user = User.get_cached_by_id(user_id)
+    User.perform(:fetch_initial_posts, user)
+  end
+
+  def perform(%{"op" => "deactivate_user", "user_id" => user_id, "status" => status}, _job) do
+    user = User.get_cached_by_id(user_id)
+    User.perform(:deactivate_async, user, status)
+  end
+
+  def perform(%{"op" => "delete_user", "user_id" => user_id}, _job) do
+    user = User.get_cached_by_id(user_id)
+    User.perform(:delete, user)
+  end
+
+  def perform(
+        %{
+          "op" => "blocks_import",
+          "blocker_id" => blocker_id,
+          "blocked_identifiers" => blocked_identifiers
+        },
+        _job
+      ) do
+    blocker = User.get_cached_by_id(blocker_id)
+    User.perform(:blocks_import, blocker, blocked_identifiers)
+  end
+
+  def perform(
+        %{
+          "op" => "follow_import",
+          "follower_id" => follower_id,
+          "followed_identifiers" => followed_identifiers
+        },
+        _job
+      ) do
+    follower = User.get_cached_by_id(follower_id)
+    User.perform(:follow_import, follower, followed_identifiers)
+  end
+
+  def perform(%{"op" => "clean_expired_tokens"}, _job) do
+    CleanWorker.perform(:clean)
+  end
+
+  def perform(%{"op" => "media_proxy_preload", "message" => message}, _job) do
+    MediaProxyWarmingPolicy.perform(:preload, message)
+  end
+
+  def perform(%{"op" => "media_proxy_prefetch", "url" => url}, _job) do
+    MediaProxyWarmingPolicy.perform(:prefetch, url)
+  end
+
+  def perform(%{"op" => "fetch_data_for_activity", "activity_id" => activity_id}, _job) do
+    activity = Activity.get_by_id(activity_id)
+    Pleroma.Web.RichMedia.Helpers.perform(:fetch, activity)
+  end
+end
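Each clause above dispatches on the `"op"` value that `WorkerHelper.enqueue/3` (defined later in this diff) merges into the job arguments. A sketch of the producer side, assuming the `enqueue/3` generated by `use Pleroma.Workers.WorkerHelper`:

# Hypothetical enqueue call; the user struct is illustrative.
Pleroma.Workers.BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
# Oban later calls perform(%{"op" => "delete_user", "user_id" => user.id}, job)
# on the "background" queue.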
@@ -0,0 +1,16 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.DigestEmailsWorker do
+  alias Pleroma.User
+
+  use Pleroma.Workers.WorkerHelper, queue: "digest_emails"
+
+  @impl Oban.Worker
+  def perform(%{"op" => "digest_email", "user_id" => user_id}, _job) do
+    user_id
+    |> User.get_cached_by_id()
+    |> Pleroma.Daemons.DigestEmailDaemon.perform()
+  end
+end
@@ -0,0 +1,15 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.MailerWorker do
+  use Pleroma.Workers.WorkerHelper, queue: "mailer"
+
+  @impl Oban.Worker
+  def perform(%{"op" => "email", "encoded_email" => encoded_email, "config" => config}, _job) do
+    encoded_email
+    |> Base.decode64!()
+    |> :erlang.binary_to_term()
+    |> Pleroma.Emails.Mailer.deliver(config)
+  end
+end
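Oban job arguments have to survive JSON serialization, so the email term arrives Base64-encoded and is rebuilt with `:erlang.binary_to_term/1`. A sketch of the matching producer side (the actual caller is not shown in this excerpt):

# Hypothetical sketch of enqueuing a prepared email struct.
encoded_email =
  email
  |> :erlang.term_to_binary()
  |> Base.encode64()

Pleroma.Workers.MailerWorker.enqueue("email", %{
  "encoded_email" => encoded_email,
  "config" => config
})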
@@ -0,0 +1,25 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.PublisherWorker do
+  alias Pleroma.Activity
+  alias Pleroma.Web.Federator
+
+  use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"
+
+  def backoff(attempt) when is_integer(attempt) do
+    Pleroma.Workers.WorkerHelper.sidekiq_backoff(attempt, 5)
+  end
+
+  @impl Oban.Worker
+  def perform(%{"op" => "publish", "activity_id" => activity_id}, _job) do
+    activity = Activity.get_by_id(activity_id)
+    Federator.perform(:publish, activity)
+  end
+
+  def perform(%{"op" => "publish_one", "module" => module_name, "params" => params}, _job) do
+    params = Map.new(params, fn {k, v} -> {String.to_atom(k), v} end)
+    Federator.perform(:publish_one, String.to_atom(module_name), params)
+  end
+end
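`backoff/1` reuses `WorkerHelper.sidekiq_backoff/3` with `pow = 5` and the default base of 15, i.e. roughly `attempt^5 + 15 + :rand.uniform(30) * attempt` seconds, truncated to an integer. A worked example (the numbers follow from the formula shown later in this diff, they are not captured output):

Pleroma.Workers.WorkerHelper.sidekiq_backoff(3, 5)
# => 243 + 15 + :rand.uniform(30) * 3, an integer between 261 and 348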
@@ -0,0 +1,18 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.ReceiverWorker do
+  alias Pleroma.Web.Federator
+
+  use Pleroma.Workers.WorkerHelper, queue: "federator_incoming"
+
+  @impl Oban.Worker
+  def perform(%{"op" => "incoming_doc", "body" => doc}, _job) do
+    Federator.perform(:incoming_doc, doc)
+  end
+
+  def perform(%{"op" => "incoming_ap_doc", "params" => params}, _job) do
+    Federator.perform(:incoming_ap_doc, params)
+  end
+end
@@ -0,0 +1,12 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.ScheduledActivityWorker do
+  use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"
+
+  @impl Oban.Worker
+  def perform(%{"op" => "execute", "activity_id" => activity_id}, _job) do
+    Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, activity_id)
+  end
+end
@@ -0,0 +1,26 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.SubscriberWorker do
+  alias Pleroma.Repo
+  alias Pleroma.Web.Federator
+  alias Pleroma.Web.Websub
+
+  use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"
+
+  @impl Oban.Worker
+  def perform(%{"op" => "refresh_subscriptions"}, _job) do
+    Federator.perform(:refresh_subscriptions)
+  end
+
+  def perform(%{"op" => "request_subscription", "websub_id" => websub_id}, _job) do
+    websub = Repo.get(Websub.WebsubClientSubscription, websub_id)
+    Federator.perform(:request_subscription, websub)
+  end
+
+  def perform(%{"op" => "verify_websub", "websub_id" => websub_id}, _job) do
+    websub = Repo.get(Websub.WebsubServerSubscription, websub_id)
+    Federator.perform(:verify_websub, websub)
+  end
+end
@@ -0,0 +1,15 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.TransmogrifierWorker do
+  alias Pleroma.User
+
+  use Pleroma.Workers.WorkerHelper, queue: "transmogrifier"
+
+  @impl Oban.Worker
+  def perform(%{"op" => "user_upgrade", "user_id" => user_id}, _job) do
+    user = User.get_cached_by_id(user_id)
+    Pleroma.Web.ActivityPub.Transmogrifier.perform(:user_upgrade, user)
+  end
+end
@@ -0,0 +1,16 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.WebPusherWorker do
+  alias Pleroma.Notification
+  alias Pleroma.Repo
+
+  use Pleroma.Workers.WorkerHelper, queue: "web_push"
+
+  @impl Oban.Worker
+  def perform(%{"op" => "web_push", "notification_id" => notification_id}, _job) do
+    notification = Repo.get(Notification, notification_id)
+    Pleroma.Web.Push.Impl.perform(notification)
+  end
+end
@@ -0,0 +1,46 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.WorkerHelper do
+  alias Pleroma.Config
+  alias Pleroma.Workers.WorkerHelper
+
+  def worker_args(queue) do
+    case Config.get([:workers, :retries, queue]) do
+      nil -> []
+      max_attempts -> [max_attempts: max_attempts]
+    end
+  end
+
+  def sidekiq_backoff(attempt, pow \\ 4, base_backoff \\ 15) do
+    backoff =
+      :math.pow(attempt, pow) +
+        base_backoff +
+        :rand.uniform(2 * base_backoff) * attempt
+
+    trunc(backoff)
+  end
+
+  defmacro __using__(opts) do
+    caller_module = __CALLER__.module
+    queue = Keyword.fetch!(opts, :queue)
+
+    quote do
+      # Note: `max_attempts` is intended to be overridden in `new/2` call
+      use Oban.Worker,
+        queue: unquote(queue),
+        max_attempts: 1
+
+      def enqueue(op, params, worker_args \\ []) do
+        params = Map.merge(%{"op" => op}, params)
+        queue_atom = String.to_atom(unquote(queue))
+        worker_args = worker_args ++ WorkerHelper.worker_args(queue_atom)
+
+        unquote(caller_module)
+        |> apply(:new, [params, worker_args])
+        |> Pleroma.Repo.insert()
+      end
+    end
+  end
+end
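`worker_args/1` lets per-queue retry counts come from configuration: whatever `Config.get([:workers, :retries, queue])` returns overrides the `max_attempts: 1` default baked into `__using__/1`. A configuration sketch consistent with that lookup path (the queue names and retry values are assumptions; `docs/config.md` is the authoritative reference):

# config/config.exs (sketch)
config :pleroma, :workers,
  retries: [
    federator_incoming: 5,
    federator_outgoing: 5
  ]

With that in place, a call such as `ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params})` inserts an Oban job on the "federator_incoming" queue with `max_attempts: 5`.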
mix.exs (8 changed lines)
@@ -101,6 +101,8 @@ defp deps do
       {:phoenix_ecto, "~> 4.0"},
       {:ecto_sql, "~> 3.1"},
       {:postgrex, ">= 0.13.5"},
+      {:oban, "~> 0.7"},
+      {:quantum, "~> 2.3"},
       {:gettext, "~> 0.15"},
       {:comeonin, "~> 4.1.1"},
       {:pbkdf2_elixir, "~> 0.12.3"},
@@ -125,13 +127,13 @@ defp deps do
       {:crypt,
        git: "https://github.com/msantos/crypt", ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"},
       {:cors_plug, "~> 1.5"},
-      {:ex_doc, "~> 0.20.2", only: :dev, runtime: false},
+      {:ex_doc, "~> 0.21", only: :dev, runtime: false},
       {:web_push_encryption, "~> 0.2.1"},
       {:swoosh, "~> 0.23.2"},
       {:phoenix_swoosh, "~> 0.2"},
       {:gen_smtp, "~> 0.13"},
       {:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test},
-      {:floki, "~> 0.20.0"},
+      {:floki, "~> 0.23.0"},
       {:ex_syslogger, github: "slashmili/ex_syslogger", tag: "1.4.0"},
       {:timex, "~> 3.5"},
       {:ueberauth, "~> 0.4"},
@@ -141,8 +143,8 @@ defp deps do
       {:http_signatures,
        git: "https://git.pleroma.social/pleroma/http_signatures.git",
        ref: "293d77bb6f4a67ac8bde1428735c3b42f22cbb30"},
-      {:pleroma_job_queue, "~> 0.3"},
       {:telemetry, "~> 0.3"},
+      {:poolboy, "~> 1.5"},
       {:prometheus_ex, "~> 3.0"},
       {:prometheus_plugs, "~> 1.1"},
       {:prometheus_phoenix, "~> 1.3"},
mix.lock (22 changed lines)
@@ -17,10 +17,10 @@
   "credo": {:hex, :credo, "0.9.3", "76fa3e9e497ab282e0cf64b98a624aa11da702854c52c82db1bf24e54ab7c97a", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:poison, ">= 0.0.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
   "crontab": {:hex, :crontab, "1.1.7", "b9219f0bdc8678b94143655a8f229716c5810c0636a4489f98c0956137e53985", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
   "crypt": {:git, "https://github.com/msantos/crypt", "1f2b58927ab57e72910191a7ebaeff984382a1d3", [ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"]},
-  "db_connection": {:hex, :db_connection, "2.0.6", "bde2f85d047969c5b5800cb8f4b3ed6316c8cb11487afedac4aa5f93fd39abfa", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
+  "db_connection": {:hex, :db_connection, "2.1.1", "a51e8a2ee54ef2ae6ec41a668c85787ed40cb8944928c191280fe34c15b76ae5", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
   "decimal": {:hex, :decimal, "1.8.0", "ca462e0d885f09a1c5a342dbd7c1dcf27ea63548c65a65e67334f4b61803822e", [:mix], [], "hexpm"},
   "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm"},
-  "earmark": {:hex, :earmark, "1.3.2", "b840562ea3d67795ffbb5bd88940b1bed0ed9fa32834915125ea7d02e35888a5", [:mix], [], "hexpm"},
+  "earmark": {:hex, :earmark, "1.3.6", "ce1d0675e10a5bb46b007549362bd3f5f08908843957687d8484fe7f37466b19", [:mix], [], "hexpm"},
   "ecto": {:hex, :ecto, "3.1.4", "69d852da7a9f04ede725855a35ede48d158ca11a404fe94f8b2fb3b2162cd3c9", [:mix], [{:decimal, "~> 1.6", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
   "ecto_sql": {:hex, :ecto_sql, "3.1.3", "2c536139190492d9de33c5fefac7323c5eaaa82e1b9bf93482a14649042f7cd9", [:mix], [{:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.1.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:mariaex, "~> 0.9.1", [hex: :mariaex, repo: "hexpm", optional: true]}, {:myxql, "~> 0.2.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.14.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
   "esshd": {:hex, :esshd, "0.1.0", "6f93a2062adb43637edad0ea7357db2702a4b80dd9683482fe00f5134e97f4c1", [:mix], [], "hexpm"},
@@ -29,13 +29,15 @@
   "ex_aws": {:hex, :ex_aws, "2.1.0", "b92651527d6c09c479f9013caa9c7331f19cba38a650590d82ebf2c6c16a1d8a", [:mix], [{:configparser_ex, "~> 2.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "1.6.3 or 1.6.5 or 1.7.1 or 1.8.6 or ~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8", [hex: :jsx, repo: "hexpm", optional: true]}, {:poison, ">= 1.2.0", [hex: :poison, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.6", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:xml_builder, "~> 0.1.0", [hex: :xml_builder, repo: "hexpm", optional: true]}], "hexpm"},
   "ex_aws_s3": {:hex, :ex_aws_s3, "2.0.1", "9e09366e77f25d3d88c5393824e613344631be8db0d1839faca49686e99b6704", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm"},
   "ex_const": {:hex, :ex_const, "0.2.4", "d06e540c9d834865b012a17407761455efa71d0ce91e5831e86881b9c9d82448", [:mix], [], "hexpm"},
-  "ex_doc": {:hex, :ex_doc, "0.20.2", "1bd0dfb0304bade58beb77f20f21ee3558cc3c753743ae0ddbb0fd7ba2912331", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
+  "ex_doc": {:hex, :ex_doc, "0.21.2", "caca5bc28ed7b3bdc0b662f8afe2bee1eedb5c3cf7b322feeeb7c6ebbde089d6", [:mix], [{:earmark, "~> 1.3.3 or ~> 1.4", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
   "ex_machina": {:hex, :ex_machina, "2.3.0", "92a5ad0a8b10ea6314b876a99c8c9e3f25f4dde71a2a835845b136b9adaf199a", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm"},
   "ex_rated": {:hex, :ex_rated, "1.3.3", "30ecbdabe91f7eaa9d37fa4e81c85ba420f371babeb9d1910adbcd79ec798d27", [:mix], [{:ex2ms, "~> 1.5", [hex: :ex2ms, repo: "hexpm", optional: false]}], "hexpm"},
   "ex_syslogger": {:git, "https://github.com/slashmili/ex_syslogger.git", "f3963399047af17e038897c69e20d552e6899e1d", [tag: "1.4.0"]},
   "excoveralls": {:hex, :excoveralls, "0.11.1", "dd677fbdd49114fdbdbf445540ec735808250d56b011077798316505064edb2c", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
-  "floki": {:hex, :floki, "0.20.4", "be42ac911fece24b4c72f3b5846774b6e61b83fe685c2fc9d62093277fb3bc86", [:mix], [{:html_entities, "~> 0.4.0", [hex: :html_entities, repo: "hexpm", optional: false]}, {:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
+  "floki": {:hex, :floki, "0.23.0", "956ab6dba828c96e732454809fb0bd8d43ce0979b75f34de6322e73d4c917829", [:mix], [{:html_entities, "~> 0.4.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm"},
   "gen_smtp": {:hex, :gen_smtp, "0.14.0", "39846a03522456077c6429b4badfd1d55e5e7d0fdfb65e935b7c5e38549d9202", [:rebar3], [], "hexpm"},
+  "gen_stage": {:hex, :gen_stage, "0.14.2", "6a2a578a510c5bfca8a45e6b27552f613b41cf584b58210f017088d3d17d0b14", [:mix], [], "hexpm"},
+  "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"},
   "gettext": {:hex, :gettext, "0.17.0", "abe21542c831887a2b16f4c94556db9c421ab301aee417b7c4fbde7fbdbe01ec", [:mix], [], "hexpm"},
   "hackney": {:hex, :hackney, "1.15.1", "9f8f471c844b8ce395f7b6d8398139e26ddca9ebc171a8b91342ee15a19963f4", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.4", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"},
   "html_entities": {:hex, :html_entities, "0.4.0", "f2fee876858cf6aaa9db608820a3209e45a087c5177332799592142b50e89a6b", [:mix], [], "hexpm"},
@@ -46,8 +48,9 @@
   "jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"},
   "joken": {:hex, :joken, "2.0.1", "ec9ab31bf660f343380da033b3316855197c8d4c6ef597fa3fcb451b326beb14", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm"},
   "jose": {:hex, :jose, "1.9.0", "4167c5f6d06ffaebffd15cdb8da61a108445ef5e85ab8f5a7ad926fdf3ada154", [:mix, :rebar3], [{:base64url, "~> 0.0.1", [hex: :base64url, repo: "hexpm", optional: false]}], "hexpm"},
-  "makeup": {:hex, :makeup, "0.8.0", "9cf32aea71c7fe0a4b2e9246c2c4978f9070257e5c9ce6d4a28ec450a839b55f", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
-  "makeup_elixir": {:hex, :makeup_elixir, "0.13.0", "be7a477997dcac2e48a9d695ec730b2d22418292675c75aa2d34ba0909dcdeda", [:mix], [{:makeup, "~> 0.8", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"},
+  "libring": {:hex, :libring, "1.4.0", "41246ba2f3fbc76b3971f6bce83119dfec1eee17e977a48d8a9cfaaf58c2a8d6", [:mix], [], "hexpm"},
+  "makeup": {:hex, :makeup, "1.0.0", "671df94cf5a594b739ce03b0d0316aa64312cee2574b6a44becb83cd90fb05dc", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
+  "makeup_elixir": {:hex, :makeup_elixir, "0.14.0", "cf8b7c66ad1cff4c14679698d532f0b5d45a3968ffbcbfd590339cb57742f1ae", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"},
   "meck": {:hex, :meck, "0.8.13", "ffedb39f99b0b99703b8601c6f17c7f76313ee12de6b646e671e3188401f7866", [:rebar3], [], "hexpm"},
   "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"},
   "mime": {:hex, :mime, "1.3.1", "30ce04ab3175b6ad0bdce0035cba77bba68b813d523d1aac73d9781b4d193cf8", [:mix], [], "hexpm"},
@@ -56,7 +59,8 @@
   "mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
   "mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
   "mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"},
-  "nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"},
+  "nimble_parsec": {:hex, :nimble_parsec, "0.5.1", "c90796ecee0289dbb5ad16d3ad06f957b0cd1199769641c961cfe0b97db190e0", [:mix], [], "hexpm"},
+  "oban": {:hex, :oban, "0.7.1", "171bdd1b69c1a4a839f8c768f5e962fc22d1de1513d459fb6b8e0cbd34817a9a", [:mix], [{:ecto_sql, "~> 3.1", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
   "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
   "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
   "phoenix": {:hex, :phoenix, "1.4.9", "746d098e10741c334d88143d3c94cab1756435f94387a63441792e66ec0ee974", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.8.1 or ~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
@@ -64,12 +68,12 @@
   "phoenix_html": {:hex, :phoenix_html, "2.13.1", "fa8f034b5328e2dfa0e4131b5569379003f34bc1fafdaa84985b0b9d2f12e68b", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.1.2", "496c303bdf1b2e98a9d26e89af5bba3ab487ba3a3735f74bf1f4064d2a845a3e", [:mix], [], "hexpm"},
   "phoenix_swoosh": {:hex, :phoenix_swoosh, "0.2.0", "a7e0b32077cd6d2323ae15198839b05d9caddfa20663fd85787479e81f89520e", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.2", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:swoosh, "~> 0.1", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm"},
-  "pleroma_job_queue": {:hex, :pleroma_job_queue, "0.3.0", "b84538d621f0c3d6fcc1cff9d5648d3faaf873b8b21b94e6503428a07a48ec47", [:mix], [{:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}], "hexpm"},
   "plug": {:hex, :plug, "1.8.2", "0bcce1daa420f189a6491f3940cc77ea7fb1919761175c9c3b59800d897440fc", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"},
   "plug_cowboy": {:hex, :plug_cowboy, "2.1.0", "b75768153c3a8a9e8039d4b25bb9b14efbc58e9c4a6e6a270abff1cd30cbe320", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
   "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
+  "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm"},
   "postgrex": {:hex, :postgrex, "0.14.3", "5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
   "prometheus": {:hex, :prometheus, "4.4.1", "1e96073b3ed7788053768fea779cbc896ddc3bdd9ba60687f2ad50b252ac87d6", [:mix, :rebar3], [], "hexpm"},
   "prometheus_ecto": {:hex, :prometheus_ecto, "1.4.1", "6c768ea9654de871e5b32fab2eac348467b3021604ebebbcbd8bcbe806a65ed5", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
@@ -77,9 +81,11 @@
   "prometheus_phoenix": {:hex, :prometheus_phoenix, "1.3.0", "c4b527e0b3a9ef1af26bdcfbfad3998f37795b9185d475ca610fe4388fdd3bb5", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
   "prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
   "quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
+  "quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
   "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
   "recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]},
   "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm"},
+  "swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm"},
   "sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"},
   "swoosh": {:hex, :swoosh, "0.23.2", "7dda95ff0bf54a2298328d6899c74dae1223777b43563ccebebb4b5d2b61df38", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm"},
   "syslog": {:git, "https://github.com/Vagabond/erlang-syslog.git", "4a6c6f2c996483e86c1320e9553f91d337bcb6aa", [tag: "1.0.5"]},
@@ -0,0 +1,6 @@
+defmodule Pleroma.Repo.Migrations.AddObanJobsTable do
+  use Ecto.Migration
+
+  defdelegate up, to: Oban.Migrations
+  defdelegate down, to: Oban.Migrations
+end
@@ -0,0 +1,12 @@
+defmodule Pleroma.Repo.Migrations.CreateDeliveries do
+  use Ecto.Migration
+
+  def change do
+    create_if_not_exists table(:deliveries) do
+      add(:object_id, references(:objects, type: :id), null: false)
+      add(:user_id, references(:users, type: :uuid, on_delete: :delete_all), null: false)
+    end
+    create_if_not_exists index(:deliveries, :object_id, name: :deliveries_object_id)
+    create_if_not_exists(unique_index(:deliveries, [:user_id, :object_id]))
+  end
+end
@@ -0,0 +1,141 @@
+defmodule Pleroma.Activity.Ir.TopicsTest do
+  use Pleroma.DataCase
+
+  alias Pleroma.Activity
+  alias Pleroma.Activity.Ir.Topics
+  alias Pleroma.Object
+
+  require Pleroma.Constants
+
+  describe "poll answer" do
+    test "produce no topics" do
+      activity = %Activity{object: %Object{data: %{"type" => "Answer"}}}
+
+      assert [] == Topics.get_activity_topics(activity)
+    end
+  end
+
+  describe "non poll answer" do
+    test "always add user and list topics" do
+      activity = %Activity{object: %Object{data: %{"type" => "FooBar"}}}
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "user")
+      assert Enum.member?(topics, "list")
+    end
+  end
+
+  describe "public visibility" do
+    setup do
+      activity = %Activity{
+        object: %Object{data: %{"type" => "Note"}},
+        data: %{"to" => [Pleroma.Constants.as_public()]}
+      }
+
+      {:ok, activity: activity}
+    end
+
+    test "produces public topic", %{activity: activity} do
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "public")
+    end
+
+    test "local action produces public:local topic", %{activity: activity} do
+      activity = %{activity | local: true}
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "public:local")
+    end
+
+    test "non-local action does not produce public:local topic", %{activity: activity} do
+      activity = %{activity | local: false}
+      topics = Topics.get_activity_topics(activity)
+
+      refute Enum.member?(topics, "public:local")
+    end
+  end
+
+  describe "public visibility create events" do
+    setup do
+      activity = %Activity{
+        object: %Object{data: %{"type" => "Create", "attachment" => []}},
+        data: %{"to" => [Pleroma.Constants.as_public()]}
+      }
+
+      {:ok, activity: activity}
+    end
+
+    test "with no attachments doesn't produce public:media topics", %{activity: activity} do
+      topics = Topics.get_activity_topics(activity)
+
+      refute Enum.member?(topics, "public:media")
+      refute Enum.member?(topics, "public:local:media")
+    end
+
+    test "converts tags to hash tags", %{activity: %{object: %{data: data} = object} = activity} do
+      tagged_data = Map.put(data, "tag", ["foo", "bar"])
+      activity = %{activity | object: %{object | data: tagged_data}}
+
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "hashtag:foo")
+      assert Enum.member?(topics, "hashtag:bar")
+    end
+
+    test "only converts strings to hash tags", %{
+      activity: %{object: %{data: data} = object} = activity
+    } do
+      tagged_data = Map.put(data, "tag", [2])
+      activity = %{activity | object: %{object | data: tagged_data}}
+
+      topics = Topics.get_activity_topics(activity)
+
+      refute Enum.member?(topics, "hashtag:2")
+    end
+  end
+
+  describe "public visibility create events with attachments" do
+    setup do
+      activity = %Activity{
+        object: %Object{data: %{"type" => "Create", "attachment" => ["foo"]}},
+        data: %{"to" => [Pleroma.Constants.as_public()]}
+      }
+
+      {:ok, activity: activity}
+    end
+
+    test "produce public:media topics", %{activity: activity} do
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "public:media")
+    end
+
+    test "local produces public:local:media topics", %{activity: activity} do
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "public:local:media")
+    end
+
+    test "non-local doesn't produce public:local:media topics", %{activity: activity} do
+      activity = %{activity | local: false}
+
+      topics = Topics.get_activity_topics(activity)
+
+      refute Enum.member?(topics, "public:local:media")
+    end
+  end
+
+  describe "non-public visibility" do
+    test "produces direct topic" do
+      activity = %Activity{object: %Object{data: %{"type" => "Note"}}, data: %{"to" => []}}
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "direct")
+      refute Enum.member?(topics, "public")
+      refute Enum.member?(topics, "public:local")
+      refute Enum.member?(topics, "public:media")
+      refute Enum.member?(topics, "public:local:media")
+    end
+  end
+end
@@ -7,6 +7,7 @@ defmodule Pleroma.ActivityTest do
   alias Pleroma.Activity
   alias Pleroma.Bookmark
   alias Pleroma.Object
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.ThreadMute
   import Pleroma.Factory

@@ -125,7 +126,8 @@ test "when association is not loaded" do
     }

     {:ok, local_activity} = Pleroma.Web.CommonAPI.post(user, %{"status" => "find me!"})
-    {:ok, remote_activity} = Pleroma.Web.Federator.incoming_ap_doc(params)
+    {:ok, job} = Pleroma.Web.Federator.incoming_ap_doc(params)
+    {:ok, remote_activity} = ObanHelpers.perform(job)
     %{local_activity: local_activity, remote_activity: remote_activity, user: user}
   end

@@ -173,4 +175,51 @@ test "add an activity with an expiration" do
     |> where([a], a.activity_id == ^activity.id)
     |> Repo.one!()
   end
+
+  test "all_by_ids_with_object/1" do
+    %{id: id1} = insert(:note_activity)
+    %{id: id2} = insert(:note_activity)
+
+    activities =
+      [id1, id2]
+      |> Activity.all_by_ids_with_object()
+      |> Enum.sort(&(&1.id < &2.id))
+
+    assert [%{id: ^id1, object: %Object{}}, %{id: ^id2, object: %Object{}}] = activities
+  end
+
+  test "get_by_id_with_object/1" do
+    %{id: id} = insert(:note_activity)
+
+    assert %Activity{id: ^id, object: %Object{}} = Activity.get_by_id_with_object(id)
+  end
+
+  test "get_by_ap_id_with_object/1" do
+    %{data: %{"id" => ap_id}} = insert(:note_activity)
+
+    assert %Activity{data: %{"id" => ^ap_id}, object: %Object{}} =
+             Activity.get_by_ap_id_with_object(ap_id)
+  end
+
+  test "get_by_id/1" do
+    %{id: id} = insert(:note_activity)
+
+    assert %Activity{id: ^id} = Activity.get_by_id(id)
+  end
+
+  test "all_by_actor_and_id/2" do
+    user = insert(:user)
+
+    {:ok, %{id: id1}} = Pleroma.Web.CommonAPI.post(user, %{"status" => "cofe"})
+    {:ok, %{id: id2}} = Pleroma.Web.CommonAPI.post(user, %{"status" => "cofefe"})
+
+    assert [] == Activity.all_by_actor_and_id(user, [])
+
+    activities =
+      user.ap_id
+      |> Activity.all_by_actor_and_id([id1, id2])
+      |> Enum.sort(&(&1.id < &2.id))
+
+    assert [%Activity{id: ^id1}, %Activity{id: ^id2}] = activities
+  end
 end
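The updated tests drive enqueued jobs synchronously through `Pleroma.Tests.ObanHelpers`, whose definition is not part of this excerpt. A rough sketch of what such a helper could look like, only to clarify how `perform/1` and `perform_all/0` are used above (the module internals here are assumptions, not the real implementation):

defmodule Pleroma.Tests.ObanHelpers do
  # Hypothetical sketch: execute enqueued Oban jobs inline during tests.
  alias Pleroma.Repo

  def perform_all do
    Oban.Job
    |> Repo.all()
    |> Enum.map(&perform/1)
  end

  def perform(%Oban.Job{} = job) do
    # Resolve the worker module stored on the job and invoke its perform/2.
    res = job.worker |> List.wrap() |> Module.concat() |> apply(:perform, [job.args, job])
    Repo.delete(job)
    res
  end
end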
@@ -22,6 +22,8 @@ test "it goes through old direct conversations" do
     {:ok, _activity} =
       CommonAPI.post(user, %{"visibility" => "direct", "status" => "hey @#{other_user.nickname}"})

+    Pleroma.Tests.ObanHelpers.perform_all()
+
     Repo.delete_all(Conversation)
     Repo.delete_all(Conversation.Participation)

@@ -10,7 +10,7 @@ defmodule Pleroma.ActivityExpirationWorkerTest do
   test "deletes an activity" do
     activity = insert(:note_activity)
     expiration = insert(:expiration_in_the_past, %{activity_id: activity.id})
-    Pleroma.ActivityExpirationWorker.perform(:execute, expiration.id)
+    Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, expiration.id)

     refute Repo.get(Activity, activity.id)
   end
@@ -2,11 +2,12 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.DigestEmailWorkerTest do
+defmodule Pleroma.DigestEmailDaemonTest do
   use Pleroma.DataCase
   import Pleroma.Factory

-  alias Pleroma.DigestEmailWorker
+  alias Pleroma.Daemons.DigestEmailDaemon
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI

@@ -22,7 +23,10 @@ test "it sends digest emails" do
     User.switch_email_notifications(user2, "digest", true)
     CommonAPI.post(user, %{"status" => "hey @#{user2.nickname}!"})

-    DigestEmailWorker.perform()
+    DigestEmailDaemon.perform()
+    ObanHelpers.perform_all()
+    # Performing job(s) enqueued at previous step
+    ObanHelpers.perform_all()

     assert_received {:email, email}
     assert email.to == [{user2.name, user2.email}]
@ -2,7 +2,7 @@
|
||||||
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.ScheduledActivityWorkerTest do
|
defmodule Pleroma.ScheduledActivityDaemonTest do
|
||||||
use Pleroma.DataCase
|
use Pleroma.DataCase
|
||||||
alias Pleroma.ScheduledActivity
|
alias Pleroma.ScheduledActivity
|
||||||
import Pleroma.Factory
|
import Pleroma.Factory
|
||||||
|
@ -10,7 +10,7 @@ defmodule Pleroma.ScheduledActivityWorkerTest do
|
||||||
test "creates a status from the scheduled activity" do
|
test "creates a status from the scheduled activity" do
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
scheduled_activity = insert(:scheduled_activity, user: user, params: %{status: "hi"})
|
scheduled_activity = insert(:scheduled_activity, user: user, params: %{status: "hi"})
|
||||||
Pleroma.ScheduledActivityWorker.perform(:execute, scheduled_activity.id)
|
Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, scheduled_activity.id)
|
||||||
|
|
||||||
refute Repo.get(ScheduledActivity, scheduled_activity.id)
|
refute Repo.get(ScheduledActivity, scheduled_activity.id)
|
||||||
activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))
|
activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))
|
|
@@ -5,12 +5,12 @@
defmodule Pleroma.Integration.MastodonWebsocketTest do
  use Pleroma.DataCase

+ import ExUnit.CaptureLog
  import Pleroma.Factory

  alias Pleroma.Integration.WebsocketClient
  alias Pleroma.Web.CommonAPI
  alias Pleroma.Web.OAuth
- alias Pleroma.Web.Streamer

  @path Pleroma.Web.Endpoint.url()
        |> URI.parse()

@@ -18,16 +18,6 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
        |> Map.put(:path, "/api/v1/streaming")
        |> URI.to_string()

- setup do
-   GenServer.start(Streamer, %{}, name: Streamer)
-
-   on_exit(fn ->
-     if pid = Process.whereis(Streamer) do
-       Process.exit(pid, :kill)
-     end
-   end)
- end
-
  def start_socket(qs \\ nil, headers \\ []) do
    path =
      case qs do

@@ -39,21 +29,27 @@ def start_socket(qs \\ nil, headers \\ []) do
  end

  test "refuses invalid requests" do
+   capture_log(fn ->
      assert {:error, {400, _}} = start_socket()
      assert {:error, {404, _}} = start_socket("?stream=ncjdk")
+   end)
  end

  test "requires authentication and a valid token for protected streams" do
+   capture_log(fn ->
      assert {:error, {403, _}} = start_socket("?stream=user&access_token=aaaaaaaaaaaa")
      assert {:error, {403, _}} = start_socket("?stream=user")
+   end)
  end

+ @tag needs_streamer: true
  test "allows public streams without authentication" do
    assert {:ok, _} = start_socket("?stream=public")
    assert {:ok, _} = start_socket("?stream=public:local")
    assert {:ok, _} = start_socket("?stream=hashtag&tag=lain")
  end

+ @tag needs_streamer: true
  test "receives well formatted events" do
    user = insert(:user)
    {:ok, _} = start_socket("?stream=public")

@@ -98,21 +94,32 @@ test "accepts valid tokens", state do
      assert {:ok, _} = start_socket("?stream=user&access_token=#{state.token.token}")
    end

+   @tag needs_streamer: true
    test "accepts the 'user' stream", %{token: token} = _state do
      assert {:ok, _} = start_socket("?stream=user&access_token=#{token.token}")

+     assert capture_log(fn ->
        assert {:error, {403, "Forbidden"}} = start_socket("?stream=user")
+     end) =~ ":badarg"
    end

+   @tag needs_streamer: true
    test "accepts the 'user:notification' stream", %{token: token} = _state do
      assert {:ok, _} = start_socket("?stream=user:notification&access_token=#{token.token}")

+     assert capture_log(fn ->
        assert {:error, {403, "Forbidden"}} = start_socket("?stream=user:notification")
+     end) =~ ":badarg"
    end

+   @tag needs_streamer: true
    test "accepts valid token on Sec-WebSocket-Protocol header", %{token: token} do
      assert {:ok, _} = start_socket("?stream=user", [{"Sec-WebSocket-Protocol", token.token}])

+     assert capture_log(fn ->
        assert {:error, {403, "Forbidden"}} =
          start_socket("?stream=user", [{"Sec-WebSocket-Protocol", "I am a friend"}])
+     end) =~ ":badarg"
    end
  end
end
@@ -8,6 +8,7 @@ defmodule Pleroma.NotificationTest do
  import Pleroma.Factory

  alias Pleroma.Notification
+ alias Pleroma.Tests.ObanHelpers
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.CommonAPI

@@ -68,16 +69,7 @@ test "does not create a notification for subscribed users if status is a reply"
  end

  describe "create_notification" do
-   setup do
-     GenServer.start(Streamer, %{}, name: Streamer)
-
-     on_exit(fn ->
-       if pid = Process.whereis(Streamer) do
-         Process.exit(pid, :kill)
-       end
-     end)
-   end
-
+   @tag needs_streamer: true
    test "it creates a notification for user and send to the 'user' and the 'user:notification' stream" do
      user = insert(:user)
      task = Task.async(fn -> assert_receive {:text, _}, 4_000 end)

@@ -588,7 +580,8 @@ test "notifications are deleted if a local user is deleted" do

    refute Enum.empty?(Notification.for_user(other_user))

-   User.delete(user)
+   {:ok, job} = User.delete(user)
+   ObanHelpers.perform(job)

    assert Enum.empty?(Notification.for_user(other_user))
  end

@@ -633,6 +626,7 @@ test "notifications are deleted if a remote user is deleted" do
    }

    {:ok, _delete_activity} = Transmogrifier.handle_incoming(delete_user_message)
+   ObanHelpers.perform_all()

    assert Enum.empty?(Notification.for_user(local_user))
  end
@@ -40,6 +40,10 @@ defmodule Pleroma.Web.ConnCase do
      Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, {:shared, self()})
    end

+   if tags[:needs_streamer] do
+     start_supervised(Pleroma.Web.Streamer.supervisor())
+   end
+
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end

@@ -39,6 +39,10 @@ defmodule Pleroma.DataCase do
      Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, {:shared, self()})
    end

+   if tags[:needs_streamer] do
+     start_supervised(Pleroma.Web.Streamer.supervisor())
+   end
+
    :ok
  end
end
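A minimal sketch of how a test opts into the streamer once these case templates handle the tag; the module and test names here are illustrative and not part of this diff:

defmodule Pleroma.StreamerTagExampleTest do
  # Hypothetical test module, assuming the needs_streamer handling shown above
  # is present in Pleroma.DataCase.
  use Pleroma.DataCase

  @tag needs_streamer: true
  test "streaming code paths have a live Streamer to talk to" do
    # With the tag set, the case template calls
    # start_supervised(Pleroma.Web.Streamer.supervisor()) before this test runs,
    # so no per-test GenServer.start/on_exit boilerplate is needed.
    assert true
  end
end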
@@ -0,0 +1,42 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Tests.ObanHelpers do
+  @moduledoc """
+  Oban test helpers.
+  """
+
+  alias Pleroma.Repo
+
+  def perform_all do
+    Oban.Job
+    |> Repo.all()
+    |> perform()
+  end
+
+  def perform(%Oban.Job{} = job) do
+    res = apply(String.to_existing_atom("Elixir." <> job.worker), :perform, [job.args, job])
+    Repo.delete(job)
+    res
+  end
+
+  def perform(jobs) when is_list(jobs) do
+    for job <- jobs, do: perform(job)
+  end
+
+  def member?(%{} = job_args, jobs) when is_list(jobs) do
+    Enum.any?(jobs, fn job ->
+      member?(job_args, job.args)
+    end)
+  end
+
+  def member?(%{} = test_attrs, %{} = attrs) do
+    Enum.all?(
+      test_attrs,
+      fn {k, _v} -> member?(test_attrs[k], attrs[k]) end
+    )
+  end
+
+  def member?(x, y), do: x == y
+end
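A rough sketch of the drain-and-assert pattern this helper enables in the tests that follow; the test module is illustrative, only ObanHelpers.perform_all/0, CommonAPI.post/2 and the factory come from this diff:

defmodule Pleroma.ObanHelpersExampleTest do
  # Hypothetical example; not part of this diff.
  use Pleroma.DataCase
  import Pleroma.Factory

  alias Pleroma.Tests.ObanHelpers
  alias Pleroma.Web.CommonAPI

  test "deferred work runs only when the enqueued jobs are performed" do
    user = insert(:user)

    # Posting now enqueues federation/streaming side effects as Oban jobs
    # instead of running them inline in the calling process.
    {:ok, _activity} = CommonAPI.post(user, %{"status" => "hello oban"})

    # Synchronously run (and delete) every pending Oban.Job, which is what
    # the old in-process job queue used to do implicitly.
    ObanHelpers.perform_all()
  end
end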
@@ -4,6 +4,7 @@ defmodule Mix.Tasks.Pleroma.DigestTest do
  import Pleroma.Factory
  import Swoosh.TestAssertions

+ alias Pleroma.Tests.ObanHelpers
  alias Pleroma.Web.CommonAPI

  setup_all do

@@ -39,6 +40,8 @@ test "Sends digest to the given user" do

      :ok = Mix.Tasks.Pleroma.Digest.run(["test", user2.nickname, yesterday_date])

+     ObanHelpers.perform_all()
+
      assert_receive {:mix_shell, :info, [message]}
      assert message =~ "Digest email have been sent"
@@ -7,14 +7,16 @@ defmodule Pleroma.UserTest do
  alias Pleroma.Builders.UserBuilder
  alias Pleroma.Object
  alias Pleroma.Repo
+ alias Pleroma.Tests.ObanHelpers
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.CommonAPI

  use Pleroma.DataCase
+ use Oban.Testing, repo: Pleroma.Repo

- import Pleroma.Factory
  import Mock
+ import Pleroma.Factory

  setup_all do
    Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)

@@ -570,22 +572,6 @@ test "it has required fields" do
        refute cs.valid?
      end)
    end

-   test "it restricts some sizes" do
-     bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
-     name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)
-
-     [bio: bio_limit, name: name_limit]
-     |> Enum.each(fn {field, size} ->
-       string = String.pad_leading(".", size)
-       cs = User.remote_user_creation(Map.put(@valid_remote, field, string))
-       assert cs.valid?
-
-       string = String.pad_leading(".", size + 1)
-       cs = User.remote_user_creation(Map.put(@valid_remote, field, string))
-       refute cs.valid?
-     end)
-   end
  end

  describe "followers and friends" do

@@ -725,7 +711,9 @@ test "it imports user followings from list" do
        user3.nickname
      ]

-     result = User.follow_import(user1, identifiers)
+     {:ok, job} = User.follow_import(user1, identifiers)
+     result = ObanHelpers.perform(job)
+
      assert is_list(result)
      assert result == [user2, user3]
    end

@@ -936,7 +924,9 @@ test "it imports user blocks from list" do
        user3.nickname
      ]

-     result = User.blocks_import(user1, identifiers)
+     {:ok, job} = User.blocks_import(user1, identifiers)
+     result = ObanHelpers.perform(job)
+
      assert is_list(result)
      assert result == [user2, user3]
    end

@@ -1053,7 +1043,9 @@ test ".delete_user_activities deletes all create activities", %{user: user} do
    test "it deletes deactivated user" do
      {:ok, user} = insert(:user, info: %{deactivated: true}) |> User.set_cache()

-     assert {:ok, _} = User.delete(user)
+     {:ok, job} = User.delete(user)
+     {:ok, _user} = ObanHelpers.perform(job)
+
      refute User.get_by_id(user.id)
    end

@@ -1071,7 +1063,8 @@ test "it deletes a user, all follow relationships and all activities", %{user: u
      {:ok, like_two, _} = CommonAPI.favorite(activity.id, follower)
      {:ok, repeat, _} = CommonAPI.repeat(activity_two.id, user)

-     {:ok, _} = User.delete(user)
+     {:ok, job} = User.delete(user)
+     {:ok, _user} = ObanHelpers.perform(job)

      follower = User.get_cached_by_id(follower.id)

@@ -1081,7 +1074,7 @@ test "it deletes a user, all follow relationships and all activities", %{user: u

      user_activities =
        user.ap_id
-       |> Activity.query_by_actor()
+       |> Activity.Queries.by_actor()
        |> Repo.all()
        |> Enum.map(fn act -> act.data["type"] end)

@@ -1103,12 +1096,18 @@ test "it deletes a user, all follow relationships and all activities", %{user: u
      {:ok, follower} = User.get_or_fetch_by_ap_id("http://mastodon.example.org/users/admin")
      {:ok, _} = User.follow(follower, user)

-     {:ok, _user} = User.delete(user)
+     {:ok, job} = User.delete(user)
+     {:ok, _user} = ObanHelpers.perform(job)

-     assert called(
-              Pleroma.Web.ActivityPub.Publisher.publish_one(%{
-                inbox: "http://mastodon.example.org/inbox"
-              })
+     assert ObanHelpers.member?(
+              %{
+                "op" => "publish_one",
+                "params" => %{
+                  "inbox" => "http://mastodon.example.org/inbox",
+                  "id" => "pleroma:fakeid"
+                }
+              },
+              all_enqueued(worker: Pleroma.Workers.PublisherWorker)
            )
    end
  end
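Because the module now also pulls in `use Oban.Testing, repo: Pleroma.Repo`, Oban's own assertion helpers are available alongside ObanHelpers.member?/2; a hedged sketch of an equivalent check (worker module taken from the diff, and how strictly args are matched depends on the Oban version in use):

# Inside a test module that does `use Oban.Testing, repo: Pleroma.Repo`:
# assert that a publisher job was enqueued at all; an :args option can be
# added to narrow the match, with version-dependent matching semantics.
assert_enqueued(worker: Pleroma.Workers.PublisherWorker)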
@@ -1117,13 +1116,62 @@ test "get_public_key_for_ap_id fetches a user that's not in the db" do
    assert {:ok, _key} = User.get_public_key_for_ap_id("http://mastodon.example.org/users/admin")
  end

- test "insert or update a user from given data" do
+ describe "insert or update a user from given data" do
+   test "with normal data" do
      user = insert(:user, %{nickname: "nick@name.de"})
      data = %{ap_id: user.ap_id <> "xxx", name: user.name, nickname: user.nickname}

      assert {:ok, %User{}} = User.insert_or_update_user(data)
    end

+   test "with overly long fields" do
+     current_max_length = Pleroma.Config.get([:instance, :account_field_value_length], 255)
+     user = insert(:user, nickname: "nickname@supergood.domain")
+
+     data = %{
+       ap_id: user.ap_id,
+       name: user.name,
+       nickname: user.nickname,
+       info: %{
+         fields: [
+           %{"name" => "myfield", "value" => String.duplicate("h", current_max_length + 1)}
+         ]
+       }
+     }
+
+     assert {:ok, %User{}} = User.insert_or_update_user(data)
+   end
+
+   test "with an overly long bio" do
+     current_max_length = Pleroma.Config.get([:instance, :user_bio_length], 5000)
+     user = insert(:user, nickname: "nickname@supergood.domain")
+
+     data = %{
+       ap_id: user.ap_id,
+       name: user.name,
+       nickname: user.nickname,
+       bio: String.duplicate("h", current_max_length + 1),
+       info: %{}
+     }
+
+     assert {:ok, %User{}} = User.insert_or_update_user(data)
+   end
+
+   test "with an overly long display name" do
+     current_max_length = Pleroma.Config.get([:instance, :user_name_length], 100)
+     user = insert(:user, nickname: "nickname@supergood.domain")
+
+     data = %{
+       ap_id: user.ap_id,
+       name: String.duplicate("h", current_max_length + 1),
+       nickname: user.nickname,
+       info: %{}
+     }
+
+     assert {:ok, %User{}} = User.insert_or_update_user(data)
+   end
+ end

  describe "per-user rich-text filtering" do
    test "html_filter_policy returns default policies, when rich-text is enabled" do
      user = insert(:user)

@@ -1153,7 +1201,8 @@ test "invalidate_cache works" do
  test "User.delete() plugs any possible zombie objects" do
    user = insert(:user)

-   {:ok, _} = User.delete(user)
+   {:ok, job} = User.delete(user)
+   {:ok, _} = ObanHelpers.perform(job)

    {:ok, cached_user} = Cachex.get(:user_cache, "ap_id:#{user.ap_id}")

@@ -1614,4 +1663,31 @@ test "syncronizes the counters with the remote instance for the follower when en
      assert User.user_info(other_user).following_count == 152
    end
  end
+
+ describe "change_email/2" do
+   setup do
+     [user: insert(:user)]
+   end
+
+   test "blank email returns error", %{user: user} do
+     assert {:error, %{errors: [email: {"can't be blank", _}]}} = User.change_email(user, "")
+     assert {:error, %{errors: [email: {"can't be blank", _}]}} = User.change_email(user, nil)
+   end
+
+   test "non unique email returns error", %{user: user} do
+     %{email: email} = insert(:user)
+
+     assert {:error, %{errors: [email: {"has already been taken", _}]}} =
+              User.change_email(user, email)
+   end
+
+   test "invalid email returns error", %{user: user} do
+     assert {:error, %{errors: [email: {"has invalid format", _}]}} =
+              User.change_email(user, "cofe")
+   end
+
+   test "changes email", %{user: user} do
+     assert {:ok, %User{email: "cofe@cofe.party"}} = User.change_email(user, "cofe@cofe.party")
+   end
+ end
end
@@ -4,16 +4,21 @@
defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
  use Pleroma.Web.ConnCase
+ use Oban.Testing, repo: Pleroma.Repo

  import Pleroma.Factory
  alias Pleroma.Activity
+ alias Pleroma.Delivery
  alias Pleroma.Instances
  alias Pleroma.Object
+ alias Pleroma.Tests.ObanHelpers
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ObjectView
  alias Pleroma.Web.ActivityPub.Relay
  alias Pleroma.Web.ActivityPub.UserView
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.CommonAPI
+ alias Pleroma.Workers.ReceiverWorker

  setup_all do
    Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)

@@ -365,7 +370,8 @@ test "it inserts an incoming activity into the database", %{conn: conn} do
      |> post("/inbox", data)

    assert "ok" == json_response(conn, 200)
-   :timer.sleep(500)
+
+   ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
    assert Activity.get_by_ap_id(data["id"])
  end

@@ -407,7 +413,7 @@ test "it inserts an incoming activity into the database", %{conn: conn, data: da
      |> post("/users/#{user.nickname}/inbox", data)

    assert "ok" == json_response(conn, 200)
-   :timer.sleep(500)
+   ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
    assert Activity.get_by_ap_id(data["id"])
  end

@@ -436,7 +442,7 @@ test "it accepts messages from actors that are followed by the user", %{
      |> post("/users/#{recipient.nickname}/inbox", data)

    assert "ok" == json_response(conn, 200)
-   :timer.sleep(500)
+   ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
    assert Activity.get_by_ap_id(data["id"])
  end

@@ -526,6 +532,8 @@ test "it removes all follower collections but actor's", %{conn: conn} do
    |> post("/users/#{recipient.nickname}/inbox", data)
    |> json_response(200)

+   ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
+
    activity = Activity.get_by_ap_id(data["id"])

    assert activity.id

@@ -601,6 +609,7 @@ test "it inserts an incoming create activity into the database", %{conn: conn} d
      |> post("/users/#{user.nickname}/outbox", data)

    result = json_response(conn, 201)

    assert Activity.get_by_ap_id(result["id"])
  end

@@ -885,4 +894,86 @@ test "it works for more than 10 users", %{conn: conn} do
      assert result["totalItems"] == 15
    end
  end
+
+ describe "delivery tracking" do
+   test "it tracks a signed object fetch", %{conn: conn} do
+     user = insert(:user, local: false)
+     activity = insert(:note_activity)
+     object = Object.normalize(activity)
+
+     object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
+
+     conn
+     |> put_req_header("accept", "application/activity+json")
+     |> assign(:user, user)
+     |> get(object_path)
+     |> json_response(200)
+
+     assert Delivery.get(object.id, user.id)
+   end
+
+   test "it tracks a signed activity fetch", %{conn: conn} do
+     user = insert(:user, local: false)
+     activity = insert(:note_activity)
+     object = Object.normalize(activity)
+
+     activity_path = String.trim_leading(activity.data["id"], Pleroma.Web.Endpoint.url())
+
+     conn
+     |> put_req_header("accept", "application/activity+json")
+     |> assign(:user, user)
+     |> get(activity_path)
+     |> json_response(200)
+
+     assert Delivery.get(object.id, user.id)
+   end
+
+   test "it tracks a signed object fetch when the json is cached", %{conn: conn} do
+     user = insert(:user, local: false)
+     other_user = insert(:user, local: false)
+     activity = insert(:note_activity)
+     object = Object.normalize(activity)
+
+     object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
+
+     conn
+     |> put_req_header("accept", "application/activity+json")
+     |> assign(:user, user)
+     |> get(object_path)
+     |> json_response(200)
+
+     build_conn()
+     |> put_req_header("accept", "application/activity+json")
+     |> assign(:user, other_user)
+     |> get(object_path)
+     |> json_response(200)
+
+     assert Delivery.get(object.id, user.id)
+     assert Delivery.get(object.id, other_user.id)
+   end
+
+   test "it tracks a signed activity fetch when the json is cached", %{conn: conn} do
+     user = insert(:user, local: false)
+     other_user = insert(:user, local: false)
+     activity = insert(:note_activity)
+     object = Object.normalize(activity)
+
+     activity_path = String.trim_leading(activity.data["id"], Pleroma.Web.Endpoint.url())
+
+     conn
+     |> put_req_header("accept", "application/activity+json")
+     |> assign(:user, user)
+     |> get(activity_path)
+     |> json_response(200)
+
+     build_conn()
+     |> put_req_header("accept", "application/activity+json")
+     |> assign(:user, other_user)
+     |> get(activity_path)
+     |> json_response(200)
+
+     assert Delivery.get(object.id, user.id)
+     assert Delivery.get(object.id, other_user.id)
+   end
+ end
end
@@ -38,9 +38,7 @@ test "it streams them out" do
        stream: fn _, _ -> nil end do
      ActivityPub.stream_out_participations(conversation.participations)

-     Enum.each(participations, fn participation ->
-       assert called(Pleroma.Web.Streamer.stream("participation", participation))
-     end)
+     assert called(Pleroma.Web.Streamer.stream("participation", participations))
    end
  end
end

@@ -686,7 +684,7 @@ test "returns reblogs for users for whom reblogs have not been muted" do
      user = insert(:user)

      {:ok, like_activity, _object} = ActivityPub.like(user, object_activity)
-     assert called(Pleroma.Web.Federator.publish(like_activity, 5))
+     assert called(Pleroma.Web.Federator.publish(like_activity))
    end

    test "returns exist activity if object already liked" do

@@ -747,7 +745,7 @@ test "adds a like activity to the db" do
      {:ok, unlike_activity, _, object} = ActivityPub.unlike(user, object)
      assert object.data["like_count"] == 0

-     assert called(Pleroma.Web.Federator.publish(unlike_activity, 5))
+     assert called(Pleroma.Web.Federator.publish(unlike_activity))
    end

    test "unliking a previously liked object" do
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
  use Pleroma.DataCase

  alias Pleroma.HTTP
+ alias Pleroma.Tests.ObanHelpers
  alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy

  import Mock

@@ -24,6 +25,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
  test "it prefetches media proxy URIs" do
    with_mock HTTP, get: fn _, _, _ -> {:ok, []} end do
      MediaProxyWarmingPolicy.filter(@message)
+
+     ObanHelpers.perform_all()
+     # Performing jobs which has been just enqueued
+     ObanHelpers.perform_all()
+
      assert called(HTTP.get(:_, :_, :_))
    end
  end
@@ -3,15 +3,18 @@
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.PublisherTest do
- use Pleroma.DataCase
+ use Pleroma.Web.ConnCase

+ import ExUnit.CaptureLog
  import Pleroma.Factory
  import Tesla.Mock
  import Mock

  alias Pleroma.Activity
  alias Pleroma.Instances
+ alias Pleroma.Object
  alias Pleroma.Web.ActivityPub.Publisher
+ alias Pleroma.Web.CommonAPI

  @as_public "https://www.w3.org/ns/activitystreams#Public"

@@ -188,7 +191,10 @@ test "it returns inbox for messages involving single recipients in total" do
      actor = insert(:user)
      inbox = "http://connrefused.site/users/nick1/inbox"

-     assert {:error, _} = Publisher.publish_one(%{inbox: inbox, json: "{}", actor: actor, id: 1})
+     assert capture_log(fn ->
+              assert {:error, _} =
+                       Publisher.publish_one(%{inbox: inbox, json: "{}", actor: actor, id: 1})
+            end) =~ "connrefused"

      assert called(Instances.set_unreachable(inbox))
    end

@@ -212,6 +218,7 @@ test "it returns inbox for messages involving single recipients in total" do
      actor = insert(:user)
      inbox = "http://connrefused.site/users/nick1/inbox"

+     assert capture_log(fn ->
      assert {:error, _} =
        Publisher.publish_one(%{
          inbox: inbox,

@@ -220,6 +227,7 @@ test "it returns inbox for messages involving single recipients in total" do
          id: 1,
          unreachable_since: NaiveDateTime.utc_now()
        })
+     end) =~ "connrefused"

      refute called(Instances.set_unreachable(inbox))
    end

@@ -257,10 +265,74 @@ test "it returns inbox for messages involving single recipients in total" do
      assert called(
               Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
                 inbox: "https://domain.com/users/nick1/inbox",
-                actor: actor,
+                actor_id: actor.id,
                 id: note_activity.data["id"]
               })
             )
    end
+
+   test_with_mock "publishes a delete activity to peers who signed fetch requests to the create acitvity/object.",
+                  Pleroma.Web.Federator.Publisher,
+                  [:passthrough],
+                  [] do
+     fetcher =
+       insert(:user,
+         local: false,
+         info: %{
+           ap_enabled: true,
+           source_data: %{"inbox" => "https://domain.com/users/nick1/inbox"}
+         }
+       )
+
+     another_fetcher =
+       insert(:user,
+         local: false,
+         info: %{
+           ap_enabled: true,
+           source_data: %{"inbox" => "https://domain2.com/users/nick1/inbox"}
+         }
+       )
+
+     actor = insert(:user)
+
+     note_activity = insert(:note_activity, user: actor)
+     object = Object.normalize(note_activity)
+
+     activity_path = String.trim_leading(note_activity.data["id"], Pleroma.Web.Endpoint.url())
+     object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
+
+     build_conn()
+     |> put_req_header("accept", "application/activity+json")
+     |> assign(:user, fetcher)
+     |> get(object_path)
+     |> json_response(200)
+
+     build_conn()
+     |> put_req_header("accept", "application/activity+json")
+     |> assign(:user, another_fetcher)
+     |> get(activity_path)
+     |> json_response(200)
+
+     {:ok, delete} = CommonAPI.delete(note_activity.id, actor)
+
+     res = Publisher.publish(actor, delete)
+     assert res == :ok
+
+     assert called(
+              Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
+                inbox: "https://domain.com/users/nick1/inbox",
+                actor_id: actor.id,
+                id: delete.data["id"]
+              })
+            )
+
+     assert called(
+              Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
+                inbox: "https://domain2.com/users/nick1/inbox",
+                actor_id: actor.id,
+                id: delete.data["id"]
+              })
+            )
+   end
  end
end
@@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.RelayTest do
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Relay

+ import ExUnit.CaptureLog
  import Pleroma.Factory
  import Mock

@@ -20,7 +21,9 @@ test "gets an actor for the relay" do

  describe "follow/1" do
    test "returns errors when user not found" do
+     assert capture_log(fn ->
      assert Relay.follow("test-ap-id") == {:error, "Could not fetch by AP id"}
+     end) =~ "Could not fetch by AP id"
    end

    test "returns activity" do

@@ -37,7 +40,9 @@ test "returns activity" do

  describe "unfollow/1" do
    test "returns errors when user not found" do
+     assert capture_log(fn ->
      assert Relay.unfollow("test-ap-id") == {:error, "Could not fetch by AP id"}
+     end) =~ "Could not fetch by AP id"
    end

    test "returns activity" do

@@ -78,7 +83,9 @@ test "returns error when object is unknown" do
        }
      )

+     assert capture_log(fn ->
      assert Relay.publish(activity) == {:error, nil}
+     end) =~ "[error] error: nil"
    end

    test_with_mock "returns announce activity and publish to federate",

@@ -92,7 +99,7 @@ test "returns error when object is unknown" do
      assert activity.data["type"] == "Announce"
      assert activity.data["actor"] == service_actor.ap_id
      assert activity.data["object"] == obj.data["id"]
-     assert called(Pleroma.Web.Federator.publish(activity, 5))
+     assert called(Pleroma.Web.Federator.publish(activity))
    end

    test_with_mock "returns announce activity and not publish to federate",

@@ -106,7 +113,7 @@ test "returns error when object is unknown" do
      assert activity.data["type"] == "Announce"
      assert activity.data["actor"] == service_actor.ap_id
      assert activity.data["object"] == obj.data["id"]
-     refute called(Pleroma.Web.Federator.publish(activity, 5))
+     refute called(Pleroma.Web.Federator.publish(activity))
    end
  end
end
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
  alias Pleroma.Object
  alias Pleroma.Object.Fetcher
  alias Pleroma.Repo
+ alias Pleroma.Tests.ObanHelpers
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Transmogrifier

@@ -102,7 +103,7 @@ test "it does not crash if the object in inReplyTo can't be fetched" do

    assert capture_log(fn ->
             {:ok, _returned_activity} = Transmogrifier.handle_incoming(data)
-          end) =~ "[error] Couldn't fetch \"\"https://404.site/whatever\"\", error: nil"
+          end) =~ "[error] Couldn't fetch \"https://404.site/whatever\", error: nil"
  end

  test "it works for incoming notices" do

@@ -648,6 +649,7 @@ test "it works for incoming user deletes" do
      |> Poison.decode!()

    {:ok, _} = Transmogrifier.handle_incoming(data)
+   ObanHelpers.perform_all()

    refute User.get_cached_by_ap_id(ap_id)
  end

@@ -1210,6 +1212,8 @@ test "it upgrades a user to activitypub" do
      assert user.info.note_count == 1

      {:ok, user} = Transmogrifier.upgrade_user_from_ap_id("https://niu.moe/users/rye")
+     ObanHelpers.perform_all()
+
      assert user.info.ap_enabled
      assert user.info.note_count == 1
      assert user.follower_address == "https://niu.moe/users/rye/followers"
@@ -1779,7 +1779,11 @@ test "common config example", %{conn: conn} do
            %{"tuple" => [":seconds_valid", 60]},
            %{"tuple" => [":path", ""]},
            %{"tuple" => [":key1", nil]},
-           %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}
+           %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]},
+           %{"tuple" => [":regex1", "~r/https:\/\/example.com/"]},
+           %{"tuple" => [":regex2", "~r/https:\/\/example.com/u"]},
+           %{"tuple" => [":regex3", "~r/https:\/\/example.com/i"]},
+           %{"tuple" => [":regex4", "~r/https:\/\/example.com/s"]}
          ]
        }
      ]

@@ -1796,7 +1800,11 @@ test "common config example", %{conn: conn} do
            %{"tuple" => [":seconds_valid", 60]},
            %{"tuple" => [":path", ""]},
            %{"tuple" => [":key1", nil]},
-           %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}
+           %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]},
+           %{"tuple" => [":regex1", "~r/https:\\/\\/example.com/"]},
+           %{"tuple" => [":regex2", "~r/https:\\/\\/example.com/u"]},
+           %{"tuple" => [":regex3", "~r/https:\\/\\/example.com/i"]},
+           %{"tuple" => [":regex4", "~r/https:\\/\\/example.com/s"]}
          ]
        }
      ]

@@ -2088,7 +2096,7 @@ test "queues key as atom", %{conn: conn} do
        post(conn, "/api/pleroma/admin/config", %{
          configs: [
            %{
-             "group" => "pleroma_job_queue",
+             "group" => "oban",
              "key" => ":queues",
              "value" => [
                %{"tuple" => [":federator_incoming", 50]},

@@ -2106,7 +2114,7 @@ test "queues key as atom", %{conn: conn} do
      assert json_response(conn, 200) == %{
              "configs" => [
                %{
-                 "group" => "pleroma_job_queue",
+                 "group" => "oban",
                  "key" => ":queues",
                  "value" => [
                    %{"tuple" => [":federator_incoming", 50]},
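For context, the admin API change above maps onto Oban's application config rather than pleroma_job_queue's; a rough sketch of the shape, with illustrative queue names and limits rather than the exact defaults shipped with this merge:

# config/config.exs — illustrative values only
config :pleroma, Oban,
  repo: Pleroma.Repo,
  queues: [
    federator_incoming: 50,
    federator_outgoing: 50,
    mailer: 10,
    scheduled_activities: 10
  ]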
@@ -103,6 +103,30 @@ test "sigil" do
    assert Config.from_binary(binary) == ~r/comp[lL][aA][iI][nN]er/
  end

+ test "link sigil" do
+   binary = Config.transform("~r/https:\/\/example.com/")
+   assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/)
+   assert Config.from_binary(binary) == ~r/https:\/\/example.com/
+ end
+
+ test "link sigil with u modifier" do
+   binary = Config.transform("~r/https:\/\/example.com/u")
+   assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/u)
+   assert Config.from_binary(binary) == ~r/https:\/\/example.com/u
+ end
+
+ test "link sigil with i modifier" do
+   binary = Config.transform("~r/https:\/\/example.com/i")
+   assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/i)
+   assert Config.from_binary(binary) == ~r/https:\/\/example.com/i
+ end
+
+ test "link sigil with s modifier" do
+   binary = Config.transform("~r/https:\/\/example.com/s")
+   assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/s)
+   assert Config.from_binary(binary) == ~r/https:\/\/example.com/s
+ end
+
  test "2 child tuple" do
    binary = Config.transform(%{"tuple" => ["v1", ":v2"]})
    assert binary == :erlang.term_to_binary({"v1", :v2})
@ -4,9 +4,14 @@
|
||||||
|
|
||||||
defmodule Pleroma.Web.FederatorTest do
|
defmodule Pleroma.Web.FederatorTest do
|
||||||
alias Pleroma.Instances
|
alias Pleroma.Instances
|
||||||
|
alias Pleroma.Tests.ObanHelpers
|
||||||
alias Pleroma.Web.CommonAPI
|
alias Pleroma.Web.CommonAPI
|
||||||
alias Pleroma.Web.Federator
|
alias Pleroma.Web.Federator
|
||||||
|
alias Pleroma.Workers.PublisherWorker
|
||||||
|
|
||||||
use Pleroma.DataCase
|
use Pleroma.DataCase
|
||||||
|
use Oban.Testing, repo: Pleroma.Repo
|
||||||
|
|
||||||
import Pleroma.Factory
|
import Pleroma.Factory
|
||||||
import Mock
|
import Mock
|
||||||
|
|
||||||
|
@ -24,15 +29,6 @@ defmodule Pleroma.Web.FederatorTest do
|
||||||
clear_config([:instance, :rewrite_policy])
|
clear_config([:instance, :rewrite_policy])
|
||||||
clear_config([:mrf_keyword])
|
clear_config([:mrf_keyword])
|
||||||
|
|
||||||
describe "Publisher.perform" do
|
|
||||||
test "call `perform` with unknown task" do
|
|
||||||
assert {
|
|
||||||
:error,
|
|
||||||
"Don't know what to do with this"
|
|
||||||
} = Pleroma.Web.Federator.Publisher.perform("test", :ok, :ok)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
describe "Publish an activity" do
|
describe "Publish an activity" do
|
||||||
setup do
|
setup do
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
|
@ -53,6 +49,7 @@ test "with relays active, it publishes to the relay", %{
|
||||||
} do
|
} do
|
||||||
with_mocks([relay_mock]) do
|
with_mocks([relay_mock]) do
|
||||||
Federator.publish(activity)
|
Federator.publish(activity)
|
||||||
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
end
|
end
|
||||||
|
|
||||||
assert_received :relay_publish
|
assert_received :relay_publish
|
||||||
|
@ -66,6 +63,7 @@ test "with relays deactivated, it does not publish to the relay", %{
|
||||||
|
|
||||||
with_mocks([relay_mock]) do
|
with_mocks([relay_mock]) do
|
||||||
Federator.publish(activity)
|
Federator.publish(activity)
|
||||||
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
end
|
end
|
||||||
|
|
||||||
refute_received :relay_publish
|
refute_received :relay_publish
|
||||||
|
@ -73,10 +71,7 @@ test "with relays deactivated, it does not publish to the relay", %{
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "Targets reachability filtering in `publish`" do
|
describe "Targets reachability filtering in `publish`" do
|
||||||
test_with_mock "it federates only to reachable instances via AP",
|
test "it federates only to reachable instances via AP" do
|
||||||
Pleroma.Web.ActivityPub.Publisher,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
|
|
||||||
{inbox1, inbox2} =
|
{inbox1, inbox2} =
|
||||||
|
@ -104,20 +99,20 @@ test "with relays deactivated, it does not publish to the relay", %{
|
||||||
{:ok, _activity} =
|
{:ok, _activity} =
|
||||||
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
|
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
|
||||||
|
|
||||||
assert called(
|
expected_dt = NaiveDateTime.to_iso8601(dt)
|
||||||
Pleroma.Web.ActivityPub.Publisher.publish_one(%{
|
|
||||||
inbox: inbox1,
|
|
||||||
unreachable_since: dt
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
refute called(Pleroma.Web.ActivityPub.Publisher.publish_one(%{inbox: inbox2}))
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
|
|
||||||
|
assert ObanHelpers.member?(
|
||||||
|
%{
|
||||||
|
"op" => "publish_one",
|
||||||
|
"params" => %{"inbox" => inbox1, "unreachable_since" => expected_dt}
|
||||||
|
},
|
||||||
|
all_enqueued(worker: PublisherWorker)
|
||||||
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
test_with_mock "it federates only to reachable instances via Websub",
|
test "it federates only to reachable instances via Websub" do
|
||||||
Pleroma.Web.Websub,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
websub_topic = Pleroma.Web.OStatus.feed_path(user)
|
websub_topic = Pleroma.Web.OStatus.feed_path(user)
|
||||||
|
|
||||||
|
@ -142,23 +137,27 @@ test "with relays deactivated, it does not publish to the relay", %{
|
||||||
|
|
||||||
{:ok, _activity} = CommonAPI.post(user, %{"status" => "HI"})
|
{:ok, _activity} = CommonAPI.post(user, %{"status" => "HI"})
|
||||||
|
|
||||||
assert called(
|
expected_callback = sub2.callback
|
||||||
Pleroma.Web.Websub.publish_one(%{
|
expected_dt = NaiveDateTime.to_iso8601(dt)
|
||||||
callback: sub2.callback,
|
|
||||||
unreachable_since: dt
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
refute called(Pleroma.Web.Websub.publish_one(%{callback: sub1.callback}))
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
|
|
||||||
|
assert ObanHelpers.member?(
|
||||||
|
%{
|
||||||
|
"op" => "publish_one",
|
||||||
|
"params" => %{
|
||||||
|
"callback" => expected_callback,
|
||||||
|
"unreachable_since" => expected_dt
|
||||||
|
}
|
||||||
|
},
|
||||||
|
all_enqueued(worker: PublisherWorker)
|
||||||
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
test_with_mock "it federates only to reachable instances via Salmon",
|
test "it federates only to reachable instances via Salmon" do
|
||||||
Pleroma.Web.Salmon,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
|
|
||||||
remote_user1 =
|
_remote_user1 =
|
||||||
insert(:user, %{
|
insert(:user, %{
|
||||||
local: false,
|
local: false,
|
||||||
nickname: "nick1@domain.com",
|
nickname: "nick1@domain.com",
|
||||||
|
@ -174,6 +173,8 @@ test "with relays deactivated, it does not publish to the relay", %{
|
||||||
info: %{salmon: "https://domain2.com/salmon"}
|
info: %{salmon: "https://domain2.com/salmon"}
|
||||||
})
|
})
|
||||||
|
|
||||||
|
remote_user2_id = remote_user2.id
|
||||||
|
|
||||||
dt = NaiveDateTime.utc_now()
|
dt = NaiveDateTime.utc_now()
|
||||||
Instances.set_unreachable(remote_user2.ap_id, dt)
|
Instances.set_unreachable(remote_user2.ap_id, dt)
|
||||||
|
|
||||||
|
@ -182,14 +183,20 @@ test "with relays deactivated, it does not publish to the relay", %{
|
||||||
{:ok, _activity} =
|
{:ok, _activity} =
|
||||||
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
|
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
|
||||||
|
|
||||||
assert called(
|
expected_dt = NaiveDateTime.to_iso8601(dt)
|
||||||
Pleroma.Web.Salmon.publish_one(%{
|
|
||||||
recipient: remote_user2,
|
|
||||||
unreachable_since: dt
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
refute called(Pleroma.Web.Salmon.publish_one(%{recipient: remote_user1}))
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
|
|
||||||
|
assert ObanHelpers.member?(
|
||||||
|
%{
|
||||||
|
"op" => "publish_one",
|
||||||
|
"params" => %{
|
||||||
|
"recipient_id" => remote_user2_id,
|
||||||
|
"unreachable_since" => expected_dt
|
||||||
|
}
|
||||||
|
},
|
||||||
|
all_enqueued(worker: PublisherWorker)
|
||||||
|
)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@@ -209,7 +216,8 @@ test "successfully processes incoming AP docs with correct origin" do
         "to" => ["https://www.w3.org/ns/activitystreams#Public"]
       }

-      {:ok, _activity} = Federator.incoming_ap_doc(params)
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert {:ok, _activity} = ObanHelpers.perform(job)
     end

     test "rejects incoming AP docs with incorrect origin" do
@@ -227,7 +235,8 @@ test "rejects incoming AP docs with incorrect origin" do
         "to" => ["https://www.w3.org/ns/activitystreams#Public"]
       }

-      :error = Federator.incoming_ap_doc(params)
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert :error = ObanHelpers.perform(job)
     end

     test "it does not crash if MRF rejects the post" do
@@ -242,7 +251,8 @@ test "it does not crash if MRF rejects the post" do
         File.read!("test/fixtures/mastodon-post-activity.json")
         |> Poison.decode!()

-      assert Federator.incoming_ap_doc(params) == :error
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert :error = ObanHelpers.perform(job)
     end
   end
 end

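Context for the federator hunks above: delivery and inbox processing now run as Oban jobs, so the tests assert on (and then execute) enqueued jobs instead of mocking `Pleroma.Web.Salmon`. A minimal sketch of the same pattern with Oban's stock test helpers follows; `MyApp.Repo` and `MyApp.PublishWorker` are placeholder names, not modules from this diff, and the `perform/1` callback shape matches current Oban releases (older releases used `perform(args, job)`):

    defmodule MyApp.PublishWorker do
      # Placeholder worker: real code would deliver the payload to a remote instance.
      use Oban.Worker, queue: :federator_outgoing

      @impl Oban.Worker
      def perform(%Oban.Job{args: %{"op" => "publish_one", "params" => params}}) do
        IO.inspect(params, label: "publishing")
        :ok
      end
    end

    defmodule MyApp.PublishWorkerTest do
      use ExUnit.Case, async: true
      # Brings in all_enqueued/1, assert_enqueued/1 and perform_job/2; requires Oban to
      # run in test/manual mode so inserted jobs stay queued instead of executing.
      use Oban.Testing, repo: MyApp.Repo

      test "enqueues and then performs a publish_one job" do
        {:ok, _job} =
          %{"op" => "publish_one", "params" => %{"recipient_id" => "some-id"}}
          |> MyApp.PublishWorker.new()
          |> Oban.insert()

        assert_enqueued(worker: MyApp.PublishWorker)

        # Same idea as ObanHelpers.perform(all_enqueued(...)) in the diff:
        # pull the queued jobs and run them in-process.
        for job <- all_enqueued(worker: MyApp.PublishWorker) do
          assert :ok = perform_job(MyApp.PublishWorker, job.args)
        end
      end
    end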
@@ -16,7 +16,8 @@ defmodule Pleroma.Instances.InstanceTest do

   describe "set_reachable/1" do
     test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
-      instance = insert(:instance, unreachable_since: NaiveDateTime.utc_now())
+      unreachable_since = NaiveDateTime.to_iso8601(NaiveDateTime.utc_now())
+      instance = insert(:instance, unreachable_since: unreachable_since)

       assert {:ok, instance} = Instance.set_reachable(instance.host)
       refute instance.unreachable_since

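The fixture above now seeds `unreachable_since` as an ISO 8601 string, matching how timestamps survive the JSON round trip through Oban job arguments. The conversion itself is lossless in plain Elixir (illustrative only):

    dt = NaiveDateTime.utc_now()
    iso = NaiveDateTime.to_iso8601(dt)
    # Parsing the string back yields an equal value, microseconds included.
    {:ok, parsed} = NaiveDateTime.from_iso8601(iso)
    :eq = NaiveDateTime.compare(parsed, dt)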
@@ -13,6 +13,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIControllerTest do
   alias Pleroma.Object
   alias Pleroma.Repo
   alias Pleroma.ScheduledActivity
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.CommonAPI
@@ -744,6 +745,16 @@ test "get a status", %{conn: conn} do
     assert id == to_string(activity.id)
   end

+  test "get statuses by IDs", %{conn: conn} do
+    %{id: id1} = insert(:note_activity)
+    %{id: id2} = insert(:note_activity)
+
+    query_string = "ids[]=#{id1}&ids[]=#{id2}"
+    conn = get(conn, "/api/v1/statuses/?#{query_string}")
+
+    assert [%{"id" => ^id1}, %{"id" => ^id2}] = Enum.sort_by(json_response(conn, :ok), & &1["id"])
+  end
+
   describe "deleting a status" do
     test "when you created it", %{conn: conn} do
       activity = insert(:note_activity)

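The `get statuses by IDs` test above relies on Plug's bracket syntax for array parameters: repeated `ids[]` keys decode into a list, which the controller can then use to load several statuses at once. Illustration with Plug's query decoder (the ID values are invented):

    # Plug.Conn.Query.decode/1 turns repeated "ids[]=..." pairs into a list under "ids".
    Plug.Conn.Query.decode("ids[]=9gZ5VYh&ids[]=9gZ61mU")
    #=> %{"ids" => ["9gZ5VYh", "9gZ61mU"]}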
@@ -3688,7 +3699,7 @@ test "returns 404 when poll is private and not available for user", %{conn: conn
         build_conn()
         |> assign(:user, user)

-      [conn: conn, activity: activity]
+      [conn: conn, activity: activity, user: user]
     end

     test "returns users who have favorited the status", %{conn: conn, activity: activity} do
@@ -3748,6 +3759,32 @@ test "does not fail on an unauthenticated request", %{conn: conn, activity: acti
       [%{"id" => id}] = response
       assert id == other_user.id
     end
+
+    test "requires authentication for private posts", %{conn: conn, user: user} do
+      other_user = insert(:user)
+
+      {:ok, activity} =
+        CommonAPI.post(user, %{
+          "status" => "@#{other_user.nickname} wanna get some #cofe together?",
+          "visibility" => "direct"
+        })
+
+      {:ok, _, _} = CommonAPI.favorite(activity.id, other_user)
+
+      conn
+      |> assign(:user, nil)
+      |> get("/api/v1/statuses/#{activity.id}/favourited_by")
+      |> json_response(404)
+
+      response =
+        build_conn()
+        |> assign(:user, other_user)
+        |> get("/api/v1/statuses/#{activity.id}/favourited_by")
+        |> json_response(200)
+
+      [%{"id" => id}] = response
+      assert id == other_user.id
+    end
   end

   describe "GET /api/v1/statuses/:id/reblogged_by" do
@@ -3759,7 +3796,7 @@ test "does not fail on an unauthenticated request", %{conn: conn, activity: acti
         build_conn()
         |> assign(:user, user)

-      [conn: conn, activity: activity]
+      [conn: conn, activity: activity, user: user]
     end

     test "returns users who have reblogged the status", %{conn: conn, activity: activity} do
@@ -3819,6 +3856,29 @@ test "does not fail on an unauthenticated request", %{conn: conn, activity: acti
       [%{"id" => id}] = response
       assert id == other_user.id
     end
+
+    test "requires authentication for private posts", %{conn: conn, user: user} do
+      other_user = insert(:user)
+
+      {:ok, activity} =
+        CommonAPI.post(user, %{
+          "status" => "@#{other_user.nickname} wanna get some #cofe together?",
+          "visibility" => "direct"
+        })
+
+      conn
+      |> assign(:user, nil)
+      |> get("/api/v1/statuses/#{activity.id}/reblogged_by")
+      |> json_response(404)
+
+      response =
+        build_conn()
+        |> assign(:user, other_user)
+        |> get("/api/v1/statuses/#{activity.id}/reblogged_by")
+        |> json_response(200)
+
+      assert [] == response
+    end
   end

   describe "POST /auth/password, with valid parameters" do

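Both `requires authentication for private posts` tests above expect a 404 for anonymous requests and normal output for an addressed user. The matching controller change is not part of this excerpt; a rough sketch of the kind of guard involved, assuming helpers that exist elsewhere in Pleroma (`Activity.get_by_id_with_object/1`, `Visibility.visible_for_user?/2`), could look like:

    # Hypothetical guard, not the literal code from this commit.
    defp fetch_visible_activity(id, user) do
      with %Activity{} = activity <- Activity.get_by_id_with_object(id),
           true <- Visibility.visible_for_user?(activity, user) do
        {:ok, activity}
      else
        _ -> {:error, :not_found}
      end
    end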
@@ -3838,6 +3898,7 @@ test "it creates a PasswordResetToken record for user", %{user: user} do
     end

     test "it sends an email to user", %{user: user} do
+      ObanHelpers.perform_all()
       token_record = Repo.get_by(Pleroma.PasswordResetToken, user_id: user.id)

       email = Pleroma.Emails.UserEmail.password_reset_email(user, token_record.token)
@@ -3898,6 +3959,8 @@ test "resend account confirmation email", %{conn: conn, user: user} do
       |> post("/api/v1/pleroma/accounts/confirmation_resend?email=#{user.email}")
       |> json_response(:no_content)

+      ObanHelpers.perform_all()
+
       email = Pleroma.Emails.UserEmail.account_confirmation_email(user)
       notify_email = Config.get([:instance, :notify_email])
       instance_name = Config.get([:instance, :name])

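`ObanHelpers.perform_all/0`, added before the email assertions above, drains every queued job so that mail delivery (now an Oban job) actually happens before the test inspects its result. Its implementation is not shown in this excerpt; conceptually it does something like the following (sketch only; the callback arity depends on the Oban version in use):

    # Sketch: run every enqueued Oban job in-process, then assert on the side effects.
    def perform_all do
      Oban.Job
      |> Pleroma.Repo.all()
      |> Enum.each(fn job ->
        worker = String.to_existing_atom("Elixir." <> job.worker)
        worker.perform(job.args, job)
      end)
    end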
@@ -3953,6 +4016,7 @@ test "returns error", %{conn: conn, user: user} do
       Config.put([:suggestions, :enabled], true)
       Config.put([:suggestions, :third_party_engine], "http://test500?{{host}}&{{user}}")

+      assert capture_log(fn ->
       res =
         conn
         |> assign(:user, user)
@@ -3960,6 +4024,7 @@ test "returns error", %{conn: conn, user: user} do
         |> json_response(500)

       assert res == "Something went wrong"
+      end) =~ "Could not retrieve suggestions"
     end

     test "returns suggestions", %{conn: conn, user: user, other_user: other_user} do

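The suggestions hunks wrap the failing request in `capture_log/1` so the test both silences and asserts on the logged warning. Inside a test, `ExUnit.CaptureLog.capture_log/1` returns whatever was logged in the given function as a string, for example:

    import ExUnit.CaptureLog
    require Logger

    assert capture_log(fn ->
             Logger.error("Could not retrieve suggestions at all")
             :ok
           end) =~ "Could not retrieve suggestions"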