diff --git a/.credo.exs b/.credo.exs index 580cb2705..46d45d015 100644 --- a/.credo.exs +++ b/.credo.exs @@ -69,8 +69,8 @@ # You can also customize the exit_status of each check. # If you don't want TODO comments to cause `mix credo` to fail, just # set this value to 0 (zero). - {Credo.Check.Design.TagTODO, exit_status: 2}, - {Credo.Check.Design.TagFIXME}, + {Credo.Check.Design.TagTODO, exit_status: 0}, + {Credo.Check.Design.TagFIXME, exit_status: 0}, {Credo.Check.Readability.FunctionNames}, {Credo.Check.Readability.LargeNumbers}, @@ -81,7 +81,9 @@ {Credo.Check.Readability.ParenthesesOnZeroArityDefs}, {Credo.Check.Readability.ParenthesesInCondition}, {Credo.Check.Readability.PredicateFunctionNames}, - {Credo.Check.Readability.PreferImplicitTry}, + # lanodan: I think PreferImplicitTry should be consistency, and the behaviour seems + # inconsistent, see: https://github.com/rrrene/credo/issues/224 + {Credo.Check.Readability.PreferImplicitTry, false}, {Credo.Check.Readability.RedundantBlankLines}, {Credo.Check.Readability.StringSigils}, {Credo.Check.Readability.TrailingBlankLine}, @@ -126,10 +128,6 @@ # Deprecated checks (these will be deleted after a grace period) {Credo.Check.Readability.Specs, false}, - {Credo.Check.Warning.NameRedeclarationByAssignment, false}, - {Credo.Check.Warning.NameRedeclarationByCase, false}, - {Credo.Check.Warning.NameRedeclarationByDef, false}, - {Credo.Check.Warning.NameRedeclarationByFn, false}, # Custom checks can be created using `mix credo.gen.check`. # diff --git a/.gitignore b/.gitignore index 04c61ede7..774893b35 100644 --- a/.gitignore +++ b/.gitignore @@ -35,3 +35,6 @@ erl_crash.dump # Editor config /.vscode/ + +# Prevent committing docs files +/priv/static/doc/* diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 6deb0a1de..c07f1a5d3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,8 +1,4 @@ -image: elixir:1.7.2 - -services: - - name: postgres:9.6.2 - command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"] +image: elixir:1.8.1 variables: POSTGRES_DB: pleroma_test @@ -17,23 +13,68 @@ cache: - deps - _build stages: - - lint + - build - test + - deploy before_script: - mix local.hex --force - mix local.rebar --force + +build: + stage: build + script: - mix deps.get - mix compile --force - - mix ecto.create - - mix ecto.migrate -lint: - stage: lint +docs-build: + stage: build + only: + - master@pleroma/pleroma + - develop@pleroma/pleroma + variables: + MIX_ENV: dev script: - - mix format --check-formatted + - mix deps.get + - mix compile + - mix docs + artifacts: + paths: + - priv/static/doc unit-testing: stage: test + services: + - name: postgres:9.6.2 + command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"] script: + - mix ecto.create + - mix ecto.migrate - mix test --trace --preload-modules + +lint: + stage: test + script: + - mix format --check-formatted + +analysis: + stage: test + script: + - mix deps.get + - mix credo --strict --only=warnings,todo,fixme,consistency,readability + + +docs-deploy: + stage: deploy + image: alpine:3.9 + only: + - master@pleroma/pleroma + - develop@pleroma/pleroma + before_script: + - apk update && apk add openssh-client rsync + script: + - mkdir -p ~/.ssh + - echo "${SSH_HOST_KEY}" > ~/.ssh/known_hosts + - eval $(ssh-agent -s) + - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add - + - rsync -hrvz --delete -e "ssh -p ${SSH_PORT}" priv/static/doc/ "${SSH_USER_HOST_LOCATION}/${CI_COMMIT_REF_NAME}" diff --git a/LICENSE b/AGPL-3 
similarity index 100% rename from LICENSE rename to AGPL-3 diff --git a/CC-BY-NC-ND-4.0 b/CC-BY-NC-ND-4.0 new file mode 100644 index 000000000..486544290 --- /dev/null +++ b/CC-BY-NC-ND-4.0 @@ -0,0 +1,403 @@ +Attribution-NonCommercial-NoDerivatives 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 +International Public License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-NonCommercial-NoDerivatives 4.0 International Public +License ("Public License"). 
To the extent this Public License may be +interpreted as a contract, You are granted the Licensed Rights in +consideration of Your acceptance of these terms and conditions, and the +Licensor grants You such rights in consideration of benefits the +Licensor receives from making the Licensed Material available under +these terms and conditions. + + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + + c. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + d. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + e. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + f. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + g. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + h. NonCommercial means not primarily intended for or directed towards + commercial advantage or monetary compensation. For purposes of + this Public License, the exchange of the Licensed Material for + other material subject to Copyright and Similar Rights by digital + file-sharing or similar means is NonCommercial provided there is + no payment of monetary compensation in connection with the + exchange. + + i. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + j. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + k. 
You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part, for NonCommercial purposes only; and + + b. produce and reproduce, but not Share, Adapted Material + for NonCommercial purposes only. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties, including when + the Licensed Material is used other than for NonCommercial + purposes. + + +Section 3 -- License Conditions. 
+ +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material, You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + For the avoidance of doubt, You do not have permission under + this Public License to Share Adapted Material. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database for NonCommercial purposes + only and provided You do not Share Adapted Material; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. 
TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + +======================================================================= + +Creative Commons is not a party to its public +licenses. 
Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. + diff --git a/CC-BY-SA-4.0 b/CC-BY-SA-4.0 new file mode 100644 index 000000000..4681ab80f --- /dev/null +++ b/CC-BY-SA-4.0 @@ -0,0 +1,427 @@ +Attribution-ShareAlike 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. 
Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-ShareAlike 4.0 International Public +License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-ShareAlike 4.0 International Public License ("Public +License"). To the extent this Public License may be interpreted as a +contract, You are granted the Licensed Rights in consideration of Your +acceptance of these terms and conditions, and the Licensor grants You +such rights in consideration of benefits the Licensor receives from +making the Licensed Material available under these terms and +conditions. + + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. BY-SA Compatible License means a license listed at + creativecommons.org/compatiblelicenses, approved by Creative + Commons as essentially the equivalent of this Public License. + + d. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + + e. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + f. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + g. License Elements means the license attributes listed in the name + of a Creative Commons Public License. The License Elements of this + Public License are Attribution and ShareAlike. + + h. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + i. 
Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + j. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + k. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + l. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + m. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part; and + + b. produce, reproduce, and Share Adapted Material. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. Additional offer from the Licensor -- Adapted Material. + Every recipient of Adapted Material from You + automatically receives an offer from the Licensor to + exercise the Licensed Rights in the Adapted Material + under the conditions of the Adapter's License You apply. + + c. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. 
Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties. + + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + b. ShareAlike. + + In addition to the conditions in Section 3(a), if You Share + Adapted Material You produce, the following conditions also apply. + + 1. The Adapter's License You apply must be a Creative Commons + license with the same License Elements, this version or + later, or a BY-SA Compatible License. + + 2. You must include the text of, or the URI or hyperlink to, the + Adapter's License You apply. You may satisfy this condition + in any reasonable manner based on the medium, means, and + context in which You Share Adapted Material. + + 3. You may not offer or impose any additional or different terms + or conditions on, or apply any Effective Technological + Measures to, Adapted Material that restrict exercise of the + rights granted under the Adapter's License You apply. + + +Section 4 -- Sui Generis Database Rights. 
+ +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material, + including for purposes of Section 3(b); and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. 
+ + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. diff --git a/COPYING b/COPYING new file mode 100644 index 000000000..ceec519ae --- /dev/null +++ b/COPYING @@ -0,0 +1,48 @@ +Unless otherwise stated this repository is copyright © 2017-2019 +Pleroma Authors , and is distributed under +The GNU Affero General Public License Version 3, you should have received a +copy of the license file as AGPL-3. + +--- + +The following files are copyright © 2019 shitposter.club, and are distributed +under the Creative Commons Attribution-ShareAlike 4.0 International license, +you should have received a copy of the license file as CC-BY-SA-4.0. + +priv/static/images/pleroma-fox-tan.png +priv/static/images/pleroma-fox-tan-smol.png +priv/static/images/pleroma-tan.png + +--- + +The following files are copyright © 2017-2019 Pleroma Authors +, and are distributed under the Creative Commons +Attribution-ShareAlike 4.0 International license, you should have received +a copy of the license file as CC-BY-SA-4.0. 
+ +priv/static/images/avi.png +priv/static/images/banner.png +priv/static/instance/thumbnail.jpeg + +--- + +All photos published on Unsplash can be used for free. You can use them for +commercial and noncommercial purposes. You do not need to ask permission from +or provide credit to the photographer or Unsplash, although it is appreciated +when possible. + +More precisely, Unsplash grants you an irrevocable, nonexclusive, worldwide +copyright license to download, copy, modify, distribute, perform, and use +photos from Unsplash for free, including for commercial purposes, without +permission from or attributing the photographer or Unsplash. This license +does not include the right to compile photos from Unsplash to replicate +a similar or competing service. + +priv/static/images/city.jpg + +--- + +The files present under the priv/static/finmoji directory are copyright +Finland , and are distributed under the Creative +Commons Attribution-NonCommercial-NoDerivatives 4.0 International license, you +should have received a copy of the license file as CC-BY-NC-ND-4.0. diff --git a/README.md b/README.md index 4f22445d0..c45190fc3 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,16 @@ # Pleroma +**Note**: This readme as well as complete documentation is also available at + ## About Pleroma Pleroma is a microblogging server software that can federate (= exchange messages with) other servers that support the same federation standards (OStatus and ActivityPub). What that means is that you can host a server for yourself or your friends and stay in control of your online identity, but still exchange messages with people on larger servers. Pleroma will federate with all servers that implement either OStatus or ActivityPub, like Friendica, GNU Social, Hubzilla, Mastodon, Misskey, Peertube, and Pixelfed. Pleroma is written in Elixir, high-performance and can run on small devices like a Raspberry Pi. -For clients it supports both the [GNU Social API with Qvitter extensions](https://twitter-api.readthedocs.io/en/latest/index.html) and the [Mastodon client API](https://github.com/tootsuite/documentation/blob/master/Using-the-API/API.md). +For clients it supports both the [GNU Social API with Qvitter extensions](https://twitter-api.readthedocs.io/en/latest/index.html) and the [Mastodon client API](https://docs.joinmastodon.org/api/guidelines/). -- [Client Applications for Pleroma](docs/Clients.md) +- [Client Applications for Pleroma](https://docs-develop.pleroma.social/clients.html) No release has been made yet, but several servers have been online for months already. If you want to run your own server, feel free to contact us at @lain@pleroma.soykaf.com or in our dev chat at #pleroma on freenode or via matrix at . @@ -28,7 +30,7 @@ While we don’t provide docker files, other people have written very good ones. * Run `mix deps.get` to install elixir dependencies. * Run `mix pleroma.instance gen`. This will ask you questions about your instance and generate a configuration file in `config/generated_config.exs`. Check that and copy it to either `config/dev.secret.exs` or `config/prod.secret.exs`. It will also create a `config/setup_db.psql`, which you should run as the PostgreSQL superuser (i.e., `sudo -u postgres psql -f config/setup_db.psql`). It will create the database, user, and password you gave `mix pleroma.gen.instance` earlier, as well as set up the necessary extensions in the database. PostgreSQL superuser privileges are only needed for this step.
-* For these next steps, the default will be to run pleroma using the dev configuration file, `config/dev.secret.exs`. To run them using the prod config file, prefix each command at the shell with `MIX_ENV=prod`. For example: `MIX_ENV=prod mix phx.server`. Documentation for the config can be found at [`docs/config.md`](docs/config.md) +* For these next steps, the default will be to run pleroma using the dev configuration file, `config/dev.secret.exs`. To run them using the prod config file, prefix each command at the shell with `MIX_ENV=prod`. For example: `MIX_ENV=prod mix phx.server`. Documentation for the config can be found at [`docs/config.md`](docs/config.md) in the repository, or at the "Configuration" page on * Run `mix ecto.migrate` to run the database migrations. You will have to do this again after certain updates. * You can check if your instance is configured correctly by running it with `mix phx.server` and checking the instance info endpoint at `/api/v1/instance`. If it shows your uri, name and email correctly, you are configured correctly. If it shows something like `localhost:4000`, your configuration is probably wrong, unless you are running a local development setup. * The common and convenient way for adding HTTPS is by using Nginx as a reverse proxy. You can look at example Nginx configuration in `installation/pleroma.nginx`. If you need TLS/SSL certificates for HTTPS, you can look get some for free with letsencrypt: . The simplest way to obtain and install a certificate is to use [Certbot.](https://certbot.eff.org) Depending on your specific setup, certbot may be able to get a certificate and configure your web server automatically. @@ -66,7 +68,7 @@ This is useful for running Pleroma inside Tor or I2P. ## Customization and contribution -The [Pleroma Wiki](https://git.pleroma.social/pleroma/pleroma/wikis/home) offers manuals and guides on how to further customize your instance to your liking and how you can contribute to the project. +The [Pleroma Documentation](https://docs-develop.pleroma.social/readme.html) offers manuals and guides on how to further customize your instance to your liking and how you can contribute to the project. ## Troubleshooting diff --git a/config/config.exs b/config/config.exs index d9ed43dda..d42b136b7 100644 --- a/config/config.exs +++ b/config/config.exs @@ -8,7 +8,9 @@ # General application configuration config :pleroma, ecto_repos: [Pleroma.Repo] -config :pleroma, Pleroma.Repo, types: Pleroma.PostgresTypes +config :pleroma, Pleroma.Repo, + types: Pleroma.PostgresTypes, + telemetry_event: [Pleroma.Repo.Instrumenter] config :pleroma, Pleroma.Captcha, enabled: false, @@ -34,7 +36,8 @@ # Upload configuration config :pleroma, Pleroma.Upload, uploader: Pleroma.Uploaders.Local, - filters: [], + filters: [Pleroma.Upload.Filter.Dedupe], + link_name: true, proxy_remote: false, proxy_opts: [ redirect_on_failure: false, @@ -55,7 +58,13 @@ cgi: "https://mdii.sakura.ne.jp/mdii-post.cgi", files: "https://mdii.sakura.ne.jp" -config :pleroma, :emoji, shortcode_globs: ["/emoji/custom/**/*.png"] +config :pleroma, :emoji, + shortcode_globs: ["/emoji/custom/**/*.png"], + groups: [ + # Put groups that have higher priority than defaults here. 
Example in `docs/config/custom_emoji.md` + Finmoji: "/finmoji/128px/*-128.png", + Custom: ["/emoji/*.png", "/emoji/custom/*.png"] + ] config :pleroma, :uri_schemes, valid_schemes: [ @@ -88,6 +97,7 @@ # Configures the endpoint config :pleroma, Pleroma.Web.Endpoint, + instrumenters: [Pleroma.Web.Endpoint.Instrumenter], url: [host: "localhost"], http: [ dispatch: [ @@ -119,6 +129,11 @@ format: "$metadata[$level] $message", metadata: [:request_id] +config :quack, + level: :warn, + meta: [:all], + webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE" + config :mime, :types, %{ "application/xml" => ["xml"], "application/xrd+xml" => ["xrd+xml"], @@ -133,7 +148,14 @@ config :tesla, adapter: Tesla.Adapter.Hackney # Configures http settings, upstream proxy etc. -config :pleroma, :http, proxy_url: nil +config :pleroma, :http, + proxy_url: nil, + adapter: [ + ssl_options: [ + # We don't support TLS v1.3 yet + versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"] + ] + ] config :pleroma, :instance, name: "Pleroma", @@ -166,7 +188,8 @@ no_attachment_links: false, welcome_user_nickname: nil, welcome_message: nil, - max_report_comment_size: 1000 + max_report_comment_size: 1000, + safe_dm_mentions: false config :pleroma, :markup, # XXX - unfortunately, inline images must be enabled by default right now, because @@ -215,6 +238,9 @@ scopeCopy: true, subjectLineBehavior: "email", alwaysShowSubjectInput: true + }, + masto_fe: %{ + showInstanceSpecificPanel: true } config :pleroma, :activitypub, @@ -262,8 +288,6 @@ config :pleroma, :chat, enabled: true -config :ecto, json_library: Jason - config :phoenix, :format_encoders, json: Jason config :pleroma, :gopher, @@ -340,11 +364,18 @@ initial_timeout: 30, max_retries: 5 -config :pleroma, Pleroma.Jobs, - federator_incoming: [max_jobs: 50], - federator_outgoing: [max_jobs: 50], - mailer: [max_jobs: 10], - user: [max_jobs: 10] +config :pleroma_job_queue, :queues, + federator_incoming: 50, + federator_outgoing: 50, + web_push: 50, + mailer: 10, + transmogrifier: 20, + scheduled_activities: 10, + user: 10 + +config :pleroma, :fetch_initial_posts, + enabled: false, + pages: 5 config :auto_linker, opts: [ @@ -356,6 +387,42 @@ rel: false ] +config :pleroma, :ldap, + enabled: System.get_env("LDAP_ENABLED") == "true", + host: System.get_env("LDAP_HOST") || "localhost", + port: String.to_integer(System.get_env("LDAP_PORT") || "389"), + ssl: System.get_env("LDAP_SSL") == "true", + sslopts: [], + tls: System.get_env("LDAP_TLS") == "true", + tlsopts: [], + base: System.get_env("LDAP_BASE") || "dc=example,dc=com", + uid: System.get_env("LDAP_UID") || "cn" + +oauth_consumer_strategies = String.split(System.get_env("OAUTH_CONSUMER_STRATEGIES") || "") + +ueberauth_providers = + for strategy <- oauth_consumer_strategies do + strategy_module_name = "Elixir.Ueberauth.Strategy.#{String.capitalize(strategy)}" + strategy_module = String.to_atom(strategy_module_name) + {String.to_atom(strategy), {strategy_module, [callback_params: ["state"]]}} + end + +config :ueberauth, + Ueberauth, + base_path: "/oauth", + providers: ueberauth_providers + +config :pleroma, :auth, oauth_consumer_strategies: oauth_consumer_strategies + +config :pleroma, Pleroma.Mailer, adapter: Swoosh.Adapters.Sendmail + +config :prometheus, Pleroma.Web.Endpoint.MetricsExporter, path: "/api/pleroma/app_metrics" + +config :pleroma, Pleroma.ScheduledActivity, + daily_user_limit: 25, + total_user_limit: 300, + enabled: true + # Import environment specific config. 
This must remain at the bottom # of this file so it overrides the configuration defined above. import_config "#{Mix.env()}.exs" diff --git a/config/dev.exs b/config/dev.exs index f77bb9976..a7eb4b644 100644 --- a/config/dev.exs +++ b/config/dev.exs @@ -12,7 +12,6 @@ protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192] ], protocol: "http", - secure_cookie_flag: false, debug_errors: true, code_reloader: true, check_origin: false, diff --git a/config/emoji.txt b/config/emoji.txt index 7afacb09f..79246f239 100644 --- a/config/emoji.txt +++ b/config/emoji.txt @@ -1,5 +1,5 @@ -firefox, /emoji/Firefox.gif -blank, /emoji/blank.png +firefox, /emoji/Firefox.gif, Gif,Fun +blank, /emoji/blank.png, Fun f_00b, /emoji/f_00b.png f_00b11b, /emoji/f_00b11b.png f_00b33b, /emoji/f_00b33b.png @@ -28,4 +28,3 @@ f_33b00b, /emoji/f_33b00b.png f_33b22b, /emoji/f_33b22b.png f_33h, /emoji/f_33h.png f_33t, /emoji/f_33t.png - diff --git a/config/test.exs b/config/test.exs index fbeba0919..894fa8d3d 100644 --- a/config/test.exs +++ b/config/test.exs @@ -17,6 +17,8 @@ # Print only warnings and errors during test config :logger, level: :warn +config :pleroma, Pleroma.Upload, filters: [], link_name: false + config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads" config :pleroma, Pleroma.Mailer, adapter: Swoosh.Adapters.Test @@ -44,7 +46,14 @@ "BLH1qVhJItRGCfxgTtONfsOKDc9VRAraXw-3NsmjMngWSh7NxOizN6bkuRA7iLTMPS82PjwJAr3UoK9EC1IFrz4", private_key: "_-XZ0iebPrRfZ_o0-IatTdszYa8VCH1yLN-JauK7HHA" -config :pleroma, Pleroma.Jobs, testing: [max_jobs: 2] +config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock + +config :pleroma_job_queue, disabled: true + +config :pleroma, Pleroma.ScheduledActivity, + daily_user_limit: 2, + total_user_limit: 3, + enabled: false try do import_config "test.secret.exs" diff --git a/docs/Differences-in-MastodonAPI-Responses.md b/docs/Differences-in-MastodonAPI-Responses.md deleted file mode 100644 index 3026e1173..000000000 --- a/docs/Differences-in-MastodonAPI-Responses.md +++ /dev/null @@ -1,15 +0,0 @@ -# Differences in Mastodon API responses from vanilla Mastodon - -A Pleroma instance can be identified by " (compatible; Pleroma )" present in `version` field in response from `/api/v1/instance` - -## Flake IDs - -Pleroma uses 128-bit ids as opposed to Mastodon's 64 bits. However just like Mastodon's ids they are sortable strings - -## Attachment cap - -Some apps operate under the assumption that no more than 4 attachments can be returned or uploaded. Pleroma however does not enforce any limits on attachment count neither when returning the status object nor when posting. - -## Timelines - -Adding the parameter `with_muted=true` to the timeline queries will also return activities by muted (not by blocked!) users. diff --git a/docs/admin/backup.md b/docs/admin/backup.md new file mode 100644 index 000000000..2c70e7bf8 --- /dev/null +++ b/docs/admin/backup.md @@ -0,0 +1,17 @@ +# Backup/Restore your instance + +## Backup + +1. Stop the Pleroma service. +2. Go to the working directory of Pleroma (default is `/opt/pleroma`) +3. Run `sudo -Hu postgres pg_dump -d --format=custom -f ` +4. Copy `pleroma.pgdump`, `config/prod.secret.exs` and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too. +5. Restart the Pleroma service. + +## Restore + +1. Stop the Pleroma service. +2. Go to the working directory of Pleroma (default is `/opt/pleroma`) +3. 
Copy the above mentioned files back to their original position. +4. Run `sudo -Hu postgres pg_restore -d -v -1 ` +5. Restart the Pleroma service. diff --git a/docs/admin/updating.md b/docs/admin/updating.md new file mode 100644 index 000000000..84e6ef18d --- /dev/null +++ b/docs/admin/updating.md @@ -0,0 +1,9 @@ +# Updating your instance +1. Go to the working directory of Pleroma (default is `/opt/pleroma`) +2. Run `git pull`. This pulls the latest changes from upstream. +3. Run `mix deps.get`. This pulls in any new dependencies. +4. Stop the Pleroma service. +5. Run `mix ecto.migrate`[^1]. This task performs database migrations, if there were any. +6. Start the Pleroma service. + +[^1]: Prefix with `MIX_ENV=prod` to run it using the production config file. diff --git a/docs/Admin-API.md b/docs/api/admin_api.md similarity index 55% rename from docs/Admin-API.md rename to docs/api/admin_api.md index 407647645..8befa8ea0 100644 --- a/docs/Admin-API.md +++ b/docs/api/admin_api.md @@ -7,9 +7,16 @@ Authentication is required and the user must be an admin. ### List users - Method `GET` -- Params: - - `page`: **integer** page number - - `page_size`: **integer** number of users per page (default is `50`) +- Query Params: + - *optional* `query`: **string** search term + - *optional* `filters`: **string** comma-separated string of filters: + - `local`: only local users + - `external`: only external users + - `active`: only active users + - `deactivated`: only deactivated users + - *optional* `page`: **integer** page number + - *optional* `page_size`: **integer** number of users per page (default is `50`) +- Example: `https://mypleroma.org/api/pleroma/admin/users?query=john&filters=local,active&page=1&page_size=10` - Response: ```JSON @@ -20,34 +27,13 @@ Authentication is required and the user must be an admin. { "deactivated": bool, "id": integer, - "nickname": string - }, - ... - ] -} -``` - -## `/api/pleroma/admin/users/search?query={query}&local={local}&page={page}&page_size={page_size}` - -### Search users by name or nickname - -- Method `GET` -- Params: - - `query`: **string** search term - - `local`: **bool** whether to return only local users - - `page`: **integer** page number - - `page_size`: **integer** number of users per page (default is `50`) -- Response: - -```JSON -{ - "page_size": integer, - "count": integer, - "users": [ - { - "deactivated": bool, - "id": integer, - "nickname": string + "nickname": string, + "roles": { + "admin": bool, + "moderator": bool + }, + "local": bool, + "tags": array }, ... ] @@ -72,6 +58,26 @@ Authentication is required and the user must be an admin. - `password` - Response: User’s nickname +## `/api/pleroma/admin/user/follow` +### Make a user follow another user + +- Methods: `POST` +- Params: + - `follower`: The nickname of the follower + - `followed`: The nickname of the followed +- Response: + - "ok" + +## `/api/pleroma/admin/user/unfollow` +### Make a user unfollow another user + +- Methods: `POST` +- Params: + - `follower`: The nickname of the follower + - `followed`: The nickname of the followed +- Response: + - "ok" + ## `/api/pleroma/admin/users/:nickname/toggle_activation` ### Toggle user activation @@ -124,7 +130,7 @@ Authentication is required and the user must be an admin. Note: Available `:permission_group` is currently moderator and admin. 404 is returned when the permission group doesn’t exist. 
-### Get user user permission groups membership +### Get user user permission groups membership per permission group - Method: `GET` - Params: none @@ -163,6 +169,17 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret - `nickname` - `status` BOOLEAN field, false value means deactivation. +## `/api/pleroma/admin/users/:nickname` + +### Retrive the details of a user + +- Method: `GET` +- Params: + - `nickname` +- Response: + - On failure: `Not found` + - On success: JSON of the user + ## `/api/pleroma/admin/relay` ### Follow a Relay @@ -183,11 +200,64 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret ## `/api/pleroma/admin/invite_token` -### Get a account registeration invite token +### Get an account registration invite token + +- Methods: `GET` +- Params: + - *optional* `invite` => [ + - *optional* `max_use` (integer) + - *optional* `expires_at` (date string e.g. "2019-04-07") + ] +- Response: invite token (base64 string) + +## `/api/pleroma/admin/invites` + +### Get a list of generated invites - Methods: `GET` - Params: none -- Response: invite token (base64 string) +- Response: + +```JSON +{ + + "invites": [ + { + "id": integer, + "token": string, + "used": boolean, + "expires_at": date, + "uses": integer, + "max_use": integer, + "invite_type": string (possible values: `one_time`, `reusable`, `date_limited`, `reusable_date_limited`) + }, + ... + ] +} +``` + +## `/api/pleroma/admin/revoke_invite` + +### Revoke invite by token + +- Methods: `POST` +- Params: + - `token` +- Response: + +```JSON +{ + "id": integer, + "token": string, + "used": boolean, + "expires_at": date, + "uses": integer, + "max_use": integer, + "invite_type": string (possible values: `one_time`, `reusable`, `date_limited`, `reusable_date_limited`) + +} +``` + ## `/api/pleroma/admin/email_invite` @@ -196,7 +266,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret - Methods: `POST` - Params: - `email` - - `name`, optionnal + - `name`, optional ## `/api/pleroma/admin/password_reset` diff --git a/docs/api/differences_in_mastoapi_responses.md b/docs/api/differences_in_mastoapi_responses.md new file mode 100644 index 000000000..923d94db2 --- /dev/null +++ b/docs/api/differences_in_mastoapi_responses.md @@ -0,0 +1,54 @@ +# Differences in Mastodon API responses from vanilla Mastodon + +A Pleroma instance can be identified by " (compatible; Pleroma )" present in `version` field in response from `/api/v1/instance` + +## Flake IDs + +Pleroma uses 128-bit ids as opposed to Mastodon's 64 bits. However just like Mastodon's ids they are sortable strings + +## Attachment cap + +Some apps operate under the assumption that no more than 4 attachments can be returned or uploaded. Pleroma however does not enforce any limits on attachment count neither when returning the status object nor when posting. + +## Timelines + +Adding the parameter `with_muted=true` to the timeline queries will also return activities by muted (not by blocked!) users. + +## Statuses + +Has these additional fields under the `pleroma` object: + +- `local`: true if the post was made on the local instance. +- `conversation_id`: the ID of the conversation the status is associated with (if any) +- `content`: a map consisting of alternate representations of the `content` property with the key being it's mimetype. 
Currently the only alternate representation supported is `text/plain` +- `spoiler_text`: a map consisting of alternate representations of the `spoiler_text` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain` + +## Attachments + +Has these additional fields under the `pleroma` object: + +- `mime_type`: mime type of the attachment. + +## Accounts + +- `/api/v1/accounts/:id`: The `id` parameter can also be the `nickname` of the user. This only works in this endpoint, not the deeper nested ones for following etc. + +Has these additional fields under the `pleroma` object: + +- `tags`: Lists an array of tags for the user +- `relationship{}`: Includes fields as documented for Mastodon API https://docs.joinmastodon.org/api/entities/#relationship +- `is_moderator`: boolean, true if user is a moderator +- `is_admin`: boolean, true if user is an admin +- `confirmation_pending`: boolean, true if a new user account is waiting on email confirmation to be activated + +## Notifications + +Has these additional fields under the `pleroma` object: + +- `is_seen`: true if the notification was read by the user + +## POST `/api/v1/statuses` + +Additional parameters can be added to the JSON body/Form data: + +- `preview`: boolean, if set to `true` the post won't be actually posted, but the status entitiy would still be rendered back. This could be useful for previewing rich text/custom emoji, for example. diff --git a/docs/Pleroma-API.md b/docs/api/pleroma_api.md similarity index 60% rename from docs/Pleroma-API.md rename to docs/api/pleroma_api.md index 379d3dbed..dbe250300 100644 --- a/docs/Pleroma-API.md +++ b/docs/api/pleroma_api.md @@ -10,7 +10,29 @@ Request parameters can be passed via [query strings](https://en.wikipedia.org/wi * Authentication: not required * Params: none * Response: JSON -* Example response: `{"kalsarikannit_f":"/finmoji/128px/kalsarikannit_f-128.png","perkele":"/finmoji/128px/perkele-128.png","blobdab":"/emoji/blobdab.png","happiness":"/finmoji/128px/happiness-128.png"}` +* Example response: +```json +{ + "girlpower": { + "tags": [ + "Finmoji" + ], + "image_url": "/finmoji/128px/girlpower-128.png" + }, + "education": { + "tags": [ + "Finmoji" + ], + "image_url": "/finmoji/128px/education-128.png" + }, + "finnishlove": { + "tags": [ + "Finmoji" + ], + "image_url": "/finmoji/128px/finnishlove-128.png" + } +} +``` * Note: Same data as Mastodon API’s `/api/v1/custom_emojis` but in a different format ## `/api/pleroma/follow_import` @@ -27,14 +49,14 @@ Request parameters can be passed via [query strings](https://en.wikipedia.org/wi * Method: `GET` * Authentication: not required * Params: none -* Response: Provider specific JSON, the only guaranteed parameter is `type` +* Response: Provider specific JSON, the only guaranteed parameter is `type` * Example response: `{"type": "kocaptcha", "token": "whatever", "url": "https://captcha.kotobank.ch/endpoint"}` ## `/api/pleroma/delete_account` ### Delete an account * Method `POST` * Authentication: required -* Params: +* Params: * `password`: user's password * Response: JSON. 
Returns `{"status": "success"}` if the deletion was successful, `{"error": "[error message]"}` otherwise * Example response: `{"error": "Invalid password."}` @@ -52,7 +74,7 @@ Request parameters can be passed via [query strings](https://en.wikipedia.org/wi * `confirm` * `captcha_solution`: optional, contains provider-specific captcha solution, * `captcha_token`: optional, contains provider-specific captcha token - * `token`: invite token required when the registerations aren't public. + * `token`: invite token required when the registrations aren't public. * Response: JSON. Returns a user object on success, otherwise returns `{"error": "error_msg"}` * Example response: ``` @@ -108,3 +130,70 @@ See [Admin-API](Admin-API.md) * Response: JSON string. Returns the user flavour or the default one. * Example response: "glitch" * Note: This is intended to be used only by mastofe + +## `/api/pleroma/notifications/read` +### Mark a single notification as read +* Method `POST` +* Authentication: required +* Params: + * `id`: notification's id +* Response: JSON. Returns `{"status": "success"}` if the reading was successful, otherwise returns `{"error": "error_msg"}` + +## `/api/v1/pleroma/accounts/:id/subscribe` +### Subscribe to receive notifications for all statuses posted by a user +* Method `POST` +* Authentication: required +* Params: + * `id`: account id to subscribe to +* Response: JSON, returns a mastodon relationship object on success, otherwise returns `{"error": "error_msg"}` +* Example response: +```json +{ + "id": "abcdefg", + "following": true, + "followed_by": false, + "blocking": false, + "muting": false, + "muting_notifications": false, + "subscribing": true, + "requested": false, + "domain_blocking": false, + "showing_reblogs": true, + "endorsed": false +} +``` + +## `/api/v1/pleroma/accounts/:id/unsubscribe` +### Unsubscribe to stop receiving notifications from user statuses +* Method `POST` +* Authentication: required +* Params: + * `id`: account id to unsubscribe from +* Response: JSON, returns a mastodon relationship object on success, otherwise returns `{"error": "error_msg"}` +* Example response: +```json +{ + "id": "abcdefg", + "following": true, + "followed_by": false, + "blocking": false, + "muting": false, + "muting_notifications": false, + "subscribing": false, + "requested": false, + "domain_blocking": false, + "showing_reblogs": true, + "endorsed": false +} +``` + +## `/api/pleroma/notification_settings` +### Updates user notification settings +* Method `PUT` +* Authentication: required +* Params: + * `followers`: BOOLEAN field, receives notifications from followers + * `follows`: BOOLEAN field, receives notifications from people the user follows + * `remote`: BOOLEAN field, receives notifications from people on remote instances + * `local`: BOOLEAN field, receives notifications from people on the local instance +* Response: JSON. Returns `{"status": "success"}` if the update was successful, otherwise returns `{"error": "error_msg"}` diff --git a/docs/api/prometheus.md b/docs/api/prometheus.md new file mode 100644 index 000000000..19c564e3c --- /dev/null +++ b/docs/api/prometheus.md @@ -0,0 +1,22 @@ +# Prometheus Metrics + +Pleroma includes support for exporting metrics via the [prometheus_ex](https://github.com/deadtrickster/prometheus.ex) library. 
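Since the metrics are exposed over plain HTTP (see the endpoint described below), any HTTP client can consume them. As a small, hypothetical sketch, assuming the HTTPoison and Jason libraries and an example hostname, fetching and decoding the JSON response could look like this:

```elixir
# Fetch the application metrics from a running instance and decode the JSON
# body; the hostname is only an example.
{:ok, %HTTPoison.Response{status_code: 200, body: body}} =
  HTTPoison.get("https://pleroma.soykaf.com/api/pleroma/app_metrics")

metrics = Jason.decode!(body)
IO.inspect(metrics)
```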
+ +## `/api/pleroma/app_metrics` +### Exports Prometheus application metrics +* Method: `GET` +* Authentication: not required +* Params: none +* Response: JSON + +## Grafana +### Config example +The following is a config example to use with [Grafana](https://grafana.com) + +``` + - job_name: 'beam' + metrics_path: /api/pleroma/app_metrics + scheme: https + static_configs: + - targets: ['pleroma.soykaf.com'] +``` diff --git a/docs/Clients.md b/docs/clients.md similarity index 92% rename from docs/Clients.md rename to docs/clients.md index c3d776488..dc3e83bcc 100644 --- a/docs/Clients.md +++ b/docs/clients.md @@ -4,8 +4,8 @@ Feel free to contact us to be added to this list! ## Desktop ### Roma for Desktop -- Homepage: -- Source Code: ??? +- Homepage: +- Source Code: - Platforms: Windows, Mac, (Linux?) - Features: Streaming Ready @@ -30,6 +30,12 @@ Feel free to contact us to be added to this list! - Platforms: iOS - Features: No Streaming +### Fedilab +- Source Code: +- Contact: [@tom79@mastodon.social](https://mastodon.social/users/tom79) +- Platforms: Android +- Features: Streaming Ready + ### Nekonium - Homepage: [F-Droid Repository](https://repo.gdgd.jp.net/), [Google Play](https://play.google.com/store/apps/details?id=com.apps.nekonium), [Amazon](https://www.amazon.co.jp/dp/B076FXPRBC/) - Source: @@ -37,15 +43,9 @@ Feel free to contact us to be added to this list! - Platforms: Android - Features: Streaming Ready -### Mastalab -- Source Code: -- Contact: [@tom79@mastodon.social](https://mastodon.social/users/tom79) -- Platforms: Android -- Features: Streaming Ready - ### Roma -- Homepage: -- Source Code: ??? +- Homepage: +- Source Code: [iOS](https://github.com/roma-apps/roma-ios), [Android](https://github.com/roma-apps/roma-android) - Platforms: iOS, Android - Features: No Streaming diff --git a/docs/config.md b/docs/config.md index d1bf2a6f4..b5ea58746 100644 --- a/docs/config.md +++ b/docs/config.md @@ -6,6 +6,7 @@ If you run Pleroma with ``MIX_ENV=prod`` the file is ``prod.secret.exs``, otherw ## Pleroma.Upload * `uploader`: Select which `Pleroma.Uploaders` to use * `filters`: List of `Pleroma.Upload.Filter` to use. +* `link_name`: When enabled Pleroma will add a `name` parameter to the url of the upload, for example `https://instance.tld/media/corndog.png?name=corndog.png`. This is needed to provide the correct filename in Content-Disposition headers when using filters like `Pleroma.Upload.Filter.Dedupe` * `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host. * `proxy_remote`: If you\'re using a remote uploader, Pleroma will proxy media requests instead of redirecting to it. * `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation. @@ -100,10 +101,11 @@ config :pleroma, Pleroma.Mailer, * `no_attachment_links`: Set to true to disable automatically adding attachment link text to statuses * `welcome_message`: A message that will be send to a newly registered users as a direct message. * `welcome_user_nickname`: The nickname of the local user that sends the welcome message. -* `max_report_size`: The maximum size of the report comment (Default: `1000`) +* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`) +* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). 
(Default: `false`) ## :logger -* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog +* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack An example to enable ONLY ExSyslogger (f/ex in ``prod.secret.exs``) with info and debug suppressed: ``` @@ -126,10 +128,28 @@ config :logger, :ex_syslogger, See: [logger’s documentation](https://hexdocs.pm/logger/Logger.html) and [ex_syslogger’s documentation](https://hexdocs.pm/ex_syslogger/) +An example of logging info to local syslog, but warn to a Slack channel: +``` +config :logger, + backends: [ {ExSyslogger, :ex_syslogger}, Quack.Logger ], + level: :info + +config :logger, :ex_syslogger, + level: :info, + ident: "pleroma", + format: "$metadata[$level] $message" + +config :quack, + level: :warn, + meta: [:all], + webhook_url: "https://hooks.slack.com/services/YOUR-API-KEY-HERE" +``` + +See the [Quack Github](https://github.com/azohra/quack) for more details ## :frontend_configurations -This can be used to configure a keyword list that keeps the configuration data for any kind of frontend. By default, settings for `pleroma_fe` are configured. +This can be used to configure a keyword list that keeps the configuration data for any kind of frontend. By default, settings for `pleroma_fe` and `masto_fe` are configured. Frontends can access these settings at `/api/pleroma/frontend_configurations` @@ -189,6 +209,45 @@ This section is used to configure Pleroma-FE, unless ``:managed_config`` in ``:i * `enabled`: Enables the gopher interface * `ip`: IP address to bind to * `port`: Port to bind to +* `dstport`: Port advertised in urls (optional, defaults to `port`) + +## Pleroma.Web.Endpoint +`Phoenix` endpoint configuration, all configuration options can be viewed [here](https://hexdocs.pm/phoenix/Phoenix.Endpoint.html#module-dynamic-configuration), only common options are listed here +* `http` - a list containing http protocol configuration, all configuration options can be viewed [here](https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html#module-options), only common options are listed here + - `ip` - a tuple consisting of 4 integers + - `port` +* `url` - a list containing the configuration for generating urls, accepts + - `host` - the host without the scheme and a post (e.g `example.com`, not `https://example.com:2020`) + - `scheme` - e.g `http`, `https` + - `port` + - `path` + + +**Important note**: if you modify anything inside these lists, default `config.exs` values will be overwritten, which may result in breakage, to make sure this does not happen please copy the default value for the list from `config.exs` and modify/add only what you need + +Example: +```elixir +config :pleroma, Pleroma.Web.Endpoint, + url: [host: "example.com", port: 2020, scheme: "https"], + http: [ + # start copied from config.exs + dispatch: [ + {:_, + [ + {"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []}, + {"/websocket", Phoenix.Endpoint.CowboyWebSocket, + {Phoenix.Transports.WebSocket, + {Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, websocket_config}}}, + {:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}} + ]} + # end copied from config.exs + ], + port: 8080, + ip: {127, 0, 0, 1} + ] +``` + +This will make Pleroma listen on `127.0.0.1` port `8080` and generate urls starting with `https://example.com:2020` ## :activitypub * ``accept_blocks``: Whether to accept incoming block activities from other instances @@ 
-250,25 +309,29 @@ You can then do curl "http://localhost:4000/api/pleroma/admin/invite_token?admin_token=somerandomtoken" ``` -## Pleroma.Jobs +## :pleroma_job_queue -A list of job queues and their settings. +[Pleroma Job Queue](https://git.pleroma.social/pleroma/pleroma_job_queue) configuration: a list of queues with maximum concurrent jobs. -Job queue settings: +Pleroma has the following queues: -* `max_jobs`: The maximum amount of parallel jobs running at the same time. +* `federator_outgoing` - Outgoing federation +* `federator_incoming` - Incoming federation +* `mailer` - Email sender, see [`Pleroma.Mailer`](#pleroma-mailer) +* `transmogrifier` - Transmogrifier +* `web_push` - Web push notifications +* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivities`](#pleromascheduledactivity) Example: -```exs -config :pleroma, Pleroma.Jobs, - federator_incoming: [max_jobs: 50], - federator_outgoing: [max_jobs: 50] +```elixir +config :pleroma_job_queue, :queues, + federator_incoming: 50, + federator_outgoing: 50 ``` This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the `max_jobs` set to `50`. - ## Pleroma.Web.Federator.RetryQueue * `enabled`: If set to `true`, failed federation jobs will be retried @@ -285,6 +348,10 @@ This config contains two queues: `federator_incoming` and `federator_outgoing`. ## :rich_media * `enabled`: if enabled the instance will parse metadata from attached links to generate link previews +## :fetch_initial_posts +* `enabled`: if enabled, when a new user is federated with, fetch some of their latest posts +* `pages`: the amount of pages to fetch + ## :hackney_pools Advanced. Tweaks Hackney (http client) connections pools. @@ -326,3 +393,91 @@ config :auto_linker, rel: false ] ``` + +## Pleroma.ScheduledActivity + +* `daily_user_limit`: the number of scheduled activities a user is allowed to create in a single day (Default: `25`) +* `total_user_limit`: the number of scheduled activities a user is allowed to create in total (Default: `300`) +* `enabled`: whether scheduled activities are sent to the job queue to be executed + +## Pleroma.Web.Auth.Authenticator + +* `Pleroma.Web.Auth.PleromaAuthenticator`: default database authenticator +* `Pleroma.Web.Auth.LDAPAuthenticator`: LDAP authentication + +## :ldap + +Use LDAP for user authentication. When a user logs in to the Pleroma +instance, the name and password will be verified by trying to authenticate +(bind) to an LDAP server. If a user exists in the LDAP directory but there +is no account with the same name yet on the Pleroma instance then a new +Pleroma account will be created with the same name as the LDAP user name. + +* `enabled`: enables LDAP authentication +* `host`: LDAP server hostname +* `port`: LDAP port, e.g. 389 or 636 +* `ssl`: true to use SSL, usually implies the port 636 +* `sslopts`: additional SSL options +* `tls`: true to start TLS, usually implies the port 389 +* `tlsopts`: additional TLS options +* `base`: LDAP base, e.g. "dc=example,dc=com" +* `uid`: LDAP attribute name to authenticate the user, e.g. when "cn", the filter will be "cn=username,base" + +## :auth + +Authentication / authorization settings. + +* `auth_template`: authentication form template. By default it's `show.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/show.html.eex`. +* `oauth_consumer_template`: OAuth consumer mode authentication form template. 
By default it's `consumer.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`. +* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by OAUTH_CONSUMER_STRATEGIES environment variable. + +# OAuth consumer mode + +OAuth consumer mode allows sign in / sign up via external OAuth providers (e.g. Twitter, Facebook, Google, Microsoft, etc.). +Implementation is based on Ueberauth; see the list of [available strategies](https://github.com/ueberauth/ueberauth/wiki/List-of-Strategies). + +Note: each strategy is shipped as a separate dependency; in order to get the strategies, run `OAUTH_CONSUMER_STRATEGIES="..." mix deps.get`, +e.g. `OAUTH_CONSUMER_STRATEGIES="twitter facebook google microsoft" mix deps.get`. +The server should also be started with `OAUTH_CONSUMER_STRATEGIES="..." mix phx.server` in case you enable any strategies. + +Note: each strategy requires separate setup (on external provider side and Pleroma side). Below are the guidelines on setting up most popular strategies. + +* For Twitter, [register an app](https://developer.twitter.com/en/apps), configure callback URL to https:///oauth/twitter/callback + +* For Facebook, [register an app](https://developers.facebook.com/apps), configure callback URL to https:///oauth/facebook/callback, enable Facebook Login service at https://developers.facebook.com/apps//fb-login/settings/ + +* For Google, [register an app](https://console.developers.google.com), configure callback URL to https:///oauth/google/callback + +* For Microsoft, [register an app](https://portal.azure.com), configure callback URL to https:///oauth/microsoft/callback + +Once the app is configured on external OAuth provider side, add app's credentials and strategy-specific settings (if any — e.g. see Microsoft below) to `config/prod.secret.exs`, +per strategy's documentation (e.g. [ueberauth_twitter](https://github.com/ueberauth/ueberauth_twitter)). Example config basing on environment variables: + +``` +# Twitter +config :ueberauth, Ueberauth.Strategy.Twitter.OAuth, + consumer_key: System.get_env("TWITTER_CONSUMER_KEY"), + consumer_secret: System.get_env("TWITTER_CONSUMER_SECRET") + +# Facebook +config :ueberauth, Ueberauth.Strategy.Facebook.OAuth, + client_id: System.get_env("FACEBOOK_APP_ID"), + client_secret: System.get_env("FACEBOOK_APP_SECRET"), + redirect_uri: System.get_env("FACEBOOK_REDIRECT_URI") + +# Google +config :ueberauth, Ueberauth.Strategy.Google.OAuth, + client_id: System.get_env("GOOGLE_CLIENT_ID"), + client_secret: System.get_env("GOOGLE_CLIENT_SECRET"), + redirect_uri: System.get_env("GOOGLE_REDIRECT_URI") + +# Microsoft +config :ueberauth, Ueberauth.Strategy.Microsoft.OAuth, + client_id: System.get_env("MICROSOFT_CLIENT_ID"), + client_secret: System.get_env("MICROSOFT_CLIENT_SECRET") + +config :ueberauth, Ueberauth, + providers: [ + microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]} + ] +``` diff --git a/docs/config/General-tips-for-customizing-Pleroma-FE.md b/docs/config/General-tips-for-customizing-Pleroma-FE.md new file mode 100644 index 000000000..15c4882dd --- /dev/null +++ b/docs/config/General-tips-for-customizing-Pleroma-FE.md @@ -0,0 +1,17 @@ +# General tips for customizing Pleroma FE +There are some configuration scripts for Pleroma BE and FE: + +1. `config/prod.secret.exs` +1. `config/config.exs` +1. `priv/static/static/config.json` + +The `prod.secret.exs` affects first. `config.exs` is for fallback or default. 
`config.json` is for GNU-social-BE-Pleroma-FE instances. + +Usually all you have to do is: + +1. Copy the section in the `config/config.exs` which you want to activate. +1. Paste into `config/prod.secret.exs`. +1. Edit `config/prod.secret.exs`. +1. Restart the Pleroma daemon. + +`prod.secret.exs` is for the `MIX_ENV=prod` environment. `dev.secret.exs` is for the `MIX_ENV=dev` environment respectively. diff --git a/docs/config/custom_emoji.md b/docs/config/custom_emoji.md new file mode 100644 index 000000000..419a7d0e2 --- /dev/null +++ b/docs/config/custom_emoji.md @@ -0,0 +1,53 @@ +# Custom Emoji + +To add custom emoji: +* Add the image file(s) to `priv/static/emoji/custom` +* In case of conflicts: add the desired shortcode with the path to `config/custom_emoji.txt`, comma-separated and one per line +* Force recompilation (``mix clean && mix compile``) + +Example: + +image files (in `/priv/static/emoji/custom`): `happy.png` and `sad.png` + +content of `config/custom_emoji.txt`: +``` +happy, /emoji/custom/happy.png, Tag1,Tag2 +sad, /emoji/custom/sad.png, Tag1 +foo, /emoji/custom/foo.png +``` + +The files should be PNG (APNG is okay with `.png` for `image/png` Content-type) and under 50kb for compatibility with mastodon. + +## Emoji tags (groups) + +Default tags are set in `config.exs`. +```elixir +config :pleroma, :emoji, + shortcode_globs: ["/emoji/custom/**/*.png"], + groups: [ + Finmoji: "/finmoji/128px/*-128.png", + Custom: ["/emoji/*.png", "/emoji/custom/*.png"] + ] +``` + +Order of the `groups` matters, so to override default tags just put your group on top of the list. E.g: +```elixir +config :pleroma, :emoji, + shortcode_globs: ["/emoji/custom/**/*.png"], + groups: [ + "Finmoji special": "/finmoji/128px/a_trusted_friend-128.png", # special file + "Cirno": "/emoji/custom/cirno*.png", # png files in /emoji/custom/ which start with `cirno` + "Special group": "/emoji/custom/special_folder/*.png", # png files in /emoji/custom/special_folder/ + "Another group": "/emoji/custom/special_folder/*/.png", # png files in /emoji/custom/special_folder/ subfolders + Finmoji: "/finmoji/128px/*-128.png", + Custom: ["/emoji/*.png", "/emoji/custom/*.png"] + ] +``` + +Priority of tags assigns in emoji.txt and custom.txt: + +`tag in file > special group setting in config.exs > default setting in config.exs` + +Priority for globs: + +`special group setting in config.exs > default setting in config.exs` diff --git a/docs/config/hardening.md b/docs/config/hardening.md new file mode 100644 index 000000000..b54c28850 --- /dev/null +++ b/docs/config/hardening.md @@ -0,0 +1,103 @@ +# Hardening your instance +Here are some suggestions which improve the security of parts of your Pleroma instance. + +## Configuration file + +These changes should go into `prod.secret.exs` or `dev.secret.exs`, depending on your `MIX_ENV` value. + +### `http` + +> Recommended value: `[ip: {127, 0, 0, 1}]` + +This sets the Pleroma application server to only listen to the localhost interface. This way, you can only reach your server over the Internet by going through the reverse proxy. By default, Pleroma listens on all interfaces. + +### `secure_cookie_flag` + +> Recommended value: `true` + +This sets the `secure` flag on Pleroma’s session cookie. This makes sure, that the cookie is only accepted over encrypted HTTPs connections. This implicitly renames the cookie from `pleroma_key` to `__Host-pleroma-key` which enforces some restrictions. 
(see [cookie prefixes](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie#Cookie_prefixes)) + +### `:http_security` + +> Recommended value: `true` + +This will send additional HTTP security headers to the clients, including: + +* `X-XSS-Protection: "1; mode=block"` +* `X-Permitted-Cross-Domain-Policies: "none"` +* `X-Frame-Options: "DENY"` +* `X-Content-Type-Options: "nosniff"` +* `X-Download-Options: "noopen"` + +A content security policy (CSP) will also be set: + +```csp +content-security-policy: + default-src 'none'; + base-uri 'self'; + frame-ancestors 'none'; + img-src 'self' data: https:; + media-src 'self' https:; + style-src 'self' 'unsafe-inline'; + font-src 'self'; + script-src 'self'; + connect-src 'self' wss://example.tld; + manifest-src 'self'; + upgrade-insecure-requests; +``` + +#### `sts` + +> Recommended value: `true` + +An additional “Strict transport security” header will be sent with the configured `sts_max_age` parameter. This tells the browser, that the domain should only be accessed over a secure HTTPs connection. + +#### `ct_max_age` + +An additional “Expect-CT” header will be sent with the configured `ct_max_age` parameter. This enforces the use of TLS certificates that are published in the certificate transparency log. (see [Expect-CT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Expect-CT)) + +#### `referrer_policy` + +> Recommended value: `same-origin` + +If you click on a link, your browser’s request to the other site will include from where it is coming from. The “Referrer policy” header tells the browser how and if it should send this information. (see [Referrer policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy)) + +## systemd + +A systemd unit example is provided at `installation/pleroma.service`. + +### PrivateTmp + +> Recommended value: `true` + +Use private `/tmp` and `/var/tmp` folders inside a new file system namespace, which are discarded after the process stops. + +### ProtectHome + +> Recommended value: `true` + +The `/home`, `/root`, and `/run/user` folders can not be accessed by this service anymore. If your Pleroma user has its home folder in one of the restricted places, or use one of these folders as its working directory, you have to set this to `false`. + +### ProtectSystem + +> Recommended value: `full` + +Mount `/usr`, `/boot`, and `/etc` as read-only for processes invoked by this service. + +### PrivateDevices + +> Recommended value: `true` + +Sets up a new `/dev` mount for the process and only adds API pseudo devices like `/dev/null`, `/dev/zero` or `/dev/random` but not physical devices. This may not work on devices like the Raspberry Pi, where you need to set this to `false`. + +### NoNewPrivileges + +> Recommended value: `true` + +Ensures that the service process and all its children can never gain new privileges through `execve()`. + +### CapabilityBoundingSet + +> Recommended value: `~CAP_SYS_ADMIN` + +Drops the sysadmin capability from the daemon. diff --git a/docs/config/howto_mediaproxy.md b/docs/config/howto_mediaproxy.md new file mode 100644 index 000000000..fb731112b --- /dev/null +++ b/docs/config/howto_mediaproxy.md @@ -0,0 +1,32 @@ +# How to activate mediaproxy +## Explanation + +Without the `mediaproxy` function, Pleroma don't store any remote content like pictures, video etc. locally. So every time you open Pleroma, the content is loaded from the source server, from where the post is coming. 
This can result in slowly loading content or/and increased bandwidth usage on the source server. +With the `mediaproxy` function you can use the cache ability of nginx, to cache these content, so user can access it faster, cause it's loaded from your server. + +## Activate it + +* Edit your nginx config and add the following location: +``` +location /proxy { + proxy_cache pleroma_media_cache; + proxy_cache_lock on; + proxy_pass http://localhost:4000; +} +``` +Also add the following on top of the configuration, outside of the `server` block: +``` +proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g inactive=720m use_temp_path=off; +``` +If you came here from one of the installation guides, take a look at the example configuration `/installation/pleroma.nginx`, where this part is already included. + +* Append the following to your `prod.secret.exs` or `dev.secret.exs` (depends on which mode your instance is running): +``` +config :pleroma, :media_proxy, + enabled: true, + redirect_on_failure: true + #base_url: "https://cache.pleroma.social" +``` +If you want to use a subdomain to serve the files, uncomment `base_url`, change the url and add a comma after `true` in the previous line. + +* Restart nginx and Pleroma diff --git a/docs/config/howto_proxy.md b/docs/config/howto_proxy.md new file mode 100644 index 000000000..10a635266 --- /dev/null +++ b/docs/config/howto_proxy.md @@ -0,0 +1,12 @@ +# How to configure upstream proxy for federation +If you want to proxify all http requests (e.g. for TOR) that pleroma makes to an upstream proxy server, edit you config file (`dev.secret.exs` or `prod.secret.exs`) and add the following: + +``` +config :pleroma, :http, + proxy_url: "127.0.0.1:8123" +``` + +The other way to do it, for example, with Tor you would most likely add something like this: +``` +config :pleroma, :http, proxy_url: {:socks5, :localhost, 9050} +``` diff --git a/docs/config/howto_user_recomendation.md b/docs/config/howto_user_recomendation.md new file mode 100644 index 000000000..27c0760dd --- /dev/null +++ b/docs/config/howto_user_recomendation.md @@ -0,0 +1,31 @@ +# How to activate user recommendation (Who to follow panel) +![who-to-follow-panel-small](/uploads/9de1b1300436c32461d272945f1bc23e/who-to-follow-panel-small.png) + +To show the *who to follow* panel, edit `config/prod.secret.exs` in the Pleroma backend. Following code activates the *who to follow* panel: + +```elixir +config :pleroma, :suggestions, + enabled: true, + third_party_engine: + "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}", + timeout: 300_000, + limit: 23, + web: "https://vinayaka.distsn.org/?{{host}}+{{user}}" + +``` + +`config/config.exs` already includes this code, but `enabled:` is `false`. + +`/api/v1/suggestions` is also provided when *who to follow* panel is enabled. + +For advanced customization, following code shows the newcomers of the fediverse at the *who to follow* panel: + +```elixir +config :pleroma, :suggestions, + enabled: true, + third_party_engine: + "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-new-suggestions-api.cgi?{{host}}+{{user}}", + timeout: 60_000, + limit: 23, + web: "https://vinayaka.distsn.org/user-new.html" +``` diff --git a/docs/config/i2p.md b/docs/config/i2p.md new file mode 100644 index 000000000..62ced8b7a --- /dev/null +++ b/docs/config/i2p.md @@ -0,0 +1,196 @@ +# I2P Federation and Accessability + +This guide is going to focus on the Pleroma federation aspect. 
The actual installation is neatly explained in the official documentation, and more likely to remain up-to-date. +It might be added to this guide if there will be a need for that. + +We're going to use I2PD for its lightweightness over the official client. +Follow the documentation according to your distro: https://i2pd.readthedocs.io/en/latest/user-guide/install/#installing + +How to run it: https://i2pd.readthedocs.io/en/latest/user-guide/run/ + +## I2P Federation + +There are 2 ways to go about this. +One using the config, and one using external software (fedproxy). The external software works better so far. + +### Using the Config + +**Warning:** So far, everytime I followed this way of federating using I2P, the rest of my federation stopped working. I'm leaving this here in case it will help with making it work. + +Assuming you're running in prod, cd to your Pleroma folder and append the following to `config/prod.secret.exs`: +``` +config :pleroma, :http, proxy_url: {:socks5, :localhost, 4447} +``` +And then run the following: +``` +su pleroma +MIX_ENV=prod mix deps.get +MIX_ENV=prod mix ecto.migrate +exit +``` +You can restart I2PD here and finish if you don't wish to make your instance viewable or accessible over I2P. +``` +systemctl stop i2pd.service --no-block +systemctl start i2pd.service +``` +*Notice:* The stop command initiates a graceful shutdown process, i2pd stops after finishing to route transit tunnels (maximum 10 minutes). + +You can change the socks proxy port in `/etc/i2pd/i2pd.conf`. + +### Using Fedproxy + +Fedproxy passes through clearnet requests direct to where they are going. It doesn't force anything over Tor. + +To use [fedproxy](https://github.com/majestrate/fedproxy) you'll need to install Golang. +``` +apt install golang +``` +Use a different user than pleroma or root. Run the following to add the Gopath to your ~/.bashrc. +``` +echo "export GOPATH=/home/ren/.go" >> ~/.bashrc +``` +Restart that bash session (you can exit and log back in). +Run the following to get fedproxy. +``` +go get -u github.com/majestrate/fedproxy$ +cp $(GOPATH)/bin/fedproxy /usr/local/bin/fedproxy +``` +And then the following to start it for I2P only. +``` +fedproxy 127.0.0.1:2000 127.0.0.1:4447 +``` +If you want to also use it for Tor, add `127.0.0.1:9050` to that command. +You'll also need to modify your Pleroma config. + +Assuming you're running in prod, cd to your Pleroma folder and append the following to `config/prod.secret.exs`: +``` +config :pleroma, :http, proxy_url: {:socks5, :localhost, 2000} +``` +And then run the following: +``` +su pleroma +MIX_ENV=prod mix deps.get +MIX_ENV=prod mix ecto.migrate +exit +``` +You can restart I2PD here and finish if you don't wish to make your instance viewable or accessible over I2P. + +``` +systemctl stop i2pd.service --no-block +systemctl start i2pd.service +``` +*Notice:* The stop command initiates a graceful shutdown process, i2pd stops after finishing to route transit tunnels (maximum 10 minutes). + +You can change the socks proxy port in `/etc/i2pd/i2pd.conf`. + +## I2P Instance Access + +Make your instance accessible using I2P. + +Add the following to your I2PD config `/etc/i2pd/tunnels.conf`: +``` +[pleroma] +type = http +host = 127.0.0.1 +port = 14447 +keys = pleroma.dat +``` +Restart I2PD: +``` +systemctl stop i2pd.service --no-block +systemctl start i2pd.service +``` +*Notice:* The stop command initiates a graceful shutdown process, i2pd stops after finishing to route transit tunnels (maximum 10 minutes). 
+ +Now you'll have to find your address. +To do that you can download and use I2PD tools.[^1] +Or you'll need to access your web-console on localhost:7070. +If you don't have a GUI, you'll have to SSH tunnel into it like this: +`ssh -L 7070:127.0.0.1:7070 user@ip -p port`. +Now you can access it at localhost:7070. +Go to I2P tunnels page. Look for Server tunnels and you will see an address that ends with `.b32.i2p` next to "pleroma". +This is your site's address. + +### I2P-only Instance + +If creating an I2P-only instance, open `config/prod.secret.exs` and under "config :pleroma, Pleroma.Web.Endpoint," edit "https" and "port: 443" to the following: +``` + url: [host: "i2paddress", scheme: "http", port: 80], +``` +In addition to that, replace the existing nginx config's contents with the example below. + +### Existing Instance (Clearnet Instance) + +If not an I2P-only instance, add the nginx config below to your existing config at `/etc/nginx/sites-enabled/pleroma.nginx`. + +And for both cases, disable CSP in Pleroma's config (STS is disabled by default) so you can define those yourself seperately from the clearnet (if your instance is also on the clearnet). +Copy the following into the `config/prod.secret.exs` in your Pleroma folder (/home/pleroma/pleroma/): +``` +config :pleroma, :http_security, + enabled: false +``` + +Use this as the Nginx config: +``` +proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g inactive=720m use_temp_path=off; +# The above already exists in a clearnet instance's config. +# If not, add it. + +server { + listen 127.0.0.1:14447; + server_name youri2paddress; + + # Comment to enable logs + access_log /dev/null; + error_log /dev/null; + + gzip_vary on; + gzip_proxied any; + gzip_comp_level 6; + gzip_buffers 16 8k; + gzip_http_version 1.1; + gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml; + + client_max_body_size 16m; + + location / { + + add_header X-XSS-Protection "1; mode=block"; + add_header X-Permitted-Cross-Domain-Policies none; + add_header X-Frame-Options DENY; + add_header X-Content-Type-Options nosniff; + add_header Referrer-Policy same-origin; + add_header X-Download-Options noopen; + + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $http_host; + + proxy_pass http://localhost:4000; + + client_max_body_size 16m; + } + + location /proxy { + proxy_cache pleroma_media_cache; + proxy_cache_lock on; + proxy_ignore_client_abort on; + proxy_pass http://localhost:4000; + } +} +``` +reload Nginx: +``` +systemctl stop i2pd.service --no-block +systemctl start i2pd.service +``` +*Notice:* The stop command initiates a graceful shutdown process, i2pd stops after finishing to route transit tunnels (maximum 10 minutes). + +You should now be able to both access your instance using I2P and federate with other I2P instances! + +[^1]: [I2PD tools](https://github.com/purplei2p/i2pd-tools) to print information about a router info file or an I2P private key, generate an I2P private key, and generate vanity addresses. + +### Possible Issues + +Will be added when encountered. 
diff --git a/docs/config/mrf.md b/docs/config/mrf.md new file mode 100644 index 000000000..2cc16cef0 --- /dev/null +++ b/docs/config/mrf.md @@ -0,0 +1,119 @@ +# Message Rewrite Facility +The Message Rewrite Facility (MRF) is a subsystem that is implemented as a series of hooks that allows the administrator to rewrite or discard messages. + +Possible uses include: + +* marking incoming messages with media from a given account or instance as sensitive +* rejecting messages from a specific instance +* removing/unlisting messages from the public timelines +* removing media from messages +* sending only public messages to a specific instance + +The MRF provides user-configurable policies. The default policy is `NoOpPolicy`, which disables the MRF functionality. Pleroma also includes an easy to use policy called `SimplePolicy` which maps messages matching certain pre-defined criterion to actions built into the policy module. +It is possible to use multiple, active MRF policies at the same time. + +## Quarantine Instances + +You have the ability to prevent from private / followers-only messages from federating with specific instances. Which means they will only get the public or unlisted messages from your instance. + +If, for example, you're using `MIX_ENV=prod` aka using production mode, you would open your configuration file located in `config/prod.secret.exs` and edit or add the option under your `:instance` config object. Then you would specify the instance within quotes. +``` +config :pleroma, :instance, + [...] + quarantined_instances: ["instance.example", "other.example"] +``` + +## Using `SimplePolicy` + +`SimplePolicy` is capable of handling most common admin tasks. + +To use `SimplePolicy`, you must enable it. Do so by adding the following to your `:instance` config object, so that it looks like this: + +``` +config :pleroma, :instance, + [...] + rewrite_policy: Pleroma.Web.ActivityPub.MRF.SimplePolicy +``` + +Once `SimplePolicy` is enabled, you can configure various groups in the `:mrf_simple` config object. These groups are: + +* `media_removal`: Servers in this group will have media stripped from incoming messages. +* `media_nsfw`: Servers in this group will have the #nsfw tag and sensitive setting injected into incoming messages which contain media. +* `reject`: Servers in this group will have their messages rejected. +* `federated_timeline_removal`: Servers in this group will have their messages unlisted from the public timelines by flipping the `to` and `cc` fields. + +Servers should be configured as lists. + +### Example + +This example will enable `SimplePolicy`, block media from `illegalporn.biz`, mark media as NSFW from `porn.biz` and `porn.business`, reject messages from `spam.com` and remove messages from `spam.university` from the federated timeline: + +``` +config :pleroma, :instance, + rewrite_policy: [Pleroma.Web.ActivityPub.MRF.SimplePolicy] + +config :pleroma, :mrf_simple, + media_removal: ["illegalporn.biz"], + media_nsfw: ["porn.biz", "porn.business"], + reject: ["spam.com"], + federated_timeline_removal: ["spam.university"] + +``` + +### Use with Care + +The effects of MRF policies can be very drastic. It is important to use this functionality carefully. Always try to talk to an admin before writing an MRF policy concerning their instance. + +## Writing your own MRF Policy + +As discussed above, the MRF system is a modular system that supports pluggable policies. 
This means that an admin may write a custom MRF policy in Elixir or any other language that runs on the Erlang VM, by specifying the module name in the `rewrite_policy` config setting. + +For example, here is a sample policy module which rewrites all messages to "new message content": + +```elixir +# This is a sample MRF policy which rewrites all Notes to have "new message +# content." +defmodule Site.RewritePolicy do + @behaviour Pleroma.Web.ActivityPub.MRF + + # Catch messages which contain Note objects with actual data to filter. + # Capture the object as `object`, the message content as `content` and the + # message itself as `message`. + @impl true + def filter(%{"type" => "Create", "object" => %{"type" => "Note", "content" => content} = object} = message) + when is_binary(content) do + # Subject / CW is stored as summary instead of `name` like other AS2 objects + # because of Mastodon doing it that way. + summary = object["summary"] + + # Message edits go here. + content = "new message content" + + # Assemble the mutated object. + object = + object + |> Map.put("content", content) + |> Map.put("summary", summary) + + # Assemble the mutated message. + message = Map.put(message, "object", object) + {:ok, message} + end + + # Let all other messages through without modifying them. + @impl true + def filter(message), do: {:ok, message} +end +``` + +If you save this file as `lib/site/mrf/rewrite_policy.ex`, it will be included when you next rebuild Pleroma. You can enable it in the configuration like so: + +``` +config :pleroma, :instance, + rewrite_policy: [ + Pleroma.Web.ActivityPub.MRF.SimplePolicy, + Site.RewritePolicy + ] +``` + +Please note that the Pleroma developers consider custom MRF policy modules to fall under the purview of the AGPL. As such, you are obligated to release the sources to your custom MRF policy modules upon request. diff --git a/docs/config/onion_federation.md b/docs/config/onion_federation.md new file mode 100644 index 000000000..99f104995 --- /dev/null +++ b/docs/config/onion_federation.md @@ -0,0 +1,159 @@ +# Easy Onion Federation (Tor) +Tor can free people from the necessity of a domain, in addition to helping protect their privacy. As Pleroma's goal is to empower the people and let as many as possible host an instance with as little resources as possible, the ability to host an instance with a small, cheap computer like a RaspberryPi along with Tor, would be a great way to achieve that. +In addition, federating with such instances will also help further that goal. + +This is a guide to show you how it can be easily done. + +This guide assumes you already have Pleroma working, and that it's running on the default port 4000. +Currently this guide only has an Nginx example. + +To install Tor on Debian / Ubuntu: +``` +apt -yq install tor +``` +If using an old server version (older than Debian Stretch or Ubuntu 18.04), install from backports or PPA. +I recommend using a newer server version instead. + +To have the newest, V3 onion addresses (which I recommend) in Debian, install Tor from backports. +If you do not have backports, uncomment the stretch-backports links at the end of `/etc/apt/sources.list`. +Then install: +``` +apt update +apt -t stretch-backports -yq install tor +``` +**WARNING:** Onion instances not using a Tor version supporting V3 addresses will not be able to federate with you.
+ +Create the hidden service for your Pleroma instance in `/etc/tor/torrc`: +``` +HiddenServiceDir /var/lib/tor/pleroma_hidden_service/ +HiddenServicePort 80 127.0.0.1:8099 +HiddenServiceVersion 3 # Remove if Tor version is below 0.3 ( tor --version ) +``` +Restart Tor to generate an adress: +``` +systemctl restart tor@default.service +``` +Get the address: +``` +cat /var/lib/tor/pleroma_hidden_service/hostname +``` + +# Federation + +Next, edit your Pleroma config. +If running in prod, cd to your Pleroma directory, edit `config/prod.secret.exs` +and append this line: +``` +config :pleroma, :http, proxy_url: {:socks5, :localhost, 9050} +``` +In your Pleroma directory, assuming you're running prod, +run the following: +``` +su pleroma +MIX_ENV=prod mix deps.get +MIX_ENV=prod mix ecto.migrate +exit +``` +restart Pleroma (if using systemd): +``` +systemctl restart pleroma +``` + +# Tor Instance Access + +Make your instance accessible using Tor. + +## Tor-only Instance +If creating a Tor-only instance, open `config/prod.secret.exs` and under "config :pleroma, Pleroma.Web.Endpoint," edit "https" and "port: 443" to the following: +``` + url: [host: "onionaddress", scheme: "http", port: 80], +``` +In addition to that, replace the existing nginx config's contents with the example below. + +## Existing Instance (Clearnet Instance) +If not a Tor-only instance, +add the nginx config below to your existing config at `/etc/nginx/sites-enabled/pleroma.nginx`. + +--- +For both cases, disable CSP in Pleroma's config (STS is disabled by default) so you can define those yourself seperately from the clearnet (if your instance is also on the clearnet). +Copy the following into the `config/prod.secret.exs` in your Pleroma folder (/home/pleroma/pleroma/): +``` +config :pleroma, :http_security, + enabled: false +``` + +Use this as the Nginx config: +``` +proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g inactive=720m use_temp_path=off; +# The above already exists in a clearnet instance's config. +# If not, add it. + +server { + listen 127.0.0.1:8099; + server_name youronionaddress; + + # Comment to enable logs + access_log /dev/null; + error_log /dev/null; + + gzip_vary on; + gzip_proxied any; + gzip_comp_level 6; + gzip_buffers 16 8k; + gzip_http_version 1.1; + gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml; + + client_max_body_size 16m; + + location / { + + add_header X-XSS-Protection "1; mode=block"; + add_header X-Permitted-Cross-Domain-Policies none; + add_header X-Frame-Options DENY; + add_header X-Content-Type-Options nosniff; + add_header Referrer-Policy same-origin; + add_header X-Download-Options noopen; + + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $http_host; + + proxy_pass http://localhost:4000; + + client_max_body_size 16m; + } + + location /proxy { + proxy_cache pleroma_media_cache; + proxy_cache_lock on; + proxy_ignore_client_abort on; + proxy_pass http://localhost:4000; + } +} +``` +reload Nginx: +``` +systemctl reload nginx +``` + +You should now be able to both access your instance using Tor and federate with other Tor instances! 
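As a quick recap, the Pleroma-side changes described in this guide for a Tor-only instance amount to something like the following sketch in `config/prod.secret.exs`. The onion hostname is a placeholder for the address printed by `cat /var/lib/tor/pleroma_hidden_service/hostname`.

```elixir
# Route outgoing federation requests through the local Tor SOCKS proxy.
config :pleroma, :http,
  proxy_url: {:socks5, :localhost, 9050}

# Generate URLs for the onion address (Tor-only instance).
config :pleroma, Pleroma.Web.Endpoint,
  url: [host: "youronionaddress", scheme: "http", port: 80]

# Disable the CSP headers here so they can be defined in the reverse proxy instead.
config :pleroma, :http_security,
  enabled: false
```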
+ +--- + +### Possible Issues + +* In Debian, make sure your hidden service folder `/var/lib/tor/pleroma_hidden_service/` and its contents, has debian-tor as both owner and group by using +``` +ls -la /var/lib/tor/ +``` +If it's not, run: +``` +chown -R debian-tor:debian-tor /var/lib/tor/pleroma_hidden_service/ +``` +* Make sure *only* the owner has *only* read and write permissions. +If not, run: +``` +chmod -R 600 /var/lib/tor/pleroma_hidden_service/ +``` +* If you have trouble logging in to the Mastodon Frontend when using Tor, use the Tor Browser Bundle. diff --git a/docs/config/small_customizations.md b/docs/config/small_customizations.md new file mode 100644 index 000000000..09e8d6041 --- /dev/null +++ b/docs/config/small_customizations.md @@ -0,0 +1,35 @@ +# Small customizations +Replace `dev.secret.exs` with `prod.secret.exs` according to your setup. + +# Thumbnail + +Replace `priv/static/instance/thumbnail.jpeg` with your selfie or other neat picture. It will appear in [Pleroma Instances](http://distsn.org/pleroma-instances.html). + +# Instance-specific panel + +![instance-specific panel demo](/uploads/296b19ec806b130e0b49b16bfe29ce8a/image.png) + +To show the instance specific panel, set `show_instance_panel` to `true` in `config/dev.secret.exs`. You can modify its content by editing `priv/static/instance/panel.html`. + +# Background + +You can change the background of your Pleroma instance by uploading it to `priv/static/static`, and then changing `"background"` in `config/dev.secret.exs` accordingly. + +# Logo + +![logo modification demo](/uploads/c70b14de60fa74245e7f0dcfa695ebff/image.png) + +If you want to give a brand to your instance, look no further. You can change the logo of your instance by uploading it to `priv/static/static`, and then changing `logo` in `config/dev.secret.exs` accordingly. + +# Theme + +All users of your instance will be able to change the theme they use by going to the settings (the cog in the top-right hand corner). However, if you wish to change the default theme, you can do so by editing `theme` in `config/dev.secret.exs` accordingly. + +# Terms of Service + +Terms of Service will be shown to all users on the registration page. It's the best place where to write down the rules for your instance. You can modify the rules by changing `priv/static/static/terms-of-service.html`. + +# Message Visibility + +To enable message visibility options when posting like in the Mastodon frontend, set +`scope_options_enabled` to `true` in `config/dev.secret.exs`. diff --git a/docs/config/static_dir.md b/docs/config/static_dir.md new file mode 100644 index 000000000..0cc52b99a --- /dev/null +++ b/docs/config/static_dir.md @@ -0,0 +1,20 @@ +# Static Directory + +Static frontend files are shipped in `priv/static/` and tracked by version control in this repository. If you want to overwrite or update these without the possibility of merge conflicts, you can write your custom versions to `instance/static/`. + +``` +config :pleroma, :instance, + static_dir: "instance/static/", +``` + +You can overwrite this value in your configuration to use a different static instance directory. + +## robots.txt + +By default, the `robots.txt` that ships in `priv/static/` is permissive. It allows well-behaved search engines to index all of your instance's URIs. + +If you want to generate a restrictive `robots.txt`, you can run the following mix task. The generated `robots.txt` will be written in your instance static directory. 
+ +``` +mix pleroma.robots_txt disallow_all +``` diff --git a/docs/installation/alpine_linux_en.md b/docs/installation/alpine_linux_en.md new file mode 100644 index 000000000..c493816d6 --- /dev/null +++ b/docs/installation/alpine_linux_en.md @@ -0,0 +1,215 @@ +# Installing on Alpine Linux +## Installation + +This guide is a step-by-step installation guide for Alpine Linux. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.linode.com/docs/tools-reference/custom-kernels-distros/install-alpine-linux-on-your-linode/#configuration). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su -l -s $SHELL -c 'command'` instead. + +### Required packages + +* `postgresql` +* `elixir` +* `erlang` +* `erlang-parsetools` +* `erlang-xmerl` +* `git` +* Development Tools + +#### Optional packages used in this guide + +* `nginx` (preferred, example configs for other reverse proxies can be found in the repo) +* `certbot` (or any other ACME client for Let’s Encrypt certificates) + +### Prepare the system + +* First make sure to have the community repository enabled: + +```shell +echo "https://nl.alpinelinux.org/alpine/latest-stable/community" | sudo tee -a /etc/apk/repository +``` + +* Then update the system, if not already done: + +```shell +sudo apk update +sudo apk upgrade +``` + +* Install some tools, which are needed later: + +```shell +sudo apk add git build-base +``` + +### Install Elixir and Erlang + +* Install Erlang and Elixir: + +```shell +sudo apk add erlang erlang-runtime-tools erlang-xmerl elixir +``` + +* Install `erlang-eldap` if you want to enable ldap authenticator + +```shell +sudo apk add erlang-eldap +``` +### Install PostgreSQL + +* Install Postgresql server: + +```shell +sudo apk add postgresql postgresql-contrib +``` + +* Initialize database: + +```shell +sudo /etc/init.d/postgresql start +``` + +* Enable and start postgresql server: + +```shell +sudo rc-update add postgresql +``` + +### Install PleromaBE + +* Add a new system user for the Pleroma service: + +```shell +sudo adduser -S -s /bin/false -h /opt/pleroma -H pleroma +``` + +**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have and want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell. + +* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory: + +```shell +sudo mkdir -p /opt/pleroma +sudo chown -R pleroma:pleroma /opt/pleroma +sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma +``` + +* Change to the new directory: + +```shell +cd /opt/pleroma +``` + +* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`: + +```shell +sudo -Hu pleroma mix deps.get +``` + +* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen` + * Answer with `yes` if it asks you to install `rebar3`. + * This may take some time, because parts of pleroma get compiled first. + * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`. 
+ +* Check the configuration and if all looks right, rename it, so Pleroma will load it (`prod.secret.exs` for productive instance, `dev.secret.exs` for development instances): + +```shell +mv config/{generated_config.exs,prod.secret.exs} +``` + +* The previous command creates also the file `config/setup_db.psql`, with which you can create the database: + +```shell +sudo -Hu postgres psql -f config/setup_db.psql +``` + +* Now run the database migration: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate +``` + +* Now you can start Pleroma already + +```shell +sudo -Hu pleroma MIX_ENV=prod mix phx.server +``` + +### Finalize installation + +If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma and you should consider to create an OpenRC service file for Pleroma. + +#### Nginx + +* Install nginx, if not already done: + +```shell +sudo apk add nginx +``` + +* Setup your SSL cert, using your method of choice or certbot. If using certbot, first install it: + +```shell +sudo apk add certbot +``` + +and then set it up: + +```shell +sudo mkdir -p /var/lib/letsencrypt/ +sudo certbot certonly --email -d --standalone +``` + +If that doesn’t work, make sure, that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again). + +* Copy the example nginx configuration to the nginx folder + +```shell +sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf +``` + +* Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths) +* Enable and start nginx: + +```shell +sudo rc-update add nginx +sudo service nginx start +``` + +If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run: + +```shell +sudo certbot certonly --email -d --webroot -w /var/lib/letsencrypt/ +``` + +#### OpenRC service + +* Copy example service file: + +```shell +sudo cp /opt/pleroma/installation/init.d/pleroma /etc/init.d/pleroma +``` + +* Make sure to start it during the boot + +```shell +sudo rc-update add pleroma +``` + +#### Create your first user + +If your instance is up and running, you can create your first user with administrative rights with the following task: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new --admin +``` + +#### Further reading + +* [Admin tasks](Admin tasks) +* [Backup your instance](Backup-your-instance) +* [Configuration tips](General tips for customizing pleroma fe) +* [Hardening your instance](Hardening-your-instance) +* [How to activate mediaproxy](How-to-activate-mediaproxy) +* [Small Pleroma-FE customizations](Small customizations) +* [Updating your instance](Updating-your-instance) + +## Questions + +Questions about the installation or didn’t it work as it should be, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or IRC Channel **#pleroma** on **Freenode**. diff --git a/docs/installation/arch_linux_en.md b/docs/installation/arch_linux_en.md new file mode 100644 index 000000000..4b3bbbbb0 --- /dev/null +++ b/docs/installation/arch_linux_en.md @@ -0,0 +1,214 @@ +# Installing on Arch Linux +## Installation + +This guide will assume that you have administrative rights, either as root or a user with [sudo permissions](https://wiki.archlinux.org/index.php/Sudo). 
If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su -s $SHELL -c 'command'` instead. + +### Required packages + +* `postgresql` +* `elixir` +* `erlang-unixodbc` +* `git` +* `base-devel` + +#### Optional packages used in this guide + +* `nginx` (preferred, example configs for other reverse proxies can be found in the repo) +* `certbot` (or any other ACME client for Let’s Encrypt certificates) + +### Prepare the system + +* First update the system, if not already done: + +```shell +sudo pacman -Syu +``` + +* Install some of the above mentioned programs: + +```shell +sudo pacman -S git base-devel elixir erlang-unixodbc +``` + +### Install PostgreSQL + +[Arch Wiki article](https://wiki.archlinux.org/index.php/PostgreSQL) + +* Install the `postgresql` package: + +```shell +sudo pacman -S postgresql +``` + +* Initialize the database cluster: + +```shell +sudo -iu postgres initdb -D /var/lib/postgres/data +``` + +* Start and enable the `postgresql.service` + +```shell +sudo systemctl enable --now postgresql.service +``` + +### Install PleromaBE + +* Add a new system user for the Pleroma service: + +```shell +sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma +``` + +**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have and want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell. + +* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory: + +```shell +sudo mkdir -p /opt/pleroma +sudo chown -R pleroma:pleroma /opt/pleroma +sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma +``` + +* Change to the new directory: + +```shell +cd /opt/pleroma +``` + +* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`: + +```shell +sudo -Hu pleroma mix deps.get +``` + +* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen` + * Answer with `yes` if it asks you to install `rebar3`. + * This may take some time, because parts of pleroma get compiled first. + * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`. + +* Check the configuration and if all looks right, rename it, so Pleroma will load it (`prod.secret.exs` for productive instance, `dev.secret.exs` for development instances): + +```shell +mv config/{generated_config.exs,prod.secret.exs} +``` + +* The previous command creates also the file `config/setup_db.psql`, with which you can create the database: + +```shell +sudo -Hu postgres psql -f config/setup_db.psql +``` + +* Now run the database migration: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate +``` + +* Now you can start Pleroma already + +```shell +sudo -Hu pleroma MIX_ENV=prod mix phx.server +``` + +### Finalize installation + +If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma and you should consider to create a systemd service file for Pleroma. 
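+
+Before putting a webserver or proxy in front of it, you may want to confirm that the backend itself responds locally. A minimal check, assuming `curl` is installed (port 4000 is the default the backend listens on, as used elsewhere in these guides):
+
+```shell
+# Assumes curl is installed; 4000 is the port the backend listens on by default
+curl http://127.0.0.1:4000/api/v1/instance
+```
+
+If this returns JSON describing your instance, the backend is reachable and you can continue with the proxy setup below.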
+ +#### Nginx + +* Install nginx, if not already done: + +```shell +sudo pacman -S nginx +``` + +* Create directories for available and enabled sites: + +```shell +sudo mkdir -p /etc/nginx/sites-{available,enabled} +``` + +* Append the following line at the end of the `http` block in `/etc/nginx/nginx.conf`: + +```Nginx +include sites-enabled/*; +``` + +* Setup your SSL cert, using your method of choice or certbot. If using certbot, first install it: + +```shell +sudo pacman -S certbot certbot-nginx +``` + +and then set it up: + +```shell +sudo mkdir -p /var/lib/letsencrypt/ +sudo certbot certonly --email -d --standalone +``` + +If that doesn’t work, make sure, that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again). + +--- + +* Copy the example nginx configuration and activate it: + +```shell +sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx +sudo ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx +``` + +* Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths) +* Enable and start nginx: + +```shell +sudo systemctl enable --now nginx.service +``` + +If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run: + +```shell +sudo certbot certonly --email -d --webroot -w /var/lib/letsencrypt/ +``` + +#### Other webserver/proxies + +You can find example configurations for them in `/opt/pleroma/installation/`. + +#### Systemd service + +* Copy example service file + +```shell +sudo cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service +``` + +* Edit the service file and make sure that all paths fit your installation +* Enable and start `pleroma.service`: + +```shell +sudo systemctl enable --now pleroma.service +``` + +#### Create your first user + +If your instance is up and running, you can create your first user with administrative rights with the following task: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new --admin +``` + +#### Further reading + +* [Admin tasks](Admin tasks) +* [Backup your instance](Backup-your-instance) +* [Configuration tips](General tips for customizing pleroma fe) +* [Hardening your instance](Hardening-your-instance) +* [How to activate mediaproxy](How-to-activate-mediaproxy) +* [Small Pleroma-FE customizations](Small customizations) +* [Updating your instance](Updating-your-instance) + +## Questions + +Questions about the installation or didn’t it work as it should be, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or IRC Channel **#pleroma** on **Freenode**. diff --git a/docs/installation/centos7_en.md b/docs/installation/centos7_en.md new file mode 100644 index 000000000..76de21ed8 --- /dev/null +++ b/docs/installation/centos7_en.md @@ -0,0 +1,277 @@ +# Installing on CentOS 7 +## Installation + +This guide is a step-by-step installation guide for CentOS 7. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-create-a-sudo-user-on-centos-quickstart). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su -s $SHELL -c 'command'` instead. 
+ +### Required packages + +* `postgresql` (9,6+, CentOS 7 comes with 9.2, we will install version 11 in this guide) +* `elixir` (1.5+) +* `erlang` +* `erlang-parsetools` +* `erlang-xmerl` +* `git` +* Development Tools + +#### Optional packages used in this guide + +* `nginx` (preferred, example configs for other reverse proxies can be found in the repo) +* `certbot` (or any other ACME client for Let’s Encrypt certificates) + +### Prepare the system + +* First update the system, if not already done: + +```shell +sudo yum update +``` + +* Install some of the above mentioned programs: + +```shell +sudo yum install wget git unzip +``` + +* Install development tools: + +```shell +sudo yum group install "Development Tools" +``` + +### Install Elixir and Erlang + +* Add the EPEL repo: + +```shell +sudo yum install epel-release +sudo yum -y update +``` + +* Install Erlang repository: + +```shell +wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions-1.0-1.noarch.rpm +sudo rpm -Uvh erlang-solutions-1.0-1.noarch.rpm +``` + +* Install Erlang: + +```shell +sudo yum install erlang erlang-parsetools erlang-xmerl +``` + +* Download [latest Elixir release from Github](https://github.com/elixir-lang/elixir/releases/tag/v1.8.1) (Example for the newest version at the time when this manual was written) + +```shell +wget -P /tmp/ https://github.com/elixir-lang/elixir/releases/download/v1.8.1/Precompiled.zip +``` + +* Create folder where you want to install Elixir, we’ll use: + +```shell +sudo mkdir -p /opt/elixir +``` + +* Unzip downloaded file there: + +```shell +sudo unzip /tmp/Precompiled.zip -d /opt/elixir +``` + +* Create symlinks for the pre-compiled binaries: + +```shell +for e in elixir elixirc iex mix; do sudo ln -s /opt/elixir/bin/${e} /usr/local/bin/${e}; done +``` + +### Install PostgreSQL + +* Add the Postgresql repository: + +```shell +sudo yum install https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/pgdg-centos11-11-2.noarch.rpm +``` + +* Install the Postgresql server: + +```shell +sudo yum install postgresql11-server postgresql11-contrib +``` + +* Initialize database: + +```shell +sudo /usr/pgsql-11/bin/postgresql-11-setup initdb +``` + +* Open configuration file `/var/lib/pgsql/11/data/pg_hba.conf` and change the following lines from: + +```plain +# IPv4 local connections: +host all all 127.0.0.1/32 ident +# IPv6 local connections: +host all all ::1/128 ident +``` + +to + +```plain +# IPv4 local connections: +host all all 127.0.0.1/32 md5 +# IPv6 local connections: +host all all ::1/128 md5 +``` + +* Enable and start postgresql server: + +```shell +sudo systemctl enable --now postgresql-11.service +``` + +### Install PleromaBE + +* Add a new system user for the Pleroma service: + +```shell +sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma +``` + +**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have and want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell. 
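+
+As a concrete illustration of the note above, both of the following run the same command as the `pleroma` user; the first form needs `sudo`, the second only needs root (`id` is just a placeholder, substitute the command you actually want to run):
+
+```shell
+# `id` is only a placeholder command
+sudo -Hu pleroma id
+su -l pleroma -s $SHELL -c 'id'
+```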
+ +* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory: + +```shell +sudo mkdir -p /opt/pleroma +sudo chown -R pleroma:pleroma /opt/pleroma +sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma +``` + +* Change to the new directory: + +```shell +cd /opt/pleroma +``` + +* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`: + +```shell +sudo -Hu pleroma mix deps.get +``` + +* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen` + * Answer with `yes` if it asks you to install `rebar3`. + * This may take some time, because parts of pleroma get compiled first. + * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`. + +* Check the configuration and if all looks right, rename it, so Pleroma will load it (`prod.secret.exs` for productive instance, `dev.secret.exs` for development instances): + +```shell +mv config/{generated_config.exs,prod.secret.exs} +``` + +* The previous command creates also the file `config/setup_db.psql`, with which you can create the database: + +```shell +sudo -Hu postgres psql -f config/setup_db.psql +``` + +* Now run the database migration: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate +``` + +* Now you can start Pleroma already + +```shell +sudo -Hu pleroma MIX_ENV=prod mix phx.server +``` + +### Finalize installation + +If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma and you should consider to create a systemd service file for Pleroma. + +#### Nginx + +* Install nginx, if not already done: + +```shell +sudo yum install nginx +``` + +* Setup your SSL cert, using your method of choice or certbot. If using certbot, first install it: + +```shell +sudo yum install certbot-nginx +``` + +and then set it up: + +```shell +sudo mkdir -p /var/lib/letsencrypt/ +sudo certbot certonly --email -d --standalone +``` + +If that doesn’t work, make sure, that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again). + +--- + +* Copy the example nginx configuration to the nginx folder + +```shell +sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf +``` + +* Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths) +* Enable and start nginx: + +```shell +sudo systemctl enable --now nginx +``` + +If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run: + +```shell +sudo certbot certonly --email -d --webroot -w /var/lib/letsencrypt/ +``` + +#### Other webserver/proxies + +You can find example configurations for them in `/opt/pleroma/installation/`. 
+ +#### Systemd service + +* Copy example service file + +```shell +sudo cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service +``` + +* Edit the service file and make sure that all paths fit your installation +* Enable and start `pleroma.service`: + +```shell +sudo systemctl enable --now pleroma.service +``` + +#### Create your first user + +If your instance is up and running, you can create your first user with administrative rights with the following task: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new --admin +``` + +#### Further reading + +* [Admin tasks](Admin tasks) +* [Backup your instance](Backup-your-instance) +* [Configuration tips](General tips for customizing pleroma fe) +* [Hardening your instance](Hardening-your-instance) +* [How to activate mediaproxy](How-to-activate-mediaproxy) +* [Small Pleroma-FE customizations](Small customizations) +* [Updating your instance](Updating-your-instance) + +## Questions + +Questions about the installation or didn’t it work as it should be, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or IRC Channel **#pleroma** on **Freenode**. diff --git a/docs/installation/debian_based_en.md b/docs/installation/debian_based_en.md new file mode 100644 index 000000000..9613a329b --- /dev/null +++ b/docs/installation/debian_based_en.md @@ -0,0 +1,202 @@ +# Installing on Debian Based Distributions +## Installation + +This guide will assume you are on Debian Stretch. This guide should also work with Ubuntu 16.04 and 18.04. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-add-delete-and-grant-sudo-privileges-to-users-on-a-debian-vps). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su -s $SHELL -c 'command'` instead. 
+ +### Required packages + +* `postgresql` (9.6+, Ubuntu 16.04 comes with 9.5, you can get a newer version from [here](https://www.postgresql.org/download/linux/ubuntu/)) +* `postgresql-contrib` (9.6+, same situtation as above) +* `elixir` (1.5+, [install from here, Debian and Ubuntu ship older versions](https://elixir-lang.org/install.html#unix-and-unix-like) or use [asdf](https://github.com/asdf-vm/asdf) as the pleroma user) +* `erlang-dev` +* `erlang-tools` +* `erlang-parsetools` +* `erlang-eldap`, if you want to enable ldap authenticator +* `erlang-xmerl` +* `git` +* `build-essential` + +#### Optional packages used in this guide + +* `nginx` (preferred, example configs for other reverse proxies can be found in the repo) +* `certbot` (or any other ACME client for Let’s Encrypt certificates) + +### Prepare the system + +* First update the system, if not already done: + +```shell +sudo apt update +sudo apt full-upgrade +``` + +* Install some of the above mentioned programs: + +```shell +sudo apt install git build-essential postgresql postgresql-contrib +``` + +### Install Elixir and Erlang + +* Download and add the Erlang repository: + +```shell +wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_1.0_all.deb +sudo dpkg -i /tmp/erlang-solutions_1.0_all.deb +``` + +* Install Elixir and Erlang: + +```shell +sudo apt update +sudo apt install elixir erlang-dev erlang-parsetools erlang-xmerl erlang-tools +``` + +### Install PleromaBE + +* Add a new system user for the Pleroma service: + +```shell +sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma +``` + +**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have and want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell. + +* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory: + +```shell +sudo mkdir -p /opt/pleroma +sudo chown -R pleroma:pleroma /opt/pleroma +sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma +``` + +* Change to the new directory: + +```shell +cd /opt/pleroma +``` + +* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`: + +```shell +sudo -Hu pleroma mix deps.get +``` + +* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen` + * Answer with `yes` if it asks you to install `rebar3`. + * This may take some time, because parts of pleroma get compiled first. + * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`. 
+ +* Check the configuration and if all looks right, rename it, so Pleroma will load it (`prod.secret.exs` for productive instance, `dev.secret.exs` for development instances): + +```shell +mv config/{generated_config.exs,prod.secret.exs} +``` + +* The previous command creates also the file `config/setup_db.psql`, with which you can create the database: + +```shell +sudo -Hu postgres psql -f config/setup_db.psql +``` + +* Now run the database migration: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate +``` + +* Now you can start Pleroma already + +```shell +sudo -Hu pleroma MIX_ENV=prod mix phx.server +``` + +### Finalize installation + +If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma and you should consider to create a systemd service file for Pleroma. + +#### Nginx + +* Install nginx, if not already done: + +```shell +sudo apt install nginx +``` + +* Setup your SSL cert, using your method of choice or certbot. If using certbot, first install it: + +```shell +sudo apt install certbot +``` + +and then set it up: + +```shell +sudo mkdir -p /var/lib/letsencrypt/ +sudo certbot certonly --email -d --standalone +``` + +If that doesn’t work, make sure, that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again). + +--- + +* Copy the example nginx configuration and activate it: + +```shell +sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx +sudo ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx +``` + +* Before starting nginx edit the configuration and change it to your needs (e.g. change servername, change cert paths) +* Enable and start nginx: + +```shell +sudo systemctl enable --now nginx.service +``` + +If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run: + +```shell +sudo certbot certonly --email -d --webroot -w /var/lib/letsencrypt/ +``` + +#### Other webserver/proxies + +You can find example configurations for them in `/opt/pleroma/installation/`. + +#### Systemd service + +* Copy example service file + +```shell +sudo cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service +``` + +* Edit the service file and make sure that all paths fit your installation +* Enable and start `pleroma.service`: + +```shell +sudo systemctl enable --now pleroma.service +``` + +#### Create your first user + +If your instance is up and running, you can create your first user with administrative rights with the following task: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new --admin +``` + +#### Further reading + +* [Admin tasks](Admin tasks) +* [Backup your instance](Backup-your-instance) +* [Configuration tips](General tips for customizing pleroma fe) +* [Hardening your instance](Hardening-your-instance) +* [How to activate mediaproxy](How-to-activate-mediaproxy) +* [Small Pleroma-FE customizations](Small customizations) +* [Updating your instance](Updating-your-instance) + +## Questions + +Questions about the installation or didn’t it work as it should be, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or IRC Channel **#pleroma** on **Freenode**. 
diff --git a/docs/installation/debian_based_jp.md b/docs/installation/debian_based_jp.md new file mode 100644 index 000000000..ac5dcaaee --- /dev/null +++ b/docs/installation/debian_based_jp.md @@ -0,0 +1,191 @@ +# Pleromaの入れ方 +## 日本語訳について + +この記事は [Installing on Debian based distributions](Installing on Debian based distributions) の日本語訳です。何かがおかしいと思ったら、原文を見てください。 + +## インストール + +このガイドはDebian Stretchを仮定しています。Ubuntu 16.04でも可能です。 + +### 必要なソフトウェア + +- PostgreSQL 9.6+ (postgresql-contrib-9.6 または他のバージョンの PSQL をインストールしてください) +- Elixir 1.5 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like))。または [asdf](https://github.com/asdf-vm/asdf) を pleroma ユーザーでインストール。 +- erlang-dev +- erlang-tools +- erlang-parsetools +- erlang-xmerl (Jessieではバックポートからインストールすること!) +- git +- build-essential +- openssh +- openssl +- nginx prefered (Apacheも動くかもしれませんが、誰もテストしていません!) +- certbot (または何らかのACME Let's encryptクライアント) + +### システムを準備する + +* まずシステムをアップデートしてください。 +``` +apt update && apt dist-upgrade +``` + +* 複数のツールとpostgresqlをインストールします。あとで必要になるので。 +``` +apt install git build-essential openssl ssh sudo postgresql-9.6 postgresql-contrib-9.6 +``` +(postgresqlのバージョンは、あなたのディストロにあわせて変えてください。または、バージョン番号がいらないかもしれません。) + +### ElixirとErlangをインストールします + +* Erlangのリポジトリをダウンロードおよびインストールします。 +``` +wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_1.0_all.deb && sudo dpkg -i /tmp/erlang-solutions_1.0_all.deb +``` + +* ElixirとErlangをインストールします、 +``` +apt update && apt install elixir erlang-dev erlang-parsetools erlang-xmerl erlang-tools +``` + +### Pleroma BE (バックエンド) をインストールします + +* 新しいユーザーを作ります。 +``` +adduser pleroma +``` +(Give it any password you want, make it STRONG) + +* 新しいユーザーをsudoグループに入れます。 +``` +usermod -aG sudo pleroma +``` + +* 新しいユーザーに変身し、ホームディレクトリに移動します。 +``` +su pleroma +cd ~ +``` + +* Gitリポジトリをクローンします。 +``` +git clone https://git.pleroma.social/pleroma/pleroma +``` + +* 新しいディレクトリに移動します。 +``` +cd pleroma/ +``` + +* Pleromaが依存するパッケージをインストールします。Hexをインストールしてもよいか聞かれたら、yesを入力してください。 +``` +mix deps.get +``` + +* コンフィギュレーションを生成します。 +``` +mix pleroma.instance gen +``` + * rebar3をインストールしてもよいか聞かれたら、yesを入力してください。 + * この処理には時間がかかります。私もよく分かりませんが、何らかのコンパイルが行われているようです。 + * あなたのインスタンスについて、いくつかの質問があります。その回答は `config/generated_config.exs` というコンフィギュレーションファイルに保存されます。 + +**注意**: メディアプロクシを有効にすると回答して、なおかつ、キャッシュのURLは空欄のままにしている場合は、`generated_config.exs` を編集して、`base_url` で始まる行をコメントアウトまたは削除してください。そして、上にある行の `true` の後にあるコンマを消してください。 + +* コンフィギュレーションを確認して、もし問題なければ、ファイル名を変更してください。 +``` +mv config/{generated_config.exs,prod.secret.exs} +``` + +* これまでのコマンドで、すでに `config/setup_db.psql` というファイルが作られています。このファイルをもとに、データベースを作成します。 +``` +sudo su postgres -c 'psql -f config/setup_db.psql' +``` + +* そして、データベースのミグレーションを実行します。 +``` +MIX_ENV=prod mix ecto.migrate +``` + +* Pleromaを起動できるようになりました。 +``` +MIX_ENV=prod mix phx.server +``` + +### インストールを終わらせる + +あなたの新しいインスタンスを世界に向けて公開するには、nginxまたは何らかのウェブサーバー (プロクシ) を使用する必要があります。また、Pleroma のためにシステムサービスファイルを作成する必要があります。 + +#### Nginx + +* まだインストールしていないなら、nginxをインストールします。 +``` +apt install nginx +``` + +* SSLをセットアップします。他の方法でもよいですが、ここではcertbotを説明します。 +certbotを使うならば、まずそれをインストールします。 +``` +apt install certbot +``` +そしてセットアップします。 +``` +mkdir -p /var/lib/letsencrypt/.well-known +% certbot certonly --email your@emailaddress --webroot -w /var/lib/letsencrypt/ -d yourdomain +``` +もしうまくいかないときは、先にnginxを設定してください。ssl "on" を "off" に変えてから再試行してください。 + +--- + +* nginxコンフィギュレーションの例をnginxフォルダーにコピーします。 +``` +cp /home/pleroma/pleroma/installation/pleroma.nginx 
/etc/nginx/sites-enabled/pleroma.nginx +``` + +* nginxを起動する前に、コンフィギュレーションを編集してください。例えば、サーバー名、証明書のパスなどを変更する必要があります。 +* nginxを再起動します。 +``` +systemctl reload nginx.service +``` + +#### Systemd サービス + +* サービスファイルの例をコピーします。 +``` +cp /home/pleroma/pleroma/installation/pleroma.service /usr/lib/systemd/system/pleroma.service +``` + +* サービスファイルを変更します。すべてのパスが正しいことを確認してください。また、`[Service]` セクションに以下の行があることを確認してください。 +``` +Environment="MIX_ENV=prod" +``` + +* `pleroma.service` を enable および start してください。 +``` +systemctl enable --now pleroma.service +``` + +#### モデレーターを作る + +新たにユーザーを作ったら、モデレーター権限を与えたいかもしれません。以下のタスクで可能です。 +``` +mix set_moderator username [true|false] +``` + +モデレーターはすべてのポストを消すことができます。将来的には他のことも可能になるかもしれません。 + +#### メディアプロクシを有効にする + +`generate_config` でメディアプロクシを有効にしているなら、すでにメディアプロクシが動作しています。あとから設定を変更したいなら、[How to activate mediaproxy](How-to-activate-mediaproxy) を見てください。 + +#### コンフィギュレーションとカスタマイズ + +* [Configuration tips](General tips for customizing pleroma fe) +* [Small Pleroma-FE customizations](Small customizations) +* [Admin tasks](Admin tasks) + +## 質問ある? + +インストールについて質問がある、もしくは、うまくいかないときは、以下のところで質問できます。 + +* [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) +* **Freenode** の **#pleroma** IRCチャンネル diff --git a/docs/installation/netbsd_en.md b/docs/installation/netbsd_en.md new file mode 100644 index 000000000..e0ac98359 --- /dev/null +++ b/docs/installation/netbsd_en.md @@ -0,0 +1,198 @@ +# Installing on NetBSD + +## Required software + +pkgin should have been installed by the NetBSD installer if you selected +the right options. If it isn't installed, install it using pkg_add. + +Note that `postgresql11-contrib` is needed for the Postgres extensions +Pleroma uses. + +The `mksh` shell is needed to run the Elixir `mix` script. + +`# pkgin install acmesh elixir git-base git-docs mksh nginx postgresql11-server postgresql11-client postgresql11-contrib sudo` + +You can also build these packages using pkgsrc: +``` +databases/postgresql11-contrib +databases/postgresql11-client +databases/postgresql11-server +devel/git-base +devel/git-docs +lang/elixir +security/acmesh +security/sudo +shells/mksh +www/nginx +``` + +Copy the rc.d scripts to the right directory: + +``` +# cp /usr/pkg/share/examples/rc.d/nginx /usr/pkg/share/examples/rc.d/pgsql /etc/rc.d +``` + +Add nginx and Postgres to `/etc/rc.conf`: + +``` +nginx=YES +pgsql=YES +``` + +## Configuring postgres + +First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`. + +## Configuring Pleroma + +Create a user for Pleroma: + +``` +# groupadd pleroma +# useradd -d /home/pleroma -m -g pleroma -s /usr/pkg/bin/mksh pleroma +# echo 'export LC_ALL="en_GB.UTF-8"' >> /home/pleroma/.profile +# su -l pleroma -c $SHELL +``` + +Clone the repository: + +``` +$ cd /home/pleroma +$ git clone https://git.pleroma.social/pleroma/pleroma.git +``` + +Configure Pleroma. Note that you need a domain name at this point: + +``` +$ cd /home/pleroma/pleroma +$ mix deps.get +$ mix pleroma.instance gen # You will be asked a few questions here. +``` + +Since Postgres is configured, we can now initialize the database. There should +now be a file in `config/setup_db.psql` that makes this easier. Edit it, and +*change the password* to a password of your choice. Make sure it is secure, since +it'll be protecting your database. Now initialize the database: + +``` +$ sudo -Hu pgsql -g pgsql psql -f config/setup_db.psql +``` + +Postgres allows connections from all users without a password by default. 
To +fix this, edit `/usr/pkg/pgsql/data/pg_hba.conf`. Change every `trust` to +`password`. + +Once this is done, restart Postgres with `# /etc/rc.d/pgsql restart`. + +Run the database migrations. +You will need to do this whenever you update with `git pull`: + +``` +$ MIX_ENV=prod mix ecto.migrate +``` + +## Configuring nginx + +Install the example configuration file +`/home/pleroma/pleroma/installation/pleroma.nginx` to +`/usr/pkg/etc/nginx.conf`. + +Note that it will need to be wrapped in a `http {}` block. You should add +settings for the nginx daemon outside of the http block, for example: + +``` +user nginx nginx; +error_log /var/log/nginx/error.log; +worker_processes 4; + +events { +} +``` + +Edit the defaults: + +* Change `ssl_certificate` and `ssl_trusted_certificate` to +`/etc/nginx/tls/fullchain`. +* Change `ssl_certificate_key` to `/etc/nginx/tls/key`. +* Change `example.tld` to your instance's domain name. + +## Configuring acme.sh + +We'll be using acme.sh in Stateless Mode for TLS certificate renewal. + +First, get your account fingerprint: + +``` +$ sudo -Hu nginx -g nginx acme.sh --register-account +``` + +You need to add the following to your nginx configuration for the server +running on port 80: + +``` + location ~ ^/\.well-known/acme-challenge/([-_a-zA-Z0-9]+)$ { + default_type text/plain; + return 200 "$1.6fXAG9VyG0IahirPEU2ZerUtItW2DHzDzD9wZaEKpqd"; + } +``` + +Replace the string after after `$1.` with your fingerprint. + +Start nginx: + +``` +# /etc/rc.d/nginx start +``` + +It should now be possible to issue a cert (replace `example.com` +with your domain name): + +``` +$ sudo -Hu nginx -g nginx acme.sh --issue -d example.com --stateless +``` + +Let's add auto-renewal to `/etc/daily.local` +(replace `example.com` with your domain): + +``` +/usr/pkg/bin/sudo -Hu nginx -g nginx \ + /usr/pkg/sbin/acme.sh -r \ + -d example.com \ + --cert-file /etc/nginx/tls/cert \ + --key-file /etc/nginx/tls/key \ + --ca-file /etc/nginx/tls/ca \ + --fullchain-file /etc/nginx/tls/fullchain \ + --stateless +``` + +## Creating a startup script for Pleroma + +Copy the startup script to the correct location and make sure it's executable: + +``` +# cp /home/pleroma/pleroma/installation/netbsd/rc.d/pleroma /etc/rc.d/pleroma +# chmod +x /etc/rc.d/pleroma +``` + +Add the following to `/etc/rc.conf`: + +``` +pleroma=YES +pleroma_home="/home/pleroma" +pleroma_user="pleroma" +``` + +Run `# /etc/rc.d/pleroma start` to start Pleroma. + +## Conclusion + +Restart nginx with `# /etc/rc.d/nginx restart` and you should be up and running. + +If you need further help, contact niaa on freenode. + +Make sure your time is in sync, or other instances will receive your posts with +incorrect timestamps. You should have ntpd running. + +## Instances running NetBSD + +* diff --git a/docs/installation/openbsd_en.md b/docs/installation/openbsd_en.md new file mode 100644 index 000000000..633b08e6c --- /dev/null +++ b/docs/installation/openbsd_en.md @@ -0,0 +1,222 @@ +# Installing on OpenBSD +This guide describes the installation and configuration of pleroma (and the required software to run it) on a single OpenBSD 6.4 server. +For any additional information regarding commands and configuration files mentioned here, check the man pages [online](https://man.openbsd.org/) or directly on your server with the man command. 
+ +#### Required software +The following packages need to be installed: + * elixir + * gmake + * ImageMagick + * git + * postgresql-server + * postgresql-contrib + +To install them, run the following command (with doas or as root): +`pkg_add elixir gmake ImageMagick git postgresql-server postgresql-contrib` + +Pleroma requires a reverse proxy, OpenBSD has relayd in base (and is used in this guide) and packages/ports are available for nginx (www/nginx) and apache (www/apache-httpd). Independently of the reverse proxy, [acme-client(1)](https://man.openbsd.org/acme-client) can be used to get a certificate from Let's Encrypt. + +#### Creating the pleroma user +Pleroma will be run by a dedicated user, \_pleroma. Before creating it, insert the following lines in login.conf: +``` +pleroma:\ + :datasize-max=1536M:\ + :datasize-cur=1536M:\ + :openfiles-max=4096 +``` +This creates a "pleroma" login class and sets higher values than default for datasize and openfiles (see [login.conf(5)](https://man.openbsd.org/login.conf)), this is required to avoid having pleroma crash some time after starting. + +Create the \_pleroma user, assign it the pleroma login class and create its home directory (/home/\_pleroma/): `useradd -m -L pleroma _pleroma` + +#### Clone pleroma's directory +Enter a shell as the \_pleroma user. As root, run `su _pleroma -;cd`. Then clone the repository with `git clone https://git.pleroma.social/pleroma/pleroma.git`. Pleroma is now installed in /home/\_pleroma/pleroma/, it will be configured and started at the end of this guide. + +#### Postgresql +Start a shell as the \_postgresql user (as root run `su _postgresql -` then run the `initdb` command to initialize postgresql: +If you wish to not use the default location for postgresql's data (/var/postgresql/data), add the following switch at the end of the command: `-D ` and modify the `datadir` variable in the /etc/rc.d/postgresql script. + +When this is done, enable postgresql so that it starts on boot and start it. As root, run: +``` +rcctl enable postgresql +rcctl start postgresql +``` +To check that it started properly and didn't fail right after starting, you can run `ps aux | grep postgres`, there should be multiple lines of output. + +#### httpd +httpd will have three fuctions: + * redirect requests trying to reach the instance over http to the https URL + * serve a robots.txt file + * get Let's Encrypt certificates, with acme-client + +Insert the following config in httpd.conf: +``` +# $OpenBSD: httpd.conf,v 1.17 2017/04/16 08:50:49 ajacoutot Exp $ + +ext_inet="" +ext_inet6="" + +server "default" { + listen on $ext_inet port 80 # Comment to disable listening on IPv4 + listen on $ext_inet6 port 80 # Comment to disable listening on IPv6 + listen on 127.0.0.1 port 80 # Do NOT comment this line + + log syslog + directory no index + + location "/.well-known/acme-challenge/*" { + root "/acme" + request strip 2 + } + + location "/robots.txt" { root "/htdocs/local/" } + location "/*" { block return 302 "https://$HTTP_HOST$REQUEST_URI" } +} + +types { + include "/usr/share/misc/mime.types" +} +``` +Do not forget to change *\* to your server's address(es). If httpd should only listen on one protocol family, comment one of the two first *listen* options. + +Create the /var/www/htdocs/local/ folder and write the content of your robots.txt in /var/www/htdocs/local/robots.txt. 
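+
+One way to do that, as root (the permissive rules below mirror the default `robots.txt` Pleroma generates; adjust them to taste):
+
+```
+# Permissive example content, mirroring the robots.txt Pleroma generates by default
+mkdir -p /var/www/htdocs/local/
+printf 'User-Agent: *\nDisallow:\n' > /var/www/htdocs/local/robots.txt
+```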
+Check the configuration with `httpd -n`, if it is OK enable and start httpd (as root): +``` +rcctl enable httpd +rcctl start httpd +``` + +#### acme-client +acme-client is used to get SSL/TLS certificates from Let's Encrypt. +Insert the following configuration in /etc/acme-client.conf: +``` +# +# $OpenBSD: acme-client.conf,v 1.4 2017/03/22 11:14:14 benno Exp $ +# + +authority letsencrypt- { + #agreement url "https://letsencrypt.org/documents/LE-SA-v1.2-November-15-2017.pdf" + api url "https://acme-v01.api.letsencrypt.org/directory" + account key "/etc/acme/letsencrypt-privkey-.pem" +} + +domain { + domain key "/etc/ssl/private/.key" + domain certificate "/etc/ssl/.crt" + domain full chain certificate "/etc/ssl/.fullchain.pem" + sign with letsencrypt- + challengedir "/var/www/acme/" +} +``` +Replace *\* by the domain name you'll use for your instance. As root, run `acme-client -n` to check the config, then `acme-client -ADv ` to create account and domain keys, and request a certificate for the first time. +Make acme-client run everyday by adding it in /etc/daily.local. As root, run the following command: `echo "acme-client " >> /etc/daily.local`. + +Relayd will look for certificates and keys based on the address it listens on (see next part), the easiest way to make them available to relayd is to create a link, as root run: +``` +ln -s /etc/ssl/.fullchain.pem /etc/ssl/.crt +ln -s /etc/ssl/private/.key /etc/ssl/private/.key +``` +This will have to be done for each IPv4 and IPv6 address relayd listens on. + +#### relayd +relayd will be used as the reverse proxy sitting in front of pleroma. +Insert the following configuration in /etc/relayd.conf: +``` +# $OpenBSD: relayd.conf,v 1.4 2018/03/23 09:55:06 claudio Exp $ + +ext_inet="" +ext_inet6="" + +table { 127.0.0.1 } +table { 127.0.0.1 } + +http protocol plerup { # Protocol for upstream pleroma server + #tcp { nodelay, sack, socket buffer 65536, backlog 128 } # Uncomment and adjust as you see fit + tls ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305" + tls ecdhe secp384r1 + + # Forward some paths to the local server (as pleroma won't respond to them as you might want) + pass request quick path "/robots.txt" forward to + + # Append a bunch of headers + match request header append "X-Forwarded-For" value "$REMOTE_ADDR" # This two header and the next one are not strictly required by pleroma but adding them won't hurt + match request header append "X-Forwarded-By" value "$SERVER_ADDR:$SERVER_PORT" + + match response header append "X-XSS-Protection" value "1; mode=block" + match response header append "X-Permitted-Cross-Domain-Policies" value "none" + match response header append "X-Frame-Options" value "DENY" + match response header append "X-Content-Type-Options" value "nosniff" + match response header append "Referrer-Policy" value "same-origin" + match response header append "X-Download-Options" value "noopen" + match response header append "Content-Security-Policy" value "default-src 'none'; base-uri 'self'; form-action 'self'; img-src 'self' data: https:; media-src 'self' https:; style-src 'self' 'unsafe-inline'; font-src 'self'; script-src 'self'; connect-src 'self' wss://CHANGEME.tld; upgrade-insecure-requests;" # Modify "CHANGEME.tld" and set your instance's domain here + match request header append "Connection" value "upgrade" + #match response header append "Strict-Transport-Security" value "max-age=31536000; includeSubDomains" # Uncomment this only after you 
get HTTPS working. + + # If you do not want remote frontends to be able to access your Pleroma backend server, comment these lines + match response header append "Access-Control-Allow-Origin" value "*" + match response header append "Access-Control-Allow-Methods" value "POST, PUT, DELETE, GET, PATCH, OPTIONS" + match response header append "Access-Control-Allow-Headers" value "Authorization, Content-Type, Idempotency-Key" + match response header append "Access-Control-Expose-Headers" value "Link, X-RateLimit-Reset, X-RateLimit-Limit, X-RateLimit-Remaining, X-Request-Id" + # Stop commenting lines here +} + +relay wwwtls { + listen on $ext_inet port https tls # Comment to disable listening on IPv4 + listen on $ext_inet6 port https tls # Comment to disable listening on IPv6 + + protocol plerup + + forward to port 4000 check http "/" code 200 + forward to port 80 check http "/robots.txt" code 200 +} +``` +Again, change *\* to your server's address(es) and comment one of the two *listen* options if needed. Also change *wss://CHANGEME.tld* to *wss://\*. +Check the configuration with `relayd -n`, if it is OK enable and start relayd (as root): +``` +rcctl enable relayd +rcctl start relayd +``` + +#### pf +Enabling and configuring pf is highly recommended. +In /etc/pf.conf, insert the following configuration: +``` +# Macros +if="" +authorized_ssh_clients="any" + +# Skip traffic on loopback interface +set skip on lo + +# Default behavior +set block-policy drop +block in log all +pass out quick + +# Security features +match in all scrub (no-df random-id) +block in log from urpf-failed + +# Rules +pass in quick on $if inet proto icmp to ($if) icmp-type { echoreq unreach paramprob trace } # ICMP +pass in quick on $if inet6 proto icmp6 to ($if) icmp6-type { echoreq unreach paramprob timex toobig } # ICMPv6 +pass in quick on $if proto tcp to ($if) port { http https } # relayd/httpd +pass in quick on $if proto tcp from $authorized_ssh_clients to ($if) port ssh +``` +Replace *\* by your server's network interface name (which you can get with ifconfig). Consider replacing the content of the authorized\_ssh\_clients macro by, for exemple, your home IP address, to avoid SSH connection attempts from bots. + +Check pf's configuration by running `pfctl -nf /etc/pf.conf`, load it with `pfctl -f /etc/pf.conf` and enable pf at boot with `rcctl enable pf`. + +#### Configure and start pleroma +Enter a shell as \_pleroma (as root `su _pleroma -`) and enter pleroma's installation directory (`cd ~/pleroma/`). +Then follow the main installation guide: + * run `mix deps.get` + * run `mix pleroma.instance gen` and enter your instance's information when asked + * copy config/generated\_config.exs to config/prod.secret.exs. The default values should be sufficient but you should edit it and check that everything seems OK. + * exit your current shell back to a root one and run `psql -U postgres -f /home/_pleroma/config/setup_db.psql` to setup the database. + * return to a \_pleroma shell into pleroma's installation directory (`su _pleroma -;cd ~/pleroma`) and run `MIX_ENV=prod mix ecto.migrate` + +As \_pleroma in /home/\_pleroma/pleroma, you can now run `LC_ALL=en_US.UTF-8 MIX_ENV=prod mix phx.server` to start your instance. +In another SSH session/tmux window, check that it is working properly by running `ftp -MVo - http://127.0.0.1:4000/api/v1/instance`, you should get json output. Double-check that *uri*'s value is your instance's domain name. 
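+
+If you'd rather pull out just the *uri* field, something like the following can help (a rough sketch; it assumes the response is single-line JSON containing a `"uri"` key, which is what the endpoint returns at the time of writing):
+
+```
+# Assumes single-line JSON output containing a "uri" key
+ftp -MVo - http://127.0.0.1:4000/api/v1/instance | sed 's/.*"uri":[[:space:]]*"\([^"]*\)".*/\1/'
+```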
+ +##### Starting pleroma at boot +An rc script to automatically start pleroma at boot hasn't been written yet, it can be run in a tmux session (tmux is in base). diff --git a/docs/installation/openbsd_fi.md b/docs/installation/openbsd_fi.md new file mode 100644 index 000000000..fa6faa62d --- /dev/null +++ b/docs/installation/openbsd_fi.md @@ -0,0 +1,110 @@ +# Pleroman asennus OpenBSD:llä + +Tarvitset: +* Oman domainin +* OpenBSD 6.3 -serverin +* Auttavan ymmärryksen unix-järjestelmistä + +Komennot, joiden edessä on '#', tulee ajaa käyttäjänä `root`. Tämä on +suositeltavaa tehdä komennon `doas` avulla, katso `doas (1)` ja `doas.conf (5)`. +Tästä eteenpäin oletuksena on, että domain "esimerkki.com" osoittaa +serverin IP-osoitteeseen. + +Jos asennuksen kanssa on ongelmia, IRC-kanava #pleroma Freenodessa tai +Matrix-kanava #freenode_#pleroma:matrix.org ovat hyviä paikkoja löytää apua +(englanniksi), `/msg eal kukkuu` jos haluat välttämättä puhua härmää. + +Asenna tarvittava ohjelmisto: + +`# pkg_add git elixir gmake postgresql-server-10.3 postgresql-contrib-10.3` + +Luo postgresql-tietokanta: + +`# su - _postgresql` + +`$ mkdir /var/postgresql/data` + +`$ initdb -D /var/postgresql/data -E UTF8` + +`$ createdb` + +Käynnistä tietokanta ja aseta se käynnistymään automaattisesti. + +`# rcctl start postgresql` + +`# rcctl enable postgresql` + +Luo käyttäjä pleromaa varten (kysyy muutaman kysymyksen): + +`# adduser pleroma` + +Vaihda pleroma-käyttäjään ja mene kotihakemistoosi: + +`# su - pleroma` + +Lataa pleroman lähdekoodi: + +`$ git clone https://git.pleroma.social/pleroma/pleroma.git` + +`$ cd pleroma` + +Asenna tarvittavat elixir-kirjastot: + +`$ mix deps.get` + +`$ mix deps.compile` + +Luo tarvittava konfiguraatio: + +`$ mix generate_config` + +`$ cp config/generated_config.exs config/prod.secret.exs` + +Aja luodut tietokantakomennot: + +`# su _postgres -c 'psql -f config/setup_db.psql'` + +`$ MIX_ENV=prod mix ecto.migrate` + +Käynnistä pleroma-prosessi: + +`$ MIX_ENV=prod mix compile` + +`$ MIX_ENV=prod mix phx.server` + +Tässä vaiheessa on hyvä tarkistaa että asetukset ovat oikein. Avaa selaimella, +curlilla tai vastaavalla työkalulla `esimerkki.com:4000/api/v1/instance` ja katso +että kohta "uri" on "https://esimerkki.com". + +Huom! Muista varmistaa että muuttuja MIX_ENV on "prod" mix-komentoja ajaessasi. +Mix lukee oikean konfiguraatiotiedoston sen mukaisesti. + +Ohessa enimmäkseen toimivaksi todettu rc.d-skripti pleroman käynnistämiseen. +Kirjoita se tiedostoon /etc/rc.d/pleroma. Tämän jälkeen aja +`# chmod +x /etc/rc.d/pleroma`, ja voit käynnistää pleroman komennolla +`# /etc/rc.d/pleroma start`. + +``` +#!/bin/ksh +#/etc/rc.d/pleroma + +daemon="cd /home/pleroma/pleroma;MIX_ENV=prod /usr/local/bin/elixir" +daemon_flags="--detached /usr/local/bin/mix phx.server" +daemon_user="pleroma" +rc_reload="NO" +rc_bg="YES" + +pexp="beam" + +. /etc/rc.d/rc.subr + +rc_cmd $1 +``` + +Tämän jälkeen tarvitset enää HTTP-serverin välittämään kutsut pleroma-prosessille. +Tiedostosta `install/pleroma.nginx` löytyy esimerkkikonfiguraatio, ja TLS-sertifikaatit +saat ilmaiseksi esimerkiksi [letsencryptiltä](https://certbot.eff.org/lets-encrypt/opbsd-nginx.html). +Nginx asentuu yksinkertaisesti komennolla `# pkg_add nginx`. + +Kun olet valmis, avaa https://esimerkki.com selaimessasi. Luo käyttäjä ja seuraa kiinnostavia +tyyppejä muilla palvelimilla! 
diff --git a/docs/introduction.md b/docs/introduction.md new file mode 100644 index 000000000..4af0747fe --- /dev/null +++ b/docs/introduction.md @@ -0,0 +1,55 @@ +# Introduction to Pleroma +## What is Pleroma? +Pleroma is a federated social networking platform, compatible with GNU social, Mastodon and other OStatus and ActivityPub implementations. It is free software licensed under the AGPLv3. +It actually consists of two components: a backend, named simply Pleroma, and a user-facing frontend, named Pleroma-FE. It also includes the Mastodon frontend, if that's your thing. +It's part of what we call the fediverse, a federated network of instances which speak common protocols and can communicate with each other. +One account on a instance is enough to talk to the entire fediverse! + +## How can I use it? + +Pleroma instances are already widely deployed, a list can be found here: +http://distsn.org/pleroma-instances.html + +If you don't feel like joining an existing instance, but instead prefer to deploy your own instance, that's easy too! +Installation instructions can be found here: +[main Pleroma wiki](/) + +## I got an account, now what? +Great! Now you can explore the fediverse! +- Open the login page for your Pleroma instance (for ex. https://pleroma.soykaf.com) and login with your username and password. +(If you don't have one yet, click on Register) :slightly_smiling_face: + +At this point you will have two columns in front of you. + +### Left column +- first block: here you can see your avatar, your nickname a bio, and statistics (Statuses, Following, Followers). +Under that you have a text form which allows you to post new statuses. The icon on the left is for uploading media files and attach them to your post. The number under the text form is a character counter, every instance can have a different character limit (the default is 5000). +If you want to mention someone, type @ + name of the person. A drop-down menu will help you in finding the right person. :slight_smile: +To post your status, simply press Submit. + +- second block: Here you can switch between the different timelines: + - Timeline: all the people that you follow + - Mentions: all the statutes where you are mentioned + - Public Timeline: all the statutes from the local instance + - The Whole Known Network: everything, local and remote! + +- third block: this is the Chat block, where you communicate with people on the same instance in realtime. It is local-only, for now, but we're planning to make it extendable to the entire fediverse! :sweat_smile: + +- fourth block: This is the Notifications block, here you will get notified whenever somebody mentions you, follows you, repeats or favorites one of your statuses. + +### Right column +This is where the interesting stuff happens! :slight_smile: +Depending on the timeline you will see different statuses, but each status has a standard structure: +- Icon + name + link to profile. An optional left-arrow if it's a reply to another status (hovering will reveal the replied-to status). +- A + button on the right allows you to Expand/Collapse an entire discussion thread. It also updates in realtime! +- A binocular icon allows you to open the status on the instance where it's originating from. +- The text of the status, including mentions. If you click on a mention, it will automatically open the profile page of that person. +- Four buttons (left to right): Reply, Repeat, Favorite, Delete. 
+ +## Mastodon interface +If the Pleroma interface isn't your thing, or you're just trying something new but you want to keep using the familiar Mastodon interface, we got that too! :smile: +Just add a "/web" after your instance url (for ex. https://pleroma.soycaf.com/web) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC! :fireworks: +For more information on the Mastodon interface, please look here: +https://github.com/tootsuite/documentation/blob/master/Using-Mastodon/User-guide.md + +Remember, what you see is only the frontend part of Mastodon, the backend is still Pleroma. diff --git a/lib/mix/tasks/pleroma/instance.ex b/lib/mix/tasks/pleroma/instance.ex index 1ba452275..8f8d86a11 100644 --- a/lib/mix/tasks/pleroma/instance.ex +++ b/lib/mix/tasks/pleroma/instance.ex @@ -81,6 +81,14 @@ def run(["gen" | rest]) do email = Common.get_option(options, :admin_email, "What is your admin email address?") + indexable = + Common.get_option( + options, + :indexable, + "Do you want search engines to index your site? (y/n)", + "y" + ) === "y" + dbhost = Common.get_option(options, :dbhost, "What is the hostname of your database?", "localhost") @@ -142,6 +150,8 @@ def run(["gen" | rest]) do Mix.shell().info("Writing #{psql_path}.") File.write(psql_path, result_psql) + write_robots_txt(indexable) + Mix.shell().info( "\n" <> """ @@ -163,4 +173,28 @@ def run(["gen" | rest]) do ) end end + + defp write_robots_txt(indexable) do + robots_txt = + EEx.eval_file( + Path.expand("robots_txt.eex", __DIR__), + indexable: indexable + ) + + static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/") + + unless File.exists?(static_dir) do + File.mkdir_p!(static_dir) + end + + robots_txt_path = Path.join(static_dir, "robots.txt") + + if File.exists?(robots_txt_path) do + File.cp!(robots_txt_path, "#{robots_txt_path}.bak") + Mix.shell().info("Backing up existing robots.txt to #{robots_txt_path}.bak") + end + + File.write(robots_txt_path, robots_txt) + Mix.shell().info("Writing #{robots_txt_path}.") + end end diff --git a/lib/mix/tasks/pleroma/relay.ex b/lib/mix/tasks/pleroma/relay.ex index cbe23f82e..fbec473c5 100644 --- a/lib/mix/tasks/pleroma/relay.ex +++ b/lib/mix/tasks/pleroma/relay.ex @@ -4,8 +4,8 @@ defmodule Mix.Tasks.Pleroma.Relay do use Mix.Task - alias Pleroma.Web.ActivityPub.Relay alias Mix.Tasks.Pleroma.Common + alias Pleroma.Web.ActivityPub.Relay @shortdoc "Manages remote relays" @moduledoc """ diff --git a/lib/mix/tasks/pleroma/robots_txt.eex b/lib/mix/tasks/pleroma/robots_txt.eex new file mode 100644 index 000000000..1af3c47ee --- /dev/null +++ b/lib/mix/tasks/pleroma/robots_txt.eex @@ -0,0 +1,2 @@ +User-Agent: * +Disallow: <%= if indexable, do: "", else: "/" %> diff --git a/lib/mix/tasks/pleroma/robotstxt.ex b/lib/mix/tasks/pleroma/robotstxt.ex new file mode 100644 index 000000000..2128e1cd6 --- /dev/null +++ b/lib/mix/tasks/pleroma/robotstxt.ex @@ -0,0 +1,32 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Mix.Tasks.Pleroma.RobotsTxt do + use Mix.Task + + @shortdoc "Generate robots.txt" + @moduledoc """ + Generates robots.txt + + ## Overwrite robots.txt to disallow all + + mix pleroma.robots_txt disallow_all + + This will write a robots.txt that will hide all paths on your instance + from search engines and other robots that obey robots.txt + + """ + def run(["disallow_all"]) do + static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/") + + 
if !File.exists?(static_dir) do + File.mkdir_p!(static_dir) + end + + robots_txt_path = Path.join(static_dir, "robots.txt") + robots_txt_content = "User-Agent: *\nDisallow: /\n" + + File.write!(robots_txt_path, robots_txt_content, [:write]) + end +end diff --git a/lib/mix/tasks/pleroma/uploads.ex b/lib/mix/tasks/pleroma/uploads.ex index a01e61627..106fcf443 100644 --- a/lib/mix/tasks/pleroma/uploads.ex +++ b/lib/mix/tasks/pleroma/uploads.ex @@ -4,9 +4,9 @@ defmodule Mix.Tasks.Pleroma.Uploads do use Mix.Task + alias Mix.Tasks.Pleroma.Common alias Pleroma.Upload alias Pleroma.Uploaders.Local - alias Mix.Tasks.Pleroma.Common require Logger @log_every 50 @@ -20,7 +20,6 @@ defmodule Mix.Tasks.Pleroma.Uploads do Options: - `--delete` - delete local uploads after migrating them to the target uploader - A list of available uploaders can be seen in config.exs """ def run(["migrate_local", target_uploader | args]) do diff --git a/lib/mix/tasks/pleroma/user.ex b/lib/mix/tasks/pleroma/user.ex index 297332bc4..78493231c 100644 --- a/lib/mix/tasks/pleroma/user.ex +++ b/lib/mix/tasks/pleroma/user.ex @@ -5,9 +5,9 @@ defmodule Mix.Tasks.Pleroma.User do use Mix.Task import Ecto.Changeset - alias Pleroma.Repo - alias Pleroma.User alias Mix.Tasks.Pleroma.Common + alias Pleroma.User + alias Pleroma.UserInviteToken @shortdoc "Manages Pleroma users" @moduledoc """ @@ -27,12 +27,28 @@ defmodule Mix.Tasks.Pleroma.User do ## Generate an invite link. - mix pleroma.user invite + mix pleroma.user invite [OPTION...] + + Options: + - `--expires_at DATE` - last day on which token is active (e.g. "2019-04-05") + - `--max_use NUMBER` - maximum numbers of token uses + + ## List generated invites + + mix pleroma.user invites + + ## Revoke invite + + mix pleroma.user revoke_invite TOKEN OR TOKEN_ID ## Delete the user's account. mix pleroma.user rm NICKNAME + ## Delete the user's activities. + + mix pleroma.user delete_activities NICKNAME + ## Deactivate or activate the user's account. 
mix pleroma.user toggle_activated NICKNAME @@ -220,7 +236,7 @@ def run(["unsubscribe", nickname]) do {:ok, friends} = User.get_friends(user) Enum.each(friends, fn friend -> - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) Mix.shell().info("Unsubscribing #{friend.nickname} from #{user.nickname}") User.unfollow(user, friend) @@ -228,7 +244,7 @@ def run(["unsubscribe", nickname]) do :timer.sleep(500) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) if Enum.empty?(user.following) do Mix.shell().info("Successfully unsubscribed all followers from #{user.nickname}") @@ -302,23 +318,91 @@ def run(["untag", nickname | tags]) do end end - def run(["invite"]) do + def run(["invite" | rest]) do + {options, [], []} = + OptionParser.parse(rest, + strict: [ + expires_at: :string, + max_use: :integer + ] + ) + + options = + options + |> Keyword.update(:expires_at, {:ok, nil}, fn + nil -> {:ok, nil} + val -> Date.from_iso8601(val) + end) + |> Enum.into(%{}) + Common.start_pleroma() - with {:ok, token} <- Pleroma.UserInviteToken.create_token() do - Mix.shell().info("Generated user invite token") + with {:ok, val} <- options[:expires_at], + options = Map.put(options, :expires_at, val), + {:ok, invite} <- UserInviteToken.create_invite(options) do + Mix.shell().info( + "Generated user invite token " <> String.replace(invite.invite_type, "_", " ") + ) url = Pleroma.Web.Router.Helpers.redirect_url( Pleroma.Web.Endpoint, :registration_page, - token.token + invite.token ) IO.puts(url) + else + error -> + Mix.shell().error("Could not create invite token: #{inspect(error)}") + end + end + + def run(["invites"]) do + Common.start_pleroma() + + Mix.shell().info("Invites list:") + + UserInviteToken.list_invites() + |> Enum.each(fn invite -> + expire_info = + with expires_at when not is_nil(expires_at) <- invite.expires_at do + " | Expires at: #{Date.to_string(expires_at)}" + end + + using_info = + with max_use when not is_nil(max_use) <- invite.max_use do + " | Max use: #{max_use} Left use: #{max_use - invite.uses}" + end + + Mix.shell().info( + "ID: #{invite.id} | Token: #{invite.token} | Token type: #{invite.invite_type} | Used: #{ + invite.used + }#{expire_info}#{using_info}" + ) + end) + end + + def run(["revoke_invite", token]) do + Common.start_pleroma() + + with {:ok, invite} <- UserInviteToken.find_by_token(token), + {:ok, _} <- UserInviteToken.update_invite(invite, %{used: true}) do + Mix.shell().info("Invite for token #{token} was revoked.") + else + _ -> Mix.shell().error("No invite found with token #{token}") + end + end + + def run(["delete_activities", nickname]) do + Common.start_pleroma() + + with %User{local: true} = user <- User.get_by_nickname(nickname) do + User.delete_user_activities(user) + Mix.shell().info("User #{nickname} statuses deleted.") else _ -> - Mix.shell().error("Could not create invite token.") + Mix.shell().error("No local user #{nickname}") end end diff --git a/lib/pleroma/PasswordResetToken.ex b/lib/pleroma/PasswordResetToken.ex index 750ddd3c0..7afbc8751 100644 --- a/lib/pleroma/PasswordResetToken.ex +++ b/lib/pleroma/PasswordResetToken.ex @@ -7,9 +7,9 @@ defmodule Pleroma.PasswordResetToken do import Ecto.Changeset - alias Pleroma.User - alias Pleroma.Repo alias Pleroma.PasswordResetToken + alias Pleroma.Repo + alias Pleroma.User schema "password_reset_tokens" do belongs_to(:user, User, type: Pleroma.FlakeId) @@ -39,7 +39,7 @@ def used_changeset(struct) do def reset_password(token, data) do with %{used: false} = token <- 
Repo.get_by(PasswordResetToken, %{token: token}), - %User{} = user <- Repo.get(User, token.user_id), + %User{} = user <- User.get_by_id(token.user_id), {:ok, _user} <- User.reset_password(user, data), {:ok, token} <- Repo.update(used_changeset(token)) do {:ok, token} diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex index c466bff7f..c8c7f0d04 100644 --- a/lib/pleroma/activity.ex +++ b/lib/pleroma/activity.ex @@ -5,9 +5,10 @@ defmodule Pleroma.Activity do use Ecto.Schema - alias Pleroma.Repo alias Pleroma.Activity alias Pleroma.Notification + alias Pleroma.Object + alias Pleroma.Repo import Ecto.Query @@ -22,16 +23,53 @@ defmodule Pleroma.Activity do "Like" => "favourite" } + @mastodon_to_ap_notification_types for {k, v} <- @mastodon_notification_types, + into: %{}, + do: {v, k} + schema "activities" do field(:data, :map) field(:local, :boolean, default: true) field(:actor, :string) - field(:recipients, {:array, :string}) + field(:recipients, {:array, :string}, default: []) has_many(:notifications, Notification, on_delete: :delete_all) + # Attention: this is a fake relation, don't try to preload it blindly and expect it to work! + # The foreign key is embedded in a jsonb field. + # + # To use it, you probably want to do an inner join and a preload: + # + # ``` + # |> join(:inner, [activity], o in Object, + # on: fragment("(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')", + # o.data, activity.data, activity.data)) + # |> preload([activity, object], [object: object]) + # ``` + # + # As a convenience, Activity.with_preloaded_object() sets up an inner join and preload for the + # typical case. + has_one(:object, Object, on_delete: :nothing, foreign_key: :id) + timestamps() end + def with_preloaded_object(query) do + query + |> join( + :inner, + [activity], + o in Object, + on: + fragment( + "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')", + o.data, + activity.data, + activity.data + ) + ) + |> preload([activity, object], object: object) + end + def get_by_ap_id(ap_id) do Repo.one( from( @@ -41,6 +79,24 @@ def get_by_ap_id(ap_id) do ) end + def get_by_ap_id_with_object(ap_id) do + Repo.one( + from( + activity in Activity, + where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id)), + left_join: o in Object, + on: + fragment( + "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')", + o.data, + activity.data, + activity.data + ), + preload: [object: o] + ) + ) + end + def get_by_id(id) do Activity |> where([a], a.id == ^id) @@ -48,6 +104,22 @@ def get_by_id(id) do |> Repo.one() end + def get_by_id_with_object(id) do + from(activity in Activity, + where: activity.id == ^id, + inner_join: o in Object, + on: + fragment( + "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')", + o.data, + activity.data, + activity.data + ), + preload: [object: o] + ) + |> Repo.one() + end + def by_object_ap_id(ap_id) do from( activity in Activity, @@ -75,7 +147,7 @@ def create_by_object_ap_id(ap_ids) when is_list(ap_ids) do ) end - def create_by_object_ap_id(ap_id) do + def create_by_object_ap_id(ap_id) when is_binary(ap_id) do from( activity in Activity, where: @@ -89,6 +161,8 @@ def create_by_object_ap_id(ap_id) do ) end + def create_by_object_ap_id(_), do: nil + def get_all_create_by_object_ap_id(ap_id) do Repo.all(create_by_object_ap_id(ap_id)) end @@ -101,8 +175,39 @@ def get_create_by_object_ap_id(ap_id) when is_binary(ap_id) do def get_create_by_object_ap_id(_), do: nil - def normalize(obj) when is_map(obj), do: Activity.get_by_ap_id(obj["id"]) - def 
normalize(ap_id) when is_binary(ap_id), do: Activity.get_by_ap_id(ap_id) + def create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do + from( + activity in Activity, + where: + fragment( + "coalesce((?)->'object'->>'id', (?)->>'object') = ?", + activity.data, + activity.data, + ^to_string(ap_id) + ), + where: fragment("(?)->>'type' = 'Create'", activity.data), + inner_join: o in Object, + on: + fragment( + "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')", + o.data, + activity.data, + activity.data + ), + preload: [object: o] + ) + end + + def create_by_object_ap_id_with_object(_), do: nil + + def get_create_by_object_ap_id_with_object(ap_id) do + ap_id + |> create_by_object_ap_id_with_object() + |> Repo.one() + end + + def normalize(obj) when is_map(obj), do: get_by_ap_id_with_object(obj["id"]) + def normalize(ap_id) when is_binary(ap_id), do: get_by_ap_id_with_object(ap_id) def normalize(_), do: nil def get_in_reply_to_activity(%Activity{data: %{"object" => %{"inReplyTo" => ap_id}}}) do @@ -111,6 +216,19 @@ def get_in_reply_to_activity(%Activity{data: %{"object" => %{"inReplyTo" => ap_i def get_in_reply_to_activity(_), do: nil + def delete_by_ap_id(id) when is_binary(id) do + by_object_ap_id(id) + |> select([u], u) + |> Repo.delete_all() + |> elem(1) + |> Enum.find(fn + %{data: %{"type" => "Create", "object" => %{"id" => ap_id}}} -> ap_id == id + _ -> nil + end) + end + + def delete_by_ap_id(_), do: nil + for {ap_type, type} <- @mastodon_notification_types do def mastodon_notification_type(%Activity{data: %{"type" => unquote(ap_type)}}), do: unquote(type) @@ -118,6 +236,10 @@ def mastodon_notification_type(%Activity{data: %{"type" => unquote(ap_type)}}), def mastodon_notification_type(%Activity{}), do: nil + def from_mastodon_notification_type(type) do + Map.get(@mastodon_to_ap_notification_types, type) + end + def all_by_actor_and_id(actor, status_ids \\ []) def all_by_actor_and_id(_actor, []), do: [] @@ -128,6 +250,52 @@ def all_by_actor_and_id(actor, status_ids) do |> Repo.all() end + def increase_replies_count(id) do + Activity + |> where(id: ^id) + |> update([a], + set: [ + data: + fragment( + """ + jsonb_set(?, '{object, repliesCount}', + (coalesce((?->'object'->>'repliesCount')::int, 0) + 1)::varchar::jsonb, true) + """, + a.data, + a.data + ) + ] + ) + |> Repo.update_all([]) + |> case do + {1, [activity]} -> activity + _ -> {:error, "Not found"} + end + end + + def decrease_replies_count(id) do + Activity + |> where(id: ^id) + |> update([a], + set: [ + data: + fragment( + """ + jsonb_set(?, '{object, repliesCount}', + (greatest(0, (?->'object'->>'repliesCount')::int - 1))::varchar::jsonb, true) + """, + a.data, + a.data + ) + ] + ) + |> Repo.update_all([]) + |> case do + {1, [activity]} -> activity + _ -> {:error, "Not found"} + end + end + def restrict_disabled_users(query) do from(activity in query, where: diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index d2523c045..eeb415084 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -11,10 +11,10 @@ defmodule Pleroma.Application do @repository Mix.Project.config()[:source_url] def name, do: @name def version, do: @version - def named_version(), do: @name <> " " <> @version + def named_version, do: @name <> " " <> @version def repository, do: @repository - def user_agent() do + def user_agent do info = "#{Pleroma.Web.base_url()} <#{Pleroma.Config.get([:instance, :email], "")}>" named_version() <> "; " <> info end @@ -25,6 +25,7 @@ def start(_type, _args) do import 
Cachex.Spec Pleroma.Config.DeprecationWarnings.warn() + setup_instrumenters() # Define workers and child supervisors to be supervised children = @@ -48,7 +49,7 @@ def start(_type, _args) do [ :user_cache, [ - default_ttl: 25000, + default_ttl: 25_000, ttl_interval: 1000, limit: 2500 ] @@ -60,7 +61,7 @@ def start(_type, _args) do [ :object_cache, [ - default_ttl: 25000, + default_ttl: 25_000, ttl_interval: 1000, limit: 2500 ] @@ -103,15 +104,15 @@ def start(_type, _args) do ], id: :cachex_idem ), - worker(Pleroma.FlakeId, []) + worker(Pleroma.FlakeId, []), + worker(Pleroma.ScheduledActivityWorker, []) ] ++ hackney_pool_children() ++ [ worker(Pleroma.Web.Federator.RetryQueue, []), worker(Pleroma.Stats, []), - worker(Pleroma.Web.Push, []), - worker(Pleroma.Jobs, []), - worker(Task, [&Pleroma.Web.Federator.init/0], restart: :temporary) + worker(Task, [&Pleroma.Web.Push.init/0], restart: :temporary, id: :web_push_init), + worker(Task, [&Pleroma.Web.Federator.init/0], restart: :temporary, id: :federator_init) ] ++ streamer_child() ++ chat_child() ++ @@ -127,7 +128,25 @@ def start(_type, _args) do Supervisor.start_link(children, opts) end - def enabled_hackney_pools() do + defp setup_instrumenters do + require Prometheus.Registry + + :ok = + :telemetry.attach( + "prometheus-ecto", + [:pleroma, :repo, :query], + &Pleroma.Repo.Instrumenter.handle_event/4, + %{} + ) + + Prometheus.Registry.register_collector(:prometheus_process_collector) + Pleroma.Web.Endpoint.MetricsExporter.setup() + Pleroma.Web.Endpoint.PipelineInstrumenter.setup() + Pleroma.Web.Endpoint.Instrumenter.setup() + Pleroma.Repo.Instrumenter.setup() + end + + def enabled_hackney_pools do [:media] ++ if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do [:federation] @@ -142,14 +161,14 @@ def enabled_hackney_pools() do end if Mix.env() == :test do - defp streamer_child(), do: [] - defp chat_child(), do: [] + defp streamer_child, do: [] + defp chat_child, do: [] else - defp streamer_child() do + defp streamer_child do [worker(Pleroma.Web.Streamer, [])] end - defp chat_child() do + defp chat_child do if Pleroma.Config.get([:chat, :enabled]) do [worker(Pleroma.Web.ChatChannel.ChatChannelState, [])] else @@ -158,7 +177,7 @@ defp chat_child() do end end - defp hackney_pool_children() do + defp hackney_pool_children do for pool <- enabled_hackney_pools() do options = Pleroma.Config.get([:hackney_pools, pool]) :hackney_pool.child_spec(pool, options) diff --git a/lib/pleroma/captcha/captcha.ex b/lib/pleroma/captcha/captcha.ex index aa41acd1a..f105cbb25 100644 --- a/lib/pleroma/captcha/captcha.ex +++ b/lib/pleroma/captcha/captcha.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Captcha do use GenServer @doc false - def start_link() do + def start_link do GenServer.start_link(__MODULE__, [], name: __MODULE__) end @@ -22,7 +22,7 @@ def init(_) do @doc """ Ask the configured captcha service for a new captcha """ - def new() do + def new do GenServer.call(__MODULE__, :new) end @@ -73,7 +73,7 @@ def handle_call({:validate, token, captcha, answer_data}, _from, state) do secret = KeyGenerator.generate(secret_key_base, token <> "_encrypt") sign_secret = KeyGenerator.generate(secret_key_base, token <> "_sign") - # If the time found is less than (current_time - seconds_valid), then the time has already passed. 
+ # If the time found is less than (current_time-seconds_valid) then the time has already passed # Later we check that the time found is more than the presumed invalidatation time, that means # that the data is still valid and the captcha can be checked seconds_valid = Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]) diff --git a/lib/pleroma/captcha/kocaptcha.ex b/lib/pleroma/captcha/kocaptcha.ex index 34a611492..61688e778 100644 --- a/lib/pleroma/captcha/kocaptcha.ex +++ b/lib/pleroma/captcha/kocaptcha.ex @@ -7,7 +7,7 @@ defmodule Pleroma.Captcha.Kocaptcha do @behaviour Service @impl Service - def new() do + def new do endpoint = Pleroma.Config.get!([__MODULE__, :endpoint]) case Tesla.get(endpoint <> "/new") do diff --git a/lib/pleroma/clippy.ex b/lib/pleroma/clippy.ex index 4e9bdbe19..bd20952a6 100644 --- a/lib/pleroma/clippy.ex +++ b/lib/pleroma/clippy.ex @@ -7,13 +7,13 @@ defmodule Pleroma.Clippy do # No software is complete until they have a Clippy implementation. # A ballmer peak _may_ be required to change this module. - def tip() do + def tip do tips() |> Enum.random() |> puts() end - def tips() do + def tips do host = Pleroma.Config.get([Pleroma.Web.Endpoint, :url, :host]) [ @@ -92,8 +92,8 @@ def puts(text_or_lines) do # surrond one/five line clippy with blank lines around to not fuck up the layout # - # yes this fix sucks but it's good enough, have you ever seen a release of windows wihtout some butched - # features anyway? + # yes this fix sucks but it's good enough, have you ever seen a release of windows + # without some butched features anyway? lines = if length(lines) == 1 or length(lines) == 5 do [""] ++ lines ++ [""] diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index 21507cd38..189faa15f 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -57,4 +57,8 @@ def delete([parent_key | keys]) do def delete(key) do Application.delete_env(:pleroma, key) end + + def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], []) + + def oauth_consumer_enabled?, do: oauth_consumer_strategies() != [] end diff --git a/lib/pleroma/config/deprecation_warnings.ex b/lib/pleroma/config/deprecation_warnings.ex index 7451fd0a7..0345ac19c 100644 --- a/lib/pleroma/config/deprecation_warnings.ex +++ b/lib/pleroma/config/deprecation_warnings.ex @@ -5,7 +5,7 @@ defmodule Pleroma.Config.DeprecationWarnings do require Logger - def check_frontend_config_mechanism() do + def check_frontend_config_mechanism do if Pleroma.Config.get(:fe) do Logger.warn(""" !!!DEPRECATION WARNING!!! diff --git a/lib/pleroma/emails/admin_email.ex b/lib/pleroma/emails/admin_email.ex index 9b20c7e08..afefccec5 100644 --- a/lib/pleroma/emails/admin_email.ex +++ b/lib/pleroma/emails/admin_email.ex @@ -29,9 +29,13 @@ def report(to, reporter, account, statuses, comment) do if length(statuses) > 0 do statuses_list_html = statuses - |> Enum.map(fn %{id: id} -> - status_url = Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, id) - "
<li><a href=\"#{status_url}\">#{status_url}</a></li>" + |> Enum.map(fn + %{id: id} -> + status_url = Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, id) + "<li><a href=\"#{status_url}\">#{status_url}</a></li>" + + id when is_binary(id) -> + "<li><a href=\"#{id}\">#{id}</a></li>" + end) + |> Enum.join("\n") diff --git a/lib/pleroma/emails/mailer.ex b/lib/pleroma/emails/mailer.ex index f7e3aa78b..b384e6fec 100644 --- a/lib/pleroma/emails/mailer.ex +++ b/lib/pleroma/emails/mailer.ex @@ -6,7 +6,7 @@ defmodule Pleroma.Mailer do use Swoosh.Mailer, otp_app: :pleroma def deliver_async(email, config \\ []) do - Pleroma.Jobs.enqueue(:mailer, __MODULE__, [:deliver_async, email, config]) + PleromaJobQueue.enqueue(:mailer, __MODULE__, [:deliver_async, email, config]) end def perform(:deliver_async, email, config), do: deliver(email, config) diff --git a/lib/pleroma/emoji.ex b/lib/pleroma/emoji.ex index bb3190e08..87c7f2cec 100644 --- a/lib/pleroma/emoji.ex +++ b/lib/pleroma/emoji.ex @@ -8,22 +8,28 @@ defmodule Pleroma.Emoji do * the built-in Finmojis (if enabled in configuration), * the files: `config/emoji.txt` and `config/custom_emoji.txt` - * glob paths + * glob paths, nested folder is used as tag name for grouping e.g. priv/static/emoji/custom/nested_folder This GenServer stores in an ETS table the list of the loaded emojis, and also allows to reload the list at runtime. """ use GenServer + + @type pattern :: Regex.t() | module() | String.t() + @type patterns :: pattern() | [pattern()] + @type group_patterns :: keyword(patterns()) + @ets __MODULE__.Ets @ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}] + @groups Application.get_env(:pleroma, :emoji)[:groups] @doc false - def start_link() do + def start_link do GenServer.start_link(__MODULE__, [], name: __MODULE__) end @doc "Reloads the emojis from disk." @spec reload() :: :ok - def reload() do + def reload do GenServer.call(__MODULE__, :reload) end @@ -38,7 +44,7 @@ def get(name) do @doc "Returns all the emojos!!" @spec get_all() :: [{String.t(), String.t()}, ...]
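Emoji entries produced by this loader are now three-element `{shortcode, path, tag}` tuples, with the tag resolved from the `:groups` patterns by `match_extra/2` (added further down in this file). A rough sketch of how a group is matched; the `:groups` value shown is an assumed example, not taken from this patch:

```elixir
# Assumed config shape, inferred from how @groups is read above:
#
#   config :pleroma, :emoji,
#     groups: [Custom: ["/emoji/custom/*.png", "/emoji/local/*.png"]]

group_patterns = [Custom: ["/emoji/custom/*.png", "/emoji/local/*.png"]]

# "*" patterns are compiled to regexes; plain strings must match the path exactly.
Pleroma.Emoji.match_extra(group_patterns, "/emoji/custom/blobcat.png")
# => :Custom

# When nothing matches, nil comes back and to_string/1 turns it into "" on the tuple.
Pleroma.Emoji.match_extra(group_patterns, "/finmoji/128px/woollysocks-128.png")
# => nil
```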
- def get_all() do + def get_all do :ets.tab2list(@ets) end @@ -72,14 +78,15 @@ def code_change(_old_vsn, state, _extra) do {:ok, state} end - defp load() do + defp load do + finmoji_enabled = Keyword.get(Application.get_env(:pleroma, :instance), :finmoji_enabled) + shortcode_globs = Application.get_env(:pleroma, :emoji)[:shortcode_globs] || [] + emojis = - (load_finmoji(Keyword.get(Application.get_env(:pleroma, :instance), :finmoji_enabled)) ++ + (load_finmoji(finmoji_enabled) ++ load_from_file("config/emoji.txt") ++ load_from_file("config/custom_emoji.txt") ++ - load_from_globs( - Keyword.get(Application.get_env(:pleroma, :emoji, []), :shortcode_globs, []) - )) + load_from_globs(shortcode_globs)) |> Enum.reject(fn value -> value == nil end) true = :ets.insert(@ets, emojis) @@ -151,9 +158,12 @@ defp load() do "white_nights", "woollysocks" ] + defp load_finmoji(true) do Enum.map(@finmoji, fn finmoji -> - {finmoji, "/finmoji/128px/#{finmoji}-128.png"} + file_name = "/finmoji/128px/#{finmoji}-128.png" + group = match_extra(@groups, file_name) + {finmoji, file_name, to_string(group)} end) end @@ -172,8 +182,14 @@ defp load_from_file_stream(stream) do |> Stream.map(&String.trim/1) |> Stream.map(fn line -> case String.split(line, ~r/,\s*/) do - [name, file] -> {name, file} - _ -> nil + [name, file, tags] -> + {name, file, tags} + + [name, file] -> + {name, file, to_string(match_extra(@groups, file))} + + _ -> + nil end end) |> Enum.to_list() @@ -190,9 +206,40 @@ defp load_from_globs(globs) do |> Enum.concat() Enum.map(paths, fn path -> + tag = match_extra(@groups, Path.join("/", Path.relative_to(path, static_path))) shortcode = Path.basename(path, Path.extname(path)) external_path = Path.join("/", Path.relative_to(path, static_path)) - {shortcode, external_path} + {shortcode, external_path, to_string(tag)} + end) + end + + @doc """ + Finds a matching group for the given emoji filename + """ + @spec match_extra(group_patterns(), String.t()) :: atom() | nil + def match_extra(group_patterns, filename) do + match_group_patterns(group_patterns, fn pattern -> + case pattern do + %Regex{} = regex -> Regex.match?(regex, filename) + string when is_binary(string) -> filename == string + end + end) + end + + defp match_group_patterns(group_patterns, matcher) do + Enum.find_value(group_patterns, fn {group, patterns} -> + patterns = + patterns + |> List.wrap() + |> Enum.map(fn pattern -> + if String.contains?(pattern, "*") do + ~r(#{String.replace(pattern, "*", ".*")}) + else + pattern + end + end) + + Enum.any?(patterns, matcher) && group end) end end diff --git a/lib/pleroma/filter.ex b/lib/pleroma/filter.ex index bdc34698c..79efc29f0 100644 --- a/lib/pleroma/filter.ex +++ b/lib/pleroma/filter.ex @@ -8,8 +8,8 @@ defmodule Pleroma.Filter do import Ecto.Changeset import Ecto.Query - alias Pleroma.User alias Pleroma.Repo + alias Pleroma.User schema "filters" do belongs_to(:user, User, type: Pleroma.FlakeId) diff --git a/lib/pleroma/flake_id.ex b/lib/pleroma/flake_id.ex index 9f098ce33..58ab3650d 100644 --- a/lib/pleroma/flake_id.ex +++ b/lib/pleroma/flake_id.ex @@ -46,7 +46,7 @@ def from_string(<<_::integer-size(128)>> = flake), do: flake def from_string(string) when is_binary(string) and byte_size(string) < 18 do case Integer.parse(string) do - {id, _} -> <<0::integer-size(64), id::integer-size(64)>> + {id, ""} -> <<0::integer-size(64), id::integer-size(64)>> _ -> nil end end @@ -85,7 +85,7 @@ def dump(value) do {:ok, FlakeId.from_string(value)} end - def autogenerate(), do: get() + def autogenerate, do: 
get() # -- GenServer API def start_link do @@ -165,7 +165,7 @@ defp time do 1_000_000_000 * mega_seconds + seconds * 1000 + :erlang.trunc(micro_seconds / 1000) end - defp worker_id() do + defp worker_id do <> = :crypto.strong_rand_bytes(6) worker end diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex index 048c032ed..8ea9dbd38 100644 --- a/lib/pleroma/formatter.ex +++ b/lib/pleroma/formatter.ex @@ -8,8 +8,10 @@ defmodule Pleroma.Formatter do alias Pleroma.User alias Pleroma.Web.MediaProxy + @safe_mention_regex ~r/^(\s*(?@.+?\s+)+)(?.*)/ @markdown_characters_regex ~r/(`|\*|_|{|}|[|]|\(|\)|#|\+|-|\.|!)/ @link_regex ~r{((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+}ui + # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength @auto_linker_config hashtag: true, hashtag_handler: &Pleroma.Formatter.hashtag_handler/4, @@ -44,15 +46,28 @@ def hashtag_handler("#" <> tag = tag_text, _buffer, _opts, acc) do @doc """ Parses a text and replace plain text links with HTML. Returns a tuple with a result text, mentions, and hashtags. + + If the 'safe_mention' option is given, only consecutive mentions at the start the post are actually mentioned. """ @spec linkify(String.t(), keyword()) :: {String.t(), [{String.t(), User.t()}], [{String.t(), String.t()}]} def linkify(text, options \\ []) do options = options ++ @auto_linker_config - acc = %{mentions: MapSet.new(), tags: MapSet.new()} - {text, %{mentions: mentions, tags: tags}} = AutoLinker.link_map(text, acc, options) - {text, MapSet.to_list(mentions), MapSet.to_list(tags)} + if options[:safe_mention] && Regex.named_captures(@safe_mention_regex, text) do + %{"mentions" => mentions, "rest" => rest} = Regex.named_captures(@safe_mention_regex, text) + acc = %{mentions: MapSet.new(), tags: MapSet.new()} + + {text_mentions, %{mentions: mentions}} = AutoLinker.link_map(mentions, acc, options) + {text_rest, %{tags: tags}} = AutoLinker.link_map(rest, acc, options) + + {text_mentions <> text_rest, MapSet.to_list(mentions), MapSet.to_list(tags)} + else + acc = %{mentions: MapSet.new(), tags: MapSet.new()} + {text, %{mentions: mentions, tags: tags}} = AutoLinker.link_map(text, acc, options) + + {text, MapSet.to_list(mentions), MapSet.to_list(tags)} + end end def emojify(text) do @@ -62,9 +77,9 @@ def emojify(text) do def emojify(text, nil), do: text def emojify(text, emoji, strip \\ false) do - Enum.reduce(emoji, text, fn {emoji, file}, text -> - emoji = HTML.strip_tags(emoji) - file = HTML.strip_tags(file) + Enum.reduce(emoji, text, fn emoji_data, text -> + emoji = HTML.strip_tags(elem(emoji_data, 0)) + file = HTML.strip_tags(elem(emoji_data, 1)) html = if not strip do @@ -86,7 +101,7 @@ def demojify(text) do def demojify(text, nil), do: text def get_emoji(text) when is_binary(text) do - Enum.filter(Emoji.get_all(), fn {emoji, _} -> String.contains?(text, ":#{emoji}:") end) + Enum.filter(Emoji.get_all(), fn {emoji, _, _} -> String.contains?(text, ":#{emoji}:") end) end def get_emoji(_), do: [] diff --git a/lib/pleroma/gopher/server.ex b/lib/pleroma/gopher/server.ex index 24190574e..6a56a6f67 100644 --- a/lib/pleroma/gopher/server.ex +++ b/lib/pleroma/gopher/server.ex @@ -6,7 +6,7 @@ defmodule Pleroma.Gopher.Server do use GenServer require Logger - def start_link() do + def start_link do config = Pleroma.Config.get(:gopher, []) ip = Keyword.get(config, :ip, {0, 0, 0, 0}) port = Keyword.get(config, :port, 1234) @@ -36,11 +36,11 @@ def init([ip, port]) do end defmodule 
Pleroma.Gopher.Server.ProtocolHandler do - alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Activity alias Pleroma.HTML alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.ActivityPub.Visibility def start_link(ref, socket, transport, opts) do pid = spawn_link(__MODULE__, :init, [ref, socket, transport, opts]) @@ -65,7 +65,8 @@ def info(text) do def link(name, selector, type \\ 1) do address = Pleroma.Web.Endpoint.host() port = Pleroma.Config.get([:gopher, :port], 1234) - "#{type}#{name}\t#{selector}\t#{address}\t#{port}\r\n" + dstport = Pleroma.Config.get([:gopher, :dstport], port) + "#{type}#{name}\t#{selector}\t#{address}\t#{dstport}\r\n" end def render_activities(activities) do diff --git a/lib/pleroma/html.ex b/lib/pleroma/html.ex index 4dc6998b1..7f1dbe28c 100644 --- a/lib/pleroma/html.ex +++ b/lib/pleroma/html.ex @@ -9,7 +9,7 @@ defp get_scrubbers(scrubber) when is_atom(scrubber), do: [scrubber] defp get_scrubbers(scrubbers) when is_list(scrubbers), do: scrubbers defp get_scrubbers(_), do: [Pleroma.HTML.Scrubber.Default] - def get_scrubbers() do + def get_scrubbers do Pleroma.Config.get([:markup, :scrub_policy]) |> get_scrubbers end @@ -28,27 +28,39 @@ def filter_tags(html, scrubber), do: Scrubber.scrub(html, scrubber) def filter_tags(html), do: filter_tags(html, nil) def strip_tags(html), do: Scrubber.scrub(html, Scrubber.StripTags) - def get_cached_scrubbed_html_for_object(content, scrubbers, object, module) do - key = "#{module}#{generate_scrubber_signature(scrubbers)}|#{object.id}" - Cachex.fetch!(:scrubber_cache, key, fn _key -> ensure_scrubbed_html(content, scrubbers) end) + def get_cached_scrubbed_html_for_activity(content, scrubbers, activity, key \\ "") do + key = "#{key}#{generate_scrubber_signature(scrubbers)}|#{activity.id}" + + Cachex.fetch!(:scrubber_cache, key, fn _key -> + ensure_scrubbed_html(content, scrubbers, activity.data["object"]["fake"] || false) + end) end - def get_cached_stripped_html_for_object(content, object, module) do - get_cached_scrubbed_html_for_object( + def get_cached_stripped_html_for_activity(content, activity, key) do + get_cached_scrubbed_html_for_activity( content, HtmlSanitizeEx.Scrubber.StripTags, - object, - module + activity, + key ) end def ensure_scrubbed_html( content, - scrubbers + scrubbers, + false = _fake ) do {:commit, filter_tags(content, scrubbers)} end + def ensure_scrubbed_html( + content, + scrubbers, + true = _fake + ) do + {:ignore, filter_tags(content, scrubbers)} + end + defp generate_scrubber_signature(scrubber) when is_atom(scrubber) do generate_scrubber_signature([scrubber]) end @@ -95,6 +107,13 @@ defmodule Pleroma.HTML.Scrubber.TwitterText do Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes) Meta.allow_tag_with_these_attributes("a", ["name", "title", "class"]) + Meta.allow_tag_with_this_attribute_values("a", "rel", [ + "tag", + "nofollow", + "noopener", + "noreferrer" + ]) + # paragraphs and linebreaks Meta.allow_tag_with_these_attributes("br", []) Meta.allow_tag_with_these_attributes("p", []) @@ -137,6 +156,13 @@ defmodule Pleroma.HTML.Scrubber.Default do Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes) Meta.allow_tag_with_these_attributes("a", ["name", "title", "class"]) + Meta.allow_tag_with_this_attribute_values("a", "rel", [ + "tag", + "nofollow", + "noopener", + "noreferrer" + ]) + Meta.allow_tag_with_these_attributes("abbr", ["title"]) 
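The `rel` whitelist added to both scrubbers keeps the relation values the backend itself generates while still stripping every attribute that is not explicitly allowed. A small illustration of the intended effect; the input markup is made up, and HtmlSanitizeEx may normalise quoting and attribute order differently:

```elixir
html = ~s(<a href="https://example.com/tag/pleroma" rel="tag" onclick="alert(1)">#pleroma</a>)

Pleroma.HTML.filter_tags(html, Pleroma.HTML.Scrubber.TwitterText)
# => ~s(<a href="https://example.com/tag/pleroma" rel="tag">#pleroma</a>)
# onclick is not an allow-listed attribute and is dropped; rel survives only because
# "tag" is one of the whitelisted values ("tag", "nofollow", "noopener", "noreferrer").
```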
Meta.allow_tag_with_these_attributes("b", []) diff --git a/lib/pleroma/http/connection.ex b/lib/pleroma/http/connection.ex index b798eaa5a..c0173465a 100644 --- a/lib/pleroma/http/connection.ex +++ b/lib/pleroma/http/connection.ex @@ -8,8 +8,8 @@ defmodule Pleroma.HTTP.Connection do """ @hackney_options [ - timeout: 10000, - recv_timeout: 20000, + connect_timeout: 2_000, + recv_timeout: 20_000, follow_redirect: true, pool: :federation ] @@ -31,6 +31,10 @@ def new(opts \\ []) do # defp hackney_options(opts) do options = Keyword.get(opts, :adapter, []) - @hackney_options ++ options + adapter_options = Pleroma.Config.get([:http, :adapter], []) + + @hackney_options + |> Keyword.merge(adapter_options) + |> Keyword.merge(options) end end diff --git a/lib/pleroma/http/http.ex b/lib/pleroma/http/http.ex index 75c58e6c9..c5f720bc9 100644 --- a/lib/pleroma/http/http.ex +++ b/lib/pleroma/http/http.ex @@ -27,21 +27,29 @@ defmodule Pleroma.HTTP do """ def request(method, url, body \\ "", headers \\ [], options \\ []) do - options = - process_request_options(options) - |> process_sni_options(url) + try do + options = + process_request_options(options) + |> process_sni_options(url) - params = Keyword.get(options, :params, []) + params = Keyword.get(options, :params, []) - %{} - |> Builder.method(method) - |> Builder.headers(headers) - |> Builder.opts(options) - |> Builder.url(url) - |> Builder.add_param(:body, :body, body) - |> Builder.add_param(:query, :query, params) - |> Enum.into([]) - |> (&Tesla.request(Connection.new(), &1)).() + %{} + |> Builder.method(method) + |> Builder.headers(headers) + |> Builder.opts(options) + |> Builder.url(url) + |> Builder.add_param(:body, :body, body) + |> Builder.add_param(:query, :query, params) + |> Enum.into([]) + |> (&Tesla.request(Connection.new(options), &1)).() + rescue + e -> + {:error, e} + catch + :exit, e -> + {:error, e} + end end defp process_sni_options(options, nil), do: options diff --git a/lib/pleroma/instances/instance.ex b/lib/pleroma/instances/instance.ex index 48bc939dd..420803a8f 100644 --- a/lib/pleroma/instances/instance.ex +++ b/lib/pleroma/instances/instance.ex @@ -2,8 +2,8 @@ defmodule Pleroma.Instances.Instance do @moduledoc "Instance." alias Pleroma.Instances - alias Pleroma.Repo alias Pleroma.Instances.Instance + alias Pleroma.Repo use Ecto.Schema @@ -12,7 +12,7 @@ defmodule Pleroma.Instances.Instance do schema "instances" do field(:host, :string) - field(:unreachable_since, :naive_datetime) + field(:unreachable_since, :naive_datetime_usec) timestamps() end diff --git a/lib/pleroma/jobs.ex b/lib/pleroma/jobs.ex deleted file mode 100644 index 24b7e5e46..000000000 --- a/lib/pleroma/jobs.ex +++ /dev/null @@ -1,152 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Jobs do - @moduledoc """ - A basic job queue - """ - use GenServer - - require Logger - - def init(args) do - {:ok, args} - end - - def start_link do - queues = - Pleroma.Config.get(Pleroma.Jobs) - |> Enum.map(fn {name, _} -> create_queue(name) end) - |> Enum.into(%{}) - - state = %{ - queues: queues, - refs: %{} - } - - GenServer.start_link(__MODULE__, state, name: __MODULE__) - end - - def create_queue(name) do - {name, {:sets.new(), []}} - end - - @doc """ - Enqueues a job. - - Returns `:ok`. - - ## Arguments - - - `queue_name` - a queue name(must be specified in the config). - - `mod` - a worker module (must have `perform` function). 
- - `args` - a list of arguments for the `perform` function of the worker module. - - `priority` - a job priority (`0` by default). - - ## Examples - - Enqueue `Module.perform/0` with `priority=1`: - - iex> Pleroma.Jobs.enqueue(:example_queue, Module, []) - :ok - - Enqueue `Module.perform(:job_name)` with `priority=5`: - - iex> Pleroma.Jobs.enqueue(:example_queue, Module, [:job_name], 5) - :ok - - Enqueue `Module.perform(:another_job, data)` with `priority=1`: - - iex> data = "foobar" - iex> Pleroma.Jobs.enqueue(:example_queue, Module, [:another_job, data]) - :ok - - Enqueue `Module.perform(:foobar_job, :foo, :bar, 42)` with `priority=1`: - - iex> Pleroma.Jobs.enqueue(:example_queue, Module, [:foobar_job, :foo, :bar, 42]) - :ok - - """ - - def enqueue(queue_name, mod, args, priority \\ 1) - - if Mix.env() == :test do - def enqueue(_queue_name, mod, args, _priority) do - apply(mod, :perform, args) - end - else - @spec enqueue(atom(), atom(), [any()], integer()) :: :ok - def enqueue(queue_name, mod, args, priority) do - GenServer.cast(__MODULE__, {:enqueue, queue_name, mod, args, priority}) - end - end - - def handle_cast({:enqueue, queue_name, mod, args, priority}, state) do - {running_jobs, queue} = state[:queues][queue_name] - - queue = enqueue_sorted(queue, {mod, args}, priority) - - state = - state - |> update_queue(queue_name, {running_jobs, queue}) - |> maybe_start_job(queue_name, running_jobs, queue) - - {:noreply, state} - end - - def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do - queue_name = state.refs[ref] - - {running_jobs, queue} = state[:queues][queue_name] - - running_jobs = :sets.del_element(ref, running_jobs) - - state = - state - |> remove_ref(ref) - |> update_queue(queue_name, {running_jobs, queue}) - |> maybe_start_job(queue_name, running_jobs, queue) - - {:noreply, state} - end - - def maybe_start_job(state, queue_name, running_jobs, queue) do - if :sets.size(running_jobs) < Pleroma.Config.get([__MODULE__, queue_name, :max_jobs]) && - queue != [] do - {{mod, args}, queue} = queue_pop(queue) - {:ok, pid} = Task.start(fn -> apply(mod, :perform, args) end) - mref = Process.monitor(pid) - - state - |> add_ref(queue_name, mref) - |> update_queue(queue_name, {:sets.add_element(mref, running_jobs), queue}) - else - state - end - end - - def enqueue_sorted(queue, element, priority) do - [%{item: element, priority: priority} | queue] - |> Enum.sort_by(fn %{priority: priority} -> priority end) - end - - def queue_pop([%{item: element} | queue]) do - {element, queue} - end - - defp add_ref(state, queue_name, ref) do - refs = Map.put(state[:refs], ref, queue_name) - Map.put(state, :refs, refs) - end - - defp remove_ref(state, ref) do - refs = Map.delete(state[:refs], ref) - Map.put(state, :refs, refs) - end - - defp update_queue(state, queue_name, data) do - queues = Map.put(state[:queues], queue_name, data) - Map.put(state, :queues, queues) - end -end diff --git a/lib/pleroma/list.ex b/lib/pleroma/list.ex index 55c4cf6df..110be8355 100644 --- a/lib/pleroma/list.ex +++ b/lib/pleroma/list.ex @@ -80,7 +80,7 @@ def get_lists_from_activity(%Activity{actor: ap_id}) do # Get lists to which the account belongs. 
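This hunk, like the password reset token and the `unsubscribe` task earlier in the patch, replaces a bare `Repo.get(User, id)` with `User.get_by_id/1`. The wrapper itself lives in `lib/pleroma/user.ex`, which is not shown here; the before/after at a call site looks like this (the variable name is illustrative):

```elixir
# Before: go through the repository directly
user = Pleroma.Repo.get(Pleroma.User, account_id)

# After: use the User module's own lookup helper (assumed to wrap the same Repo.get,
# so behaviour at the call site is unchanged)
user = Pleroma.User.get_by_id(account_id)
```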
def get_lists_account_belongs(%User{} = owner, account_id) do - user = Repo.get(User, account_id) + user = User.get_by_id(account_id) query = from( diff --git a/lib/pleroma/notification.ex b/lib/pleroma/notification.ex index 0f9f74b1e..7de2d4c18 100644 --- a/lib/pleroma/notification.ex +++ b/lib/pleroma/notification.ex @@ -5,14 +5,17 @@ defmodule Pleroma.Notification do use Ecto.Schema - alias Pleroma.User alias Pleroma.Activity alias Pleroma.Notification + alias Pleroma.Object + alias Pleroma.Pagination alias Pleroma.Repo - alias Pleroma.Web.CommonAPI.Utils + alias Pleroma.User alias Pleroma.Web.CommonAPI + alias Pleroma.Web.CommonAPI.Utils import Ecto.Query + import Ecto.Changeset schema "notifications" do field(:seen, :boolean, default: false) @@ -22,36 +25,37 @@ defmodule Pleroma.Notification do timestamps() end - # TODO: Make generic and unify (see activity_pub.ex) - defp restrict_max(query, %{"max_id" => max_id}) do - from(activity in query, where: activity.id < ^max_id) + def changeset(%Notification{} = notification, attrs) do + notification + |> cast(attrs, [:seen]) end - defp restrict_max(query, _), do: query - - defp restrict_since(query, %{"since_id" => since_id}) do - from(activity in query, where: activity.id > ^since_id) - end - - defp restrict_since(query, _), do: query - - def for_user(user, opts \\ %{}) do - from( - n in Notification, - where: n.user_id == ^user.id, - order_by: [desc: n.id], - join: activity in assoc(n, :activity), - preload: [activity: activity], - limit: 20, - where: + def for_user_query(user) do + Notification + |> where(user_id: ^user.id) + |> where( + [n, a], + fragment( + "? not in (SELECT ap_id FROM users WHERE info->'disabled' @> 'true')", + a.actor + ) + ) + |> join(:inner, [n], activity in assoc(n, :activity)) + |> join(:left, [n, a], object in Object, + on: fragment( - "? not in (SELECT ap_id FROM users WHERE info->'disabled' @> 'true')", - activity.actor + "(?->>'id') = COALESCE((? -> 'object'::text) ->> 'id'::text)", + object.data, + a.data ) ) - |> restrict_since(opts) - |> restrict_max(opts) - |> Repo.all() + |> preload([n, a, o], activity: {a, object: o}) + end + + def for_user(user, opts \\ %{}) do + user + |> for_user_query() + |> Pagination.fetch_paginated(opts) end def set_read_up_to(%{id: user_id} = _user, id) do @@ -68,6 +72,14 @@ def set_read_up_to(%{id: user_id} = _user, id) do Repo.update_all(query, []) end + def read_one(%User{} = user, notification_id) do + with {:ok, %Notification{} = notification} <- get(user, notification_id) do + notification + |> changeset(%{seen: true}) + |> Repo.update() + end + end + def get(%{id: user_id} = _user, id) do query = from( @@ -117,13 +129,7 @@ def create_notifications(_), do: {:ok, []} # TODO move to sql, too. 
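With `for_user_query/1` feeding into the shared `Pleroma.Pagination` module introduced later in this patch, notification fetching accepts Mastodon-style paging parameters and returns each activity with its object already joined in. A minimal usage sketch; the nickname and limit are illustrative:

```elixir
user = Pleroma.User.get_by_nickname("lain")   # illustrative nickname

# String-keyed maps are fine: cast_params/1 in Pleroma.Pagination casts the keys it
# knows about (min_id, since_id, max_id, limit) and ignores the rest.
notifications = Pleroma.Notification.for_user(user, %{"limit" => 10})

# Activities (and their objects, when present) come preloaded, so inspecting them
# does not trigger per-notification queries:
Enum.map(notifications, fn n -> {n.id, n.activity.data["type"]} end)
```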
def create_notification(%Activity{} = activity, %User{} = user) do - unless User.blocks?(user, %{ap_id: activity.data["actor"]}) or - CommonAPI.thread_muted?(user, activity) or user.ap_id == activity.data["actor"] or - (activity.data["type"] == "Follow" and - Enum.any?(Notification.for_user(user), fn notif -> - notif.activity.data["type"] == "Follow" and - notif.activity.data["actor"] == activity.data["actor"] - end)) do + unless skip?(activity, user) do notification = %Notification{user_id: user.id, activity: activity} {:ok, notification} = Repo.insert(notification) Pleroma.Web.Streamer.stream("user", notification) @@ -143,10 +149,66 @@ def get_notified_from_activity( [] |> Utils.maybe_notify_to_recipients(activity) |> Utils.maybe_notify_mentioned_recipients(activity) + |> Utils.maybe_notify_subscribers(activity) |> Enum.uniq() User.get_users_from_set(recipients, local_only) end def get_notified_from_activity(_, _local_only), do: [] + + def skip?(activity, user) do + [:self, :blocked, :local, :muted, :followers, :follows, :recently_followed] + |> Enum.any?(&skip?(&1, activity, user)) + end + + def skip?(:self, activity, user) do + activity.data["actor"] == user.ap_id + end + + def skip?(:blocked, activity, user) do + actor = activity.data["actor"] + User.blocks?(user, %{ap_id: actor}) + end + + def skip?(:local, %{local: true}, %{info: %{notification_settings: %{"local" => false}}}), + do: true + + def skip?(:local, %{local: false}, %{info: %{notification_settings: %{"remote" => false}}}), + do: true + + def skip?(:muted, activity, user) do + actor = activity.data["actor"] + + User.mutes?(user, %{ap_id: actor}) or + CommonAPI.thread_muted?(user, activity) + end + + def skip?( + :followers, + activity, + %{info: %{notification_settings: %{"followers" => false}}} = user + ) do + actor = activity.data["actor"] + follower = User.get_cached_by_ap_id(actor) + User.following?(follower, user) + end + + def skip?(:follows, activity, %{info: %{notification_settings: %{"follows" => false}}} = user) do + actor = activity.data["actor"] + followed = User.get_by_ap_id(actor) + User.following?(user, followed) + end + + def skip?(:recently_followed, %{data: %{"type" => "Follow"}} = activity, user) do + actor = activity.data["actor"] + + Notification.for_user(user) + |> Enum.any?(fn + %{activity: %{data: %{"type" => "Follow", "actor" => ^actor}}} -> true + _ -> false + end) + end + + def skip?(_, _, _), do: false end diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 5f1fc801b..013d62157 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -5,15 +5,17 @@ defmodule Pleroma.Object do use Ecto.Schema - alias Pleroma.Repo - alias Pleroma.Object - alias Pleroma.User alias Pleroma.Activity + alias Pleroma.Object alias Pleroma.ObjectTombstone + alias Pleroma.Repo + alias Pleroma.User import Ecto.Query import Ecto.Changeset + require Logger + schema "objects" do field(:data, :map) @@ -38,6 +40,38 @@ def get_by_ap_id(ap_id) do Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id))) end + # If we pass an Activity to Object.normalize(), we can try to use the preloaded object. + # Use this whenever possible, especially when walking graphs in an O(N) loop! 
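The comments above spell out the intended calling convention: fetch the activity with its object joined in, then `Object.normalize/1` can use the preload instead of doing another lookup. A short sketch of that pattern; the AP id is illustrative:

```elixir
ap_id = "https://example.com/activities/1"   # illustrative

# One query: the activity plus its child object, preloaded via the fragment join.
activity = Pleroma.Activity.get_by_ap_id_with_object(ap_id)

# Because activity.object is already a %Pleroma.Object{}, the first normalize/1
# clause returns it directly instead of logging the "called without preloaded
# object" debug message and falling back to a cache lookup.
object = Pleroma.Object.normalize(activity)
```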
+ def normalize(%Activity{object: %Object{} = object}), do: object + + # A hack for fake activities + def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}) do + %Object{id: "pleroma:fake_object_id", data: data} + end + + # Catch and log Object.normalize() calls where the Activity's child object is not + # preloaded. + def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}) do + Logger.debug( + "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!" + ) + + Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}") + + normalize(ap_id) + end + + def normalize(%Activity{data: %{"object" => ap_id}}) do + Logger.debug( + "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!" + ) + + Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}") + + normalize(ap_id) + end + + # Old way, try fetching the object through cache. def normalize(%{"id" => ap_id}), do: normalize(ap_id) def normalize(ap_id) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id) def normalize(_), do: nil @@ -86,9 +120,9 @@ def swap_object_with_tombstone(object) do def delete(%Object{data: %{"id" => id}} = object) do with {:ok, _obj} = swap_object_with_tombstone(object), - Repo.delete_all(Activity.by_object_ap_id(id)), + deleted_activity = Activity.delete_by_ap_id(id), {:ok, true} <- Cachex.del(:object_cache, "object:#{id}") do - {:ok, object} + {:ok, object, deleted_activity} end end @@ -104,4 +138,50 @@ def update_and_set_cache(changeset) do e -> e end end + + def increase_replies_count(ap_id) do + Object + |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id))) + |> update([o], + set: [ + data: + fragment( + """ + jsonb_set(?, '{repliesCount}', + (coalesce((?->>'repliesCount')::int, 0) + 1)::varchar::jsonb, true) + """, + o.data, + o.data + ) + ] + ) + |> Repo.update_all([]) + |> case do + {1, [object]} -> set_cache(object) + _ -> {:error, "Not found"} + end + end + + def decrease_replies_count(ap_id) do + Object + |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id))) + |> update([o], + set: [ + data: + fragment( + """ + jsonb_set(?, '{repliesCount}', + (greatest(0, (?->>'repliesCount')::int - 1))::varchar::jsonb, true) + """, + o.data, + o.data + ) + ] + ) + |> Repo.update_all([]) + |> case do + {1, [object]} -> set_cache(object) + _ -> {:error, "Not found"} + end + end end diff --git a/lib/pleroma/pagination.ex b/lib/pleroma/pagination.ex new file mode 100644 index 000000000..7c864deef --- /dev/null +++ b/lib/pleroma/pagination.ex @@ -0,0 +1,78 @@ +defmodule Pleroma.Pagination do + @moduledoc """ + Implements Mastodon-compatible pagination. 
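A usage sketch for the new helper (the query and ids below are illustrative): `max_id` pages backwards from the newest rows, while `min_id` flips the SQL ordering so the rows just above `min_id` are selected and then reverses the result, so callers always receive a newest-first page.

```elixir
import Ecto.Query

query = from(u in Pleroma.User, where: u.local == true)   # illustrative query

# First page, newest first:
page = Pleroma.Pagination.fetch_paginated(query, %{"limit" => 20})
oldest_on_page = List.last(page)

# Older entries:
Pleroma.Pagination.fetch_paginated(query, %{"limit" => 20, "max_id" => oldest_on_page.id})

# Entries newer than a known id; ordered ascending in SQL, reversed by enforce_order/2:
Pleroma.Pagination.fetch_paginated(query, %{"limit" => 20, "min_id" => oldest_on_page.id})
```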
+ """ + + import Ecto.Query + import Ecto.Changeset + + alias Pleroma.Repo + + @default_limit 20 + + def fetch_paginated(query, params) do + options = cast_params(params) + + query + |> paginate(options) + |> Repo.all() + |> enforce_order(options) + end + + def paginate(query, options) do + query + |> restrict(:min_id, options) + |> restrict(:since_id, options) + |> restrict(:max_id, options) + |> restrict(:order, options) + |> restrict(:limit, options) + end + + defp cast_params(params) do + param_types = %{ + min_id: :string, + since_id: :string, + max_id: :string, + limit: :integer + } + + changeset = cast({%{}, param_types}, params, Map.keys(param_types)) + changeset.changes + end + + defp restrict(query, :min_id, %{min_id: min_id}) do + where(query, [q], q.id > ^min_id) + end + + defp restrict(query, :since_id, %{since_id: since_id}) do + where(query, [q], q.id > ^since_id) + end + + defp restrict(query, :max_id, %{max_id: max_id}) do + where(query, [q], q.id < ^max_id) + end + + defp restrict(query, :order, %{min_id: _}) do + order_by(query, [u], fragment("? asc nulls last", u.id)) + end + + defp restrict(query, :order, _options) do + order_by(query, [u], fragment("? desc nulls last", u.id)) + end + + defp restrict(query, :limit, options) do + limit = Map.get(options, :limit, @default_limit) + + query + |> limit(^limit) + end + + defp restrict(query, _, _), do: query + + defp enforce_order(result, %{min_id: _}) do + result + |> Enum.reverse() + end + + defp enforce_order(result, _), do: result +end diff --git a/lib/pleroma/plugs/http_security_plug.ex b/lib/pleroma/plugs/http_security_plug.ex index 057553e24..f701aaaa5 100644 --- a/lib/pleroma/plugs/http_security_plug.ex +++ b/lib/pleroma/plugs/http_security_plug.ex @@ -34,13 +34,16 @@ defp headers do defp csp_string do scheme = Config.get([Pleroma.Web.Endpoint, :url])[:scheme] - websocket_url = String.replace(Pleroma.Web.Endpoint.static_url(), "http", "ws") + static_url = Pleroma.Web.Endpoint.static_url() + websocket_url = String.replace(static_url, "http", "ws") + + connect_src = "connect-src 'self' #{static_url} #{websocket_url}" connect_src = if Mix.env() == :dev do - "connect-src 'self' http://localhost:3035/ " <> websocket_url + connect_src <> " http://localhost:3035/" else - "connect-src 'self' " <> websocket_url + connect_src end script_src = diff --git a/lib/pleroma/plugs/http_signature.ex b/lib/pleroma/plugs/http_signature.ex index 51bec910e..21c195713 100644 --- a/lib/pleroma/plugs/http_signature.ex +++ b/lib/pleroma/plugs/http_signature.ex @@ -3,8 +3,8 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do - alias Pleroma.Web.HTTPSignatures alias Pleroma.Web.ActivityPub.Utils + alias Pleroma.Web.HTTPSignatures import Plug.Conn require Logger diff --git a/lib/pleroma/plugs/instance_static.ex b/lib/pleroma/plugs/instance_static.ex index 41125921a..a64f1ea80 100644 --- a/lib/pleroma/plugs/instance_static.ex +++ b/lib/pleroma/plugs/instance_static.ex @@ -21,7 +21,8 @@ def file_path(path) do end end - @only ~w(index.html static emoji packs sounds images instance favicon.png sw.js sw-pleroma.js) + @only ~w(index.html robots.txt static emoji packs sounds images instance favicon.png sw.js + sw-pleroma.js) def init(opts) do opts diff --git a/lib/pleroma/plugs/oauth_plug.ex b/lib/pleroma/plugs/oauth_plug.ex index 22f0406f4..5888d596a 100644 --- a/lib/pleroma/plugs/oauth_plug.ex +++ b/lib/pleroma/plugs/oauth_plug.ex @@ -6,8 +6,8 @@ defmodule Pleroma.Plugs.OAuthPlug do import Plug.Conn import 
Ecto.Query - alias Pleroma.User alias Pleroma.Repo + alias Pleroma.User alias Pleroma.Web.OAuth.Token @realm_reg Regex.compile!("Bearer\:?\s+(.*)$", "i") @@ -38,6 +38,7 @@ defp fetch_user_and_token(token) do preload: [user: user] ) + # credo:disable-for-next-line Credo.Check.Readability.MaxLineLength with %Token{user: %{info: %{deactivated: false} = _} = user} = token_record <- Repo.one(query) do {:ok, user, token_record} end diff --git a/lib/pleroma/plugs/uploaded_media.ex b/lib/pleroma/plugs/uploaded_media.ex index 13aa8641a..fd77b8d8f 100644 --- a/lib/pleroma/plugs/uploaded_media.ex +++ b/lib/pleroma/plugs/uploaded_media.ex @@ -24,6 +24,18 @@ def init(_opts) do end def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do + conn = + case fetch_query_params(conn) do + %{query_params: %{"name" => name}} = conn -> + name = String.replace(name, "\"", "\\\"") + + conn + |> put_resp_header("content-disposition", "filename=\"#{name}\"") + + conn -> + conn + end + config = Pleroma.Config.get([Pleroma.Upload]) with uploader <- Keyword.fetch!(config, :uploader), diff --git a/lib/pleroma/plugs/user_fetcher_plug.ex b/lib/pleroma/plugs/user_fetcher_plug.ex index 7ed4602bb..4089aa958 100644 --- a/lib/pleroma/plugs/user_fetcher_plug.ex +++ b/lib/pleroma/plugs/user_fetcher_plug.ex @@ -4,8 +4,6 @@ defmodule Pleroma.Plugs.UserFetcherPlug do alias Pleroma.User - alias Pleroma.Repo - import Plug.Conn def init(options) do @@ -14,26 +12,10 @@ def init(options) do def call(conn, _options) do with %{auth_credentials: %{username: username}} <- conn.assigns, - {:ok, %User{} = user} <- user_fetcher(username) do - conn - |> assign(:auth_user, user) + %User{} = user <- User.get_by_nickname_or_email(username) do + assign(conn, :auth_user, user) else _ -> conn end end - - defp user_fetcher(username_or_email) do - { - :ok, - cond do - # First, try logging in as if it was a name - user = Repo.get_by(User, %{nickname: username_or_email}) -> - user - - # If we get nil, we try using it as an email - user = Repo.get_by(User, %{email: username_or_email}) -> - user - end - } - end end diff --git a/lib/pleroma/registration.ex b/lib/pleroma/registration.ex new file mode 100644 index 000000000..21fd1fc3f --- /dev/null +++ b/lib/pleroma/registration.ex @@ -0,0 +1,57 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Registration do + use Ecto.Schema + + import Ecto.Changeset + + alias Pleroma.Registration + alias Pleroma.Repo + alias Pleroma.User + + @primary_key {:id, Pleroma.FlakeId, autogenerate: true} + + schema "registrations" do + belongs_to(:user, User, type: Pleroma.FlakeId) + field(:provider, :string) + field(:uid, :string) + field(:info, :map, default: %{}) + + timestamps() + end + + def nickname(registration, default \\ nil), + do: Map.get(registration.info, "nickname", default) + + def email(registration, default \\ nil), + do: Map.get(registration.info, "email", default) + + def name(registration, default \\ nil), + do: Map.get(registration.info, "name", default) + + def description(registration, default \\ nil), + do: Map.get(registration.info, "description", default) + + def changeset(registration, params \\ %{}) do + registration + |> cast(params, [:user_id, :provider, :uid, :info]) + |> validate_required([:provider, :uid]) + |> foreign_key_constraint(:user_id) + |> unique_constraint(:uid, name: :registrations_provider_uid_index) + end + + def bind_to_user(registration, user) do + 
registration + |> changeset(%{user_id: (user && user.id) || nil}) + |> Repo.update() + end + + def get_by_provider_uid(provider, uid) do + Repo.get_by(Registration, + provider: to_string(provider), + uid: to_string(uid) + ) + end +end diff --git a/lib/pleroma/repo.ex b/lib/pleroma/repo.ex index e6a51b19e..aa5d427ae 100644 --- a/lib/pleroma/repo.ex +++ b/lib/pleroma/repo.ex @@ -3,7 +3,14 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Repo do - use Ecto.Repo, otp_app: :pleroma + use Ecto.Repo, + otp_app: :pleroma, + adapter: Ecto.Adapters.Postgres, + migration_timestamps: [type: :naive_datetime_usec] + + defmodule Instrumenter do + use Prometheus.EctoInstrumenter + end @doc """ Dynamically loads the repository url from the diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy.ex index a25b5ea4e..a3f177fec 100644 --- a/lib/pleroma/reverse_proxy.ex +++ b/lib/pleroma/reverse_proxy.ex @@ -3,10 +3,12 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.ReverseProxy do - @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since if-unmodified-since if-none-match if-range range) + @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++ + ~w(if-unmodified-since if-none-match if-range range) @resp_cache_headers ~w(etag date last-modified cache-control) @keep_resp_headers @resp_cache_headers ++ - ~w(content-type content-disposition content-encoding content-range accept-ranges vary) + ~w(content-type content-disposition content-encoding content-range) ++ + ~w(accept-ranges vary) @default_cache_control_header "public, max-age=1209600" @valid_resp_codes [200, 206, 304] @max_read_duration :timer.seconds(30) @@ -282,8 +284,8 @@ defp build_resp_cache_headers(headers, _opts) do headers has_cache? -> - # There's caching header present but no cache-control -- we need to explicitely override it to public - # as Plug defaults to "max-age=0, private, must-revalidate" + # There's caching header present but no cache-control -- we need to explicitely override it + # to public as Plug defaults to "max-age=0, private, must-revalidate" List.keystore(headers, "cache-control", 0, {"cache-control", "public"}) true -> @@ -309,7 +311,25 @@ defp build_resp_content_disposition_header(headers, opts) do end if attachment? 
do - disposition = "attachment; filename=" <> Keyword.get(opts, :attachment_name, "attachment") + name = + try do + {{"content-disposition", content_disposition_string}, _} = + List.keytake(headers, "content-disposition", 0) + + [name | _] = + Regex.run( + ~r/filename="((?:[^"\\]|\\.)*)"/u, + content_disposition_string || "", + capture: :all_but_first + ) + + name + rescue + MatchError -> Keyword.get(opts, :attachment_name, "attachment") + end + + disposition = "attachment; filename=\"#{name}\"" + List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition}) else headers diff --git a/lib/pleroma/scheduled_activity.ex b/lib/pleroma/scheduled_activity.ex new file mode 100644 index 000000000..de0e54699 --- /dev/null +++ b/lib/pleroma/scheduled_activity.ex @@ -0,0 +1,161 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.ScheduledActivity do + use Ecto.Schema + + alias Pleroma.Config + alias Pleroma.Repo + alias Pleroma.ScheduledActivity + alias Pleroma.User + alias Pleroma.Web.CommonAPI.Utils + + import Ecto.Query + import Ecto.Changeset + + @min_offset :timer.minutes(5) + + schema "scheduled_activities" do + belongs_to(:user, User, type: Pleroma.FlakeId) + field(:scheduled_at, :naive_datetime) + field(:params, :map) + + timestamps() + end + + def changeset(%ScheduledActivity{} = scheduled_activity, attrs) do + scheduled_activity + |> cast(attrs, [:scheduled_at, :params]) + |> validate_required([:scheduled_at, :params]) + |> validate_scheduled_at() + |> with_media_attachments() + end + + defp with_media_attachments( + %{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset + ) + when is_list(media_ids) do + media_attachments = Utils.attachments_from_ids(%{"media_ids" => media_ids}) + + params = + params + |> Map.put("media_attachments", media_attachments) + |> Map.put("media_ids", media_ids) + + put_change(changeset, :params, params) + end + + defp with_media_attachments(changeset), do: changeset + + def update_changeset(%ScheduledActivity{} = scheduled_activity, attrs) do + scheduled_activity + |> cast(attrs, [:scheduled_at]) + |> validate_required([:scheduled_at]) + |> validate_scheduled_at() + end + + def validate_scheduled_at(changeset) do + validate_change(changeset, :scheduled_at, fn _, scheduled_at -> + cond do + not far_enough?(scheduled_at) -> + [scheduled_at: "must be at least 5 minutes from now"] + + exceeds_daily_user_limit?(changeset.data.user_id, scheduled_at) -> + [scheduled_at: "daily limit exceeded"] + + exceeds_total_user_limit?(changeset.data.user_id) -> + [scheduled_at: "total limit exceeded"] + + true -> + [] + end + end) + end + + def exceeds_daily_user_limit?(user_id, scheduled_at) do + ScheduledActivity + |> where(user_id: ^user_id) + |> where([sa], type(sa.scheduled_at, :date) == type(^scheduled_at, :date)) + |> select([sa], count(sa.id)) + |> Repo.one() + |> Kernel.>=(Config.get([ScheduledActivity, :daily_user_limit])) + end + + def exceeds_total_user_limit?(user_id) do + ScheduledActivity + |> where(user_id: ^user_id) + |> select([sa], count(sa.id)) + |> Repo.one() + |> Kernel.>=(Config.get([ScheduledActivity, :total_user_limit])) + end + + def far_enough?(scheduled_at) when is_binary(scheduled_at) do + with {:ok, scheduled_at} <- Ecto.Type.cast(:naive_datetime, scheduled_at) do + far_enough?(scheduled_at) + else + _ -> false + end + end + + def far_enough?(scheduled_at) do + now = NaiveDateTime.utc_now() + diff = 
NaiveDateTime.diff(scheduled_at, now, :millisecond) + diff > @min_offset + end + + def new(%User{} = user, attrs) do + %ScheduledActivity{user_id: user.id} + |> changeset(attrs) + end + + def create(%User{} = user, attrs) do + user + |> new(attrs) + |> Repo.insert() + end + + def get(%User{} = user, scheduled_activity_id) do + ScheduledActivity + |> where(user_id: ^user.id) + |> where(id: ^scheduled_activity_id) + |> Repo.one() + end + + def update(%ScheduledActivity{} = scheduled_activity, attrs) do + scheduled_activity + |> update_changeset(attrs) + |> Repo.update() + end + + def delete(%ScheduledActivity{} = scheduled_activity) do + scheduled_activity + |> Repo.delete() + end + + def delete(id) when is_binary(id) or is_integer(id) do + ScheduledActivity + |> where(id: ^id) + |> select([sa], sa) + |> Repo.delete_all() + |> case do + {1, [scheduled_activity]} -> {:ok, scheduled_activity} + _ -> :error + end + end + + def for_user_query(%User{} = user) do + ScheduledActivity + |> where(user_id: ^user.id) + end + + def due_activities(offset \\ 0) do + naive_datetime = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(offset, :millisecond) + + ScheduledActivity + |> where([sa], sa.scheduled_at < ^naive_datetime) + |> Repo.all() + end +end diff --git a/lib/pleroma/scheduled_activity_worker.ex b/lib/pleroma/scheduled_activity_worker.ex new file mode 100644 index 000000000..65b38622f --- /dev/null +++ b/lib/pleroma/scheduled_activity_worker.ex @@ -0,0 +1,58 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.ScheduledActivityWorker do + @moduledoc """ + Sends scheduled activities to the job queue. + """ + + alias Pleroma.Config + alias Pleroma.ScheduledActivity + alias Pleroma.User + alias Pleroma.Web.CommonAPI + use GenServer + require Logger + + @schedule_interval :timer.minutes(1) + + def start_link do + GenServer.start_link(__MODULE__, nil) + end + + def init(_) do + if Config.get([ScheduledActivity, :enabled]) do + schedule_next() + {:ok, nil} + else + :ignore + end + end + + def perform(:execute, scheduled_activity_id) do + try do + {:ok, scheduled_activity} = ScheduledActivity.delete(scheduled_activity_id) + %User{} = user = User.get_cached_by_id(scheduled_activity.user_id) + {:ok, _result} = CommonAPI.post(user, scheduled_activity.params) + rescue + error -> + Logger.error( + "#{__MODULE__} Couldn't create a status from the scheduled activity: #{inspect(error)}" + ) + end + end + + def handle_info(:perform, state) do + ScheduledActivity.due_activities(@schedule_interval) + |> Enum.each(fn scheduled_activity -> + PleromaJobQueue.enqueue(:scheduled_activities, __MODULE__, [:execute, scheduled_activity.id]) + end) + + schedule_next() + {:noreply, state} + end + + defp schedule_next do + Process.send_after(self(), :perform, @schedule_interval) + end +end diff --git a/lib/pleroma/stats.ex b/lib/pleroma/stats.ex index fe0ce9051..2e7d747df 100644 --- a/lib/pleroma/stats.ex +++ b/lib/pleroma/stats.ex @@ -4,8 +4,8 @@ defmodule Pleroma.Stats do import Ecto.Query - alias Pleroma.User alias Pleroma.Repo + alias Pleroma.User def start_link do agent = Agent.start_link(fn -> {[], %{}} end, name: __MODULE__) diff --git a/lib/pleroma/thread_mute.ex b/lib/pleroma/thread_mute.ex index 0b577113d..10d31679d 100644 --- a/lib/pleroma/thread_mute.ex +++ b/lib/pleroma/thread_mute.ex @@ -4,7 +4,11 @@ defmodule Pleroma.ThreadMute do use Ecto.Schema - alias Pleroma.{Repo, User, ThreadMute} + + alias 
Pleroma.Repo + alias Pleroma.ThreadMute + alias Pleroma.User + require Ecto.Query schema "thread_mutes" do diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex index 91a5db8c5..f72334930 100644 --- a/lib/pleroma/upload.ex +++ b/lib/pleroma/upload.ex @@ -70,7 +70,7 @@ def store(upload, opts \\ []) do %{ "type" => "Link", "mediaType" => upload.content_type, - "href" => url_from_spec(opts.base_url, url_spec) + "href" => url_from_spec(upload, opts.base_url, url_spec) } ], "name" => Map.get(opts, :description) || upload.name @@ -85,6 +85,10 @@ def store(upload, opts \\ []) do end end + def char_unescaped?(char) do + URI.char_unreserved?(char) or char == ?/ + end + defp get_opts(opts) do {size_limit, activity_type} = case Keyword.get(opts, :type) do @@ -215,16 +219,18 @@ defp tempfile_for_image(data) do tmp_path end - defp url_from_spec(base_url, {:file, path}) do + defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do path = - path - |> URI.encode() - |> String.replace("?", "%3F") - |> String.replace(":", "%3A") + URI.encode(path, &char_unescaped?/1) <> + if Pleroma.Config.get([__MODULE__, :link_name], false) do + "?name=#{URI.encode(name, &char_unescaped?/1)}" + else + "" + end [base_url, "media", path] |> Path.join() end - defp url_from_spec(_base_url, {:url, url}), do: url + defp url_from_spec(_upload, _base_url, {:url, url}), do: url end diff --git a/lib/pleroma/uploaders/s3.ex b/lib/pleroma/uploaders/s3.ex index 0038ba01f..521daa93b 100644 --- a/lib/pleroma/uploaders/s3.ex +++ b/lib/pleroma/uploaders/s3.ex @@ -6,16 +6,22 @@ defmodule Pleroma.Uploaders.S3 do @behaviour Pleroma.Uploaders.Uploader require Logger - # The file name is re-encoded with S3's constraints here to comply with previous links with less strict filenames + # The file name is re-encoded with S3's constraints here to comply with previous + # links with less strict filenames def get_file(file) do config = Pleroma.Config.get([__MODULE__]) bucket = Keyword.fetch!(config, :bucket) bucket_with_namespace = - if namespace = Keyword.get(config, :bucket_namespace) do - namespace <> ":" <> bucket - else - bucket + cond do + truncated_namespace = Keyword.get(config, :truncated_namespace) -> + truncated_namespace + + namespace = Keyword.get(config, :bucket_namespace) -> + namespace <> ":" <> bucket + + true -> + bucket end {:ok, diff --git a/lib/pleroma/uploaders/swift/keystone.ex b/lib/pleroma/uploaders/swift/keystone.ex index b4f250f9d..3046cdbd2 100644 --- a/lib/pleroma/uploaders/swift/keystone.ex +++ b/lib/pleroma/uploaders/swift/keystone.ex @@ -17,7 +17,7 @@ def process_response_body(body) do |> Poison.decode!() end - def get_token() do + def get_token do settings = Pleroma.Config.get(Pleroma.Uploaders.Swift) username = Keyword.fetch!(settings, :username) password = Keyword.fetch!(settings, :password) diff --git a/lib/pleroma/uploaders/uploader.ex b/lib/pleroma/uploaders/uploader.ex index ce83cbbbc..bf15389fc 100644 --- a/lib/pleroma/uploaders/uploader.ex +++ b/lib/pleroma/uploaders/uploader.ex @@ -29,7 +29,6 @@ defmodule Pleroma.Uploaders.Uploader do * `{:error, String.t}` error information if the file failed to be saved to the backend. * `:wait_callback` will wait for an http post request at `/api/pleroma/upload_callback/:upload_path` and call the uploader's `http_callback/3` method. 
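As a rough, editor-added illustration of the contract described above (not part of this patch), a backend that writes to the local filesystem could implement `put_file/1` along these lines. The `:tempfile` and `:path` fields and the target directory are assumptions borrowed from the built-in uploaders, and the behaviour's other callbacks are omitted:

      defmodule MyApp.Uploaders.Example do
        @behaviour Pleroma.Uploaders.Uploader

        def put_file(%Pleroma.Upload{tempfile: tempfile, path: path}) do
          destination = Path.join("/var/lib/pleroma/uploads", path)
          # Ensure the target directory exists, then move the spooled upload into place.
          File.mkdir_p!(Path.dirname(destination))
          File.cp!(tempfile, destination)
          {:ok, {:file, path}}
        end
      end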
- """ @type file_spec :: {:file | :url, String.t()} @callback put_file(Pleroma.Upload.t()) :: diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index f02051174..1f2aca235 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -8,21 +8,22 @@ defmodule Pleroma.User do import Ecto.Changeset import Ecto.Query + alias Comeonin.Pbkdf2 + alias Pleroma.Activity + alias Pleroma.Formatter + alias Pleroma.Notification + alias Pleroma.Object + alias Pleroma.Registration alias Pleroma.Repo alias Pleroma.User - alias Pleroma.Object alias Pleroma.Web - alias Pleroma.Activity - alias Pleroma.Notification - alias Comeonin.Pbkdf2 - alias Pleroma.Formatter - alias Pleroma.Web.CommonAPI.Utils, as: CommonUtils - alias Pleroma.Web.OStatus - alias Pleroma.Web.Websub - alias Pleroma.Web.OAuth - alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.ActivityPub.Utils + alias Pleroma.Web.CommonAPI.Utils, as: CommonUtils + alias Pleroma.Web.OAuth + alias Pleroma.Web.OStatus alias Pleroma.Web.RelMe + alias Pleroma.Web.Websub require Logger @@ -30,6 +31,7 @@ defmodule Pleroma.User do @primary_key {:id, Pleroma.FlakeId, autogenerate: true} + # credo:disable-for-next-line Credo.Check.Readability.MaxLineLength @email_regex ~r/^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/ @strict_local_nickname_regex ~r/^[a-zA-Z\d]+$/ @@ -49,23 +51,21 @@ defmodule Pleroma.User do field(:local, :boolean, default: true) field(:follower_address, :string) field(:search_rank, :float, virtual: true) + field(:search_type, :integer, virtual: true) field(:tags, {:array, :string}, default: []) field(:bookmarks, {:array, :string}, default: []) - field(:last_refreshed_at, :naive_datetime) + field(:last_refreshed_at, :naive_datetime_usec) has_many(:notifications, Notification) + has_many(:registrations, Registration) embeds_one(:info, Pleroma.User.Info) timestamps() end - def auth_active?(%User{local: false}), do: true - - def auth_active?(%User{info: %User.Info{confirmation_pending: false}}), do: true - def auth_active?(%User{info: %User.Info{confirmation_pending: true}}), do: !Pleroma.Config.get([:instance, :account_activation_required]) - def auth_active?(_), do: false + def auth_active?(%User{}), do: true def visible_for?(user, for_user \\ nil) @@ -81,17 +81,17 @@ def superuser?(%User{local: true, info: %User.Info{is_admin: true}}), do: true def superuser?(%User{local: true, info: %User.Info{is_moderator: true}}), do: true def superuser?(_), do: false - def avatar_url(user) do + def avatar_url(user, options \\ []) do case user.avatar do %{"url" => [%{"href" => href} | _]} -> href - _ -> "#{Web.base_url()}/images/avi.png" + _ -> !options[:no_default] && "#{Web.base_url()}/images/avi.png" end end - def banner_url(user) do + def banner_url(user, options \\ []) do case user.info.banner do %{"url" => [%{"href" => href} | _]} -> href - _ -> "#{Web.base_url()}/images/banner.png" + _ -> !options[:no_default] && "#{Web.base_url()}/images/banner.png" end end @@ -103,9 +103,8 @@ def ap_id(%User{nickname: nickname}) do "#{Web.base_url()}/users/#{nickname}" end - def ap_followers(%User{} = user) do - "#{ap_id(user)}/followers" - end + def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa + def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers" def user_info(%User{} = user) do %{ @@ -234,7 +233,7 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do changeset = struct |> 
cast(params, [:bio, :email, :name, :nickname, :password, :password_confirmation]) - |> validate_required([:email, :name, :nickname, :password, :password_confirmation]) + |> validate_required([:name, :nickname, :password, :password_confirmation]) |> validate_confirmation(:password) |> unique_constraint(:email) |> unique_constraint(:nickname) @@ -245,6 +244,13 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do |> validate_length(:name, min: 1, max: 100) |> put_change(:info, info_change) + changeset = + if opts[:external] do + changeset + else + validate_required(changeset, [:email]) + end + if changeset.valid? do hashed = Pbkdf2.hashpwsalt(changeset.changes[:password]) ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]}) @@ -300,7 +306,7 @@ def needs_update?(%User{local: true}), do: false def needs_update?(%User{local: false, last_refreshed_at: nil}), do: true def needs_update?(%User{local: false} = user) do - NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >= 86400 + NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >= 86_400 end def needs_update?(_), do: true @@ -349,10 +355,11 @@ def follow_all(follower, followeds) do ^followed_addresses ) ] - ] + ], + select: u ) - {1, [follower]} = Repo.update_all(q, [], returning: true) + {1, [follower]} = Repo.update_all(q, []) Enum.each(followeds, fn followed -> update_follower_count(followed) @@ -382,10 +389,11 @@ def follow(%User{} = follower, %User{info: info} = followed) do q = from(u in User, where: u.id == ^follower.id, - update: [push: [following: ^ap_followers]] + update: [push: [following: ^ap_followers]], + select: u ) - {1, [follower]} = Repo.update_all(q, [], returning: true) + {1, [follower]} = Repo.update_all(q, []) {:ok, _} = update_follower_count(followed) @@ -400,10 +408,11 @@ def unfollow(%User{} = follower, %User{} = followed) do q = from(u in User, where: u.id == ^follower.id, - update: [pull: [following: ^ap_followers]] + update: [pull: [following: ^ap_followers]], + select: u ) - {1, [follower]} = Repo.update_all(q, [], returning: true) + {1, [follower]} = Repo.update_all(q, []) {:ok, followed} = update_follower_count(followed) @@ -450,7 +459,8 @@ def get_by_ap_id(ap_id) do Repo.get_by(User, ap_id: ap_id) end - # This is mostly an SPC migration fix. This guesses the user nickname (by taking the last part of the ap_id and the domain) and tries to get that user + # This is mostly an SPC migration fix. 
This guesses the user nickname by taking the last part + # of the ap_id and the domain and tries to get that user def get_by_guessed_nickname(ap_id) do domain = URI.parse(ap_id).host name = List.last(String.split(ap_id, "/")) @@ -519,11 +529,10 @@ def get_by_nickname(nickname) do end end + def get_by_email(email), do: Repo.get_by(User, email: email) + def get_by_nickname_or_email(nickname_or_email) do - case user = Repo.get_by(User, nickname: nickname_or_email) do - %User{} -> user - nil -> Repo.get_by(User, email: nickname_or_email) - end + get_by_nickname(nickname_or_email) || get_by_email(nickname_or_email) end def get_cached_user_info(user) do @@ -547,6 +556,10 @@ def get_or_fetch_by_nickname(nickname) do _e -> with [_nick, _domain] <- String.split(nickname, "@"), {:ok, user} <- fetch_by_nickname(nickname) do + if Pleroma.Config.get([:fetch_initial_posts, :enabled]) do + {:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user]) + end + user else _e -> nil @@ -554,6 +567,17 @@ def get_or_fetch_by_nickname(nickname) do end end + @doc "Fetch some posts when the user has just been federated with" + def fetch_initial_posts(user) do + pages = Pleroma.Config.get!([:fetch_initial_posts, :pages]) + + Enum.each( + # Insert all the posts in reverse order, so they're in the right order on the timeline + Enum.reverse(Utils.fetch_ordered_collection(user.info.source_data["outbox"], pages)), + &Pleroma.Web.Federator.incoming_ap_doc/1 + ) + end + def get_followers_query(%User{id: id, follower_address: follower_address}, nil) do from( u in User, @@ -637,7 +661,7 @@ def get_follow_requests(%User{} = user) do users = user |> User.get_follow_requests_query() - |> join(:inner, [a], u in User, a.actor == u.ap_id) + |> join(:inner, [a], u in User, on: a.actor == u.ap_id) |> where([a, u], not fragment("? 
@> ?", u.following, ^[user.follower_address])) |> group_by([a, u], u.id) |> select([a, u], u) @@ -659,7 +683,8 @@ def increase_note_count(%User{} = user) do ) ] ) - |> Repo.update_all([], returning: true) + |> select([u], u) + |> Repo.update_all([]) |> case do {1, [user]} -> set_cache(user) _ -> {:error, user} @@ -679,7 +704,8 @@ def decrease_note_count(%User{} = user) do ) ] ) - |> Repo.update_all([], returning: true) + |> select([u], u) + |> Repo.update_all([]) |> case do {1, [user]} -> set_cache(user) _ -> {:error, user} @@ -725,7 +751,8 @@ def update_follower_count(%User{} = user) do ) ] ) - |> Repo.update_all([], returning: true) + |> select([u], u) + |> Repo.update_all([]) |> case do {1, [user]} -> set_cache(user) _ -> {:error, user} @@ -766,77 +793,59 @@ def get_recipients_from_activity(%Activity{recipients: to}) do Repo.all(query) end - @spec search_for_admin(binary(), %{ - admin: Pleroma.User.t(), - local: boolean(), - page: number(), - page_size: number() - }) :: {:ok, [Pleroma.User.t()], number()} - def search_for_admin(term, %{admin: admin, local: local, page: page, page_size: page_size}) do - term = String.trim_leading(term, "@") - - local_paginated_query = - User - |> maybe_local_user_query(local) - |> paginate(page, page_size) - - search_query = fts_search_subquery(term, local_paginated_query) - - count = - term - |> fts_search_subquery() - |> maybe_local_user_query(local) - |> Repo.aggregate(:count, :id) - - {:ok, do_search(search_query, admin), count} - end - - @spec all_for_admin(number(), number()) :: {:ok, [Pleroma.User.t()], number()} - def all_for_admin(page, page_size) do - query = from(u in User, order_by: u.id) - - paginated_query = - query - |> paginate(page, page_size) - - count = - query - |> Repo.aggregate(:count, :id) - - {:ok, Repo.all(paginated_query), count} - end - def search(query, resolve \\ false, for_user \\ nil) do # Strip the beginning @ off if there is a query query = String.trim_leading(query, "@") if resolve, do: get_or_fetch(query) - fts_results = do_search(fts_search_subquery(query), for_user) - - {:ok, trigram_results} = + {:ok, results} = Repo.transaction(fn -> Ecto.Adapters.SQL.query(Repo, "select set_limit(0.25)", []) - do_search(trigram_search_subquery(query), for_user) + Repo.all(search_query(query, for_user)) end) - Enum.uniq_by(fts_results ++ trigram_results, & &1.id) + results end - defp do_search(subquery, for_user, options \\ []) do - q = - from( - s in subquery(subquery), - order_by: [desc: s.search_rank], - limit: ^(options[:limit] || 20) - ) + def search_query(query, for_user) do + fts_subquery = fts_search_subquery(query) + trigram_subquery = trigram_search_subquery(query) + union_query = from(s in trigram_subquery, union_all: ^fts_subquery) + distinct_query = from(s in subquery(union_query), order_by: s.search_type, distinct: s.id) - results = - q - |> Repo.all() - |> Enum.filter(&(&1.search_rank > 0)) + from(s in subquery(boost_search_rank_query(distinct_query, for_user)), + order_by: [desc: s.search_rank], + limit: 20 + ) + end - boost_search_results(results, for_user) + defp boost_search_rank_query(query, nil), do: query + + defp boost_search_rank_query(query, for_user) do + friends_ids = get_friends_ids(for_user) + followers_ids = get_followers_ids(for_user) + + from(u in subquery(query), + select_merge: %{ + search_rank: + fragment( + """ + CASE WHEN (?) THEN (?) * 1.3 + WHEN (?) THEN (?) * 1.2 + WHEN (?) THEN (?) * 1.1 + ELSE (?) 
END + """, + u.id in ^friends_ids and u.id in ^followers_ids, + u.search_rank, + u.id in ^friends_ids, + u.search_rank, + u.id in ^followers_ids, + u.search_rank, + u.search_rank + ) + } + ) end defp fts_search_subquery(term, query \\ User) do @@ -851,6 +860,7 @@ defp fts_search_subquery(term, query \\ User) do from( u in query, select_merge: %{ + search_type: ^0, search_rank: fragment( """ @@ -884,6 +894,8 @@ defp trigram_search_subquery(term) do from( u in User, select_merge: %{ + # ^1 gives 'Postgrex expected a binary, got 1' for some weird reason + search_type: fragment("?", 1), search_rank: fragment( "similarity(?, trim(? || ' ' || coalesce(?, '')))", @@ -897,33 +909,6 @@ defp trigram_search_subquery(term) do |> restrict_disabled() end - defp boost_search_results(results, nil), do: results - - defp boost_search_results(results, for_user) do - friends_ids = get_friends_ids(for_user) - followers_ids = get_followers_ids(for_user) - - Enum.map( - results, - fn u -> - search_rank_coef = - cond do - u.id in friends_ids -> - 1.2 - - u.id in followers_ids -> - 1.1 - - true -> - 1 - end - - Map.put(u, :search_rank, u.search_rank * search_rank_coef) - end - ) - |> Enum.sort_by(&(-&1.search_rank)) - end - def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do Enum.map( blocked_identifiers, @@ -965,6 +950,38 @@ def unmute(muter, %{ap_id: ap_id}) do update_and_set_cache(cng) end + def subscribe(subscriber, %{ap_id: ap_id}) do + deny_follow_blocked = Pleroma.Config.get([:user, :deny_follow_blocked]) + + with %User{} = subscribed <- get_cached_by_ap_id(ap_id) do + blocked = blocks?(subscribed, subscriber) and deny_follow_blocked + + if blocked do + {:error, "Could not subscribe: #{subscribed.nickname} is blocking you"} + else + info_cng = + subscribed.info + |> User.Info.add_to_subscribers(subscriber.ap_id) + + change(subscribed) + |> put_embed(:info, info_cng) + |> update_and_set_cache() + end + end + end + + def unsubscribe(unsubscriber, %{ap_id: ap_id}) do + with %User{} = user <- get_cached_by_ap_id(ap_id) do + info_cng = + user.info + |> User.Info.remove_from_subscribers(unsubscriber.ap_id) + + change(user) + |> put_embed(:info, info_cng) + |> update_and_set_cache() + end + end + def block(blocker, %User{ap_id: ap_id} = blocked) do # sever any follow relationships to prevent leaks per activitypub (Pleroma issue #213) blocker = @@ -975,10 +992,20 @@ def block(blocker, %User{ap_id: ap_id} = blocked) do blocker end + blocker = + if subscribed_to?(blocked, blocker) do + {:ok, blocker} = unsubscribe(blocked, blocker) + blocker + else + blocker + end + if following?(blocked, blocker) do unfollow(blocked, blocker) end + {:ok, blocker} = update_follower_count(blocker) + info_cng = blocker.info |> User.Info.add_to_block(ap_id) @@ -1021,12 +1048,21 @@ def blocks?(user, %{ap_id: ap_id}) do end) end + def subscribed_to?(user, %{ap_id: ap_id}) do + with %User{} = target <- User.get_by_ap_id(ap_id) do + Enum.member?(target.info.subscribers, user.ap_id) + end + end + def muted_users(user), do: Repo.all(from(u in User, where: u.ap_id in ^user.info.mutes)) def blocked_users(user), do: Repo.all(from(u in User, where: u.ap_id in ^user.info.blocks)) + def subscribers(user), + do: Repo.all(from(u in User, where: u.ap_id in ^user.info.subscribers)) + def block_domain(user, domain) do info_cng = user.info @@ -1063,6 +1099,42 @@ def local_user_query(query \\ User) do ) end + def maybe_external_user_query(query, external) do + if external, do: external_user_query(query), else: 
query + end + + def external_user_query(query \\ User) do + from( + u in query, + where: u.local == false, + where: not is_nil(u.nickname) + ) + end + + def maybe_active_user_query(query, active) do + if active, do: active_user_query(query), else: query + end + + def active_user_query(query \\ User) do + from( + u in query, + where: fragment("not (?->'deactivated' @> 'true')", u.info), + where: not is_nil(u.nickname) + ) + end + + def maybe_deactivated_user_query(query, deactivated) do + if deactivated, do: deactivated_user_query(query), else: query + end + + def deactivated_user_query(query \\ User) do + from( + u in query, + where: fragment("(?->'deactivated' @> 'true')", u.info), + where: not is_nil(u.nickname) + ) + end + def active_local_user_query do from( u in local_user_query(), @@ -1087,39 +1159,48 @@ def deactivate(%User{} = user, status \\ true) do |> update_and_set_cache() end + def update_notification_settings(%User{} = user, settings \\ %{}) do + info_changeset = User.Info.update_notification_settings(user.info, settings) + + change(user) + |> put_embed(:info, info_changeset) + |> update_and_set_cache() + end + def delete(%User{} = user) do {:ok, user} = User.deactivate(user) # Remove all relationships {:ok, followers} = User.get_followers(user) - followers - |> Enum.each(fn follower -> User.unfollow(follower, user) end) + Enum.each(followers, fn follower -> User.unfollow(follower, user) end) {:ok, friends} = User.get_friends(user) - friends - |> Enum.each(fn followed -> User.unfollow(user, followed) end) + Enum.each(friends, fn followed -> User.unfollow(user, followed) end) - query = from(a in Activity, where: a.actor == ^user.ap_id) + delete_user_activities(user) + end - Repo.all(query) - |> Enum.each(fn activity -> - case activity.data["type"] do - "Create" -> - ActivityPub.delete(Object.normalize(activity.data["object"])) + def delete_user_activities(%User{ap_id: ap_id} = user) do + Activity + |> where(actor: ^ap_id) + |> Activity.with_preloaded_object() + |> Repo.all() + |> Enum.each(fn + %{data: %{"type" => "Create"}} = activity -> + activity |> Object.normalize() |> ActivityPub.delete() - # TODO: Do something with likes, follows, repeats. - _ -> - "Doing nothing" - end + # TODO: Do something with likes, follows, repeats. 
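# Editor's aside (illustrative, not part of this patch): the notification-settings
# plumbing added above stores a plain map keyed by the strings "remote", "local",
# "followers" and "follows"; any other key is dropped by the Map.take/2 in User.Info.
# A hypothetical caller that wants to stop notifications coming from remote accounts
# (assuming that is what the "remote" flag controls) would do something like:
#
#     Pleroma.User.update_notification_settings(user, %{"remote" => false})
#
# where `user` is a %Pleroma.User{} the caller has already loaded.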
+ _ -> + "Doing nothing" end) {:ok, user} end def disable_async(user, status \\ true) do - Pleroma.Jobs.enqueue(:user, __MODULE__, [:disable_async, user, status]) + PleromaJobQueue.enqueue(:user, __MODULE__, [:disable_async, user, status]) end def disable(%User{} = user, status \\ true) do @@ -1146,24 +1227,39 @@ def html_filter_policy(%User{info: %{no_rich_text: true}}) do def html_filter_policy(_), do: @default_scrubbers + def fetch_by_ap_id(ap_id) do + ap_try = ActivityPub.make_user_from_ap_id(ap_id) + + case ap_try do + {:ok, user} -> + user + + _ -> + case OStatus.make_user(ap_id) do + {:ok, user} -> user + _ -> {:error, "Could not fetch by AP id"} + end + end + end + def get_or_fetch_by_ap_id(ap_id) do user = get_by_ap_id(ap_id) if !is_nil(user) and !User.needs_update?(user) do user else - ap_try = ActivityPub.make_user_from_ap_id(ap_id) + # Whether to fetch initial posts for the user (if it's a new user & the fetching is enabled) + should_fetch_initial = is_nil(user) and Pleroma.Config.get([:fetch_initial_posts, :enabled]) - case ap_try do - {:ok, user} -> - user + user = fetch_by_ap_id(ap_id) - _ -> - case OStatus.make_user(ap_id) do - {:ok, user} -> user - _ -> {:error, "Could not fetch by AP id"} - end + if should_fetch_initial do + with %User{} = user do + {:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user]) + end end + + user end end @@ -1239,8 +1335,8 @@ def get_or_fetch(nickname), do: get_or_fetch_by_nickname(nickname) # this is because we have synchronous follow APIs and need to simulate them # with an async handshake def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do - with %User{} = a <- Repo.get(User, a.id), - %User{} = b <- Repo.get(User, b.id) do + with %User{} = a <- User.get_by_id(a.id), + %User{} = b <- User.get_by_id(b.id) do {:ok, a, b} else _e -> @@ -1250,8 +1346,8 @@ def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do def wait_and_refresh(timeout, %User{} = a, %User{} = b) do with :ok <- :timer.sleep(timeout), - %User{} = a <- Repo.get(User, a.id), - %User{} = b <- Repo.get(User, b.id) do + %User{} = a <- User.get_by_id(a.id), + %User{} = b <- User.get_by_id(b.id) do {:ok, a, b} else _e -> @@ -1338,7 +1434,7 @@ defp normalize_tags(tags) do |> Enum.map(&String.downcase(&1)) end - defp local_nickname_regex() do + defp local_nickname_regex do if Pleroma.Config.get([:instance, :extended_nickname_format]) do @extended_local_nickname_regex else @@ -1381,4 +1477,8 @@ defp paginate(query, page, page_size) do offset: ^((page - 1) * page_size) ) end + + def showing_reblogs?(%User{} = user, %User{} = target) do + target.ap_id not in user.info.muted_reblogs + end end diff --git a/lib/pleroma/user/info.ex b/lib/pleroma/user/info.ex index 1ec356ba9..07825a1c4 100644 --- a/lib/pleroma/user/info.ex +++ b/lib/pleroma/user/info.ex @@ -6,6 +6,8 @@ defmodule Pleroma.User.Info do use Ecto.Schema import Ecto.Changeset + alias Pleroma.User.Info + embedded_schema do field(:banner, :map, default: %{}) field(:background, :map, default: %{}) @@ -19,6 +21,8 @@ defmodule Pleroma.User.Info do field(:blocks, {:array, :string}, default: []) field(:domain_blocks, {:array, :string}, default: []) field(:mutes, {:array, :string}, default: []) + field(:muted_reblogs, {:array, :string}, default: []) + field(:subscribers, {:array, :string}, default: []) field(:deactivated, :boolean, default: false) field(:no_rich_text, :boolean, default: false) field(:ap_enabled, :boolean, default: false) @@ -38,6 +42,10 @@ defmodule Pleroma.User.Info do 
field(:flavour, :string, default: nil) field(:disabled, :boolean, default: false) + field(:notification_settings, :map, + default: %{"remote" => true, "local" => true, "followers" => true, "follows" => true} + ) + # Found in the wild # ap_id -> Where is this used? # bio -> Where is this used? @@ -55,6 +63,19 @@ def set_activation_status(info, deactivated) do |> validate_required([:deactivated]) end + def update_notification_settings(info, settings) do + notification_settings = + info.notification_settings + |> Map.merge(settings) + |> Map.take(["remote", "local", "followers", "follows"]) + + params = %{notification_settings: notification_settings} + + info + |> cast(params, [:notification_settings]) + |> validate_required([:notification_settings]) + end + def set_disabled_status(info, disabled) do params = %{disabled: disabled} @@ -99,6 +120,14 @@ def set_blocks(info, blocks) do |> validate_required([:blocks]) end + def set_subscribers(info, subscribers) do + params = %{subscribers: subscribers} + + info + |> cast(params, [:subscribers]) + |> validate_required([:subscribers]) + end + def add_to_mutes(info, muted) do set_mutes(info, Enum.uniq([muted | info.mutes])) end @@ -115,6 +144,14 @@ def remove_from_block(info, blocked) do set_blocks(info, List.delete(info.blocks, blocked)) end + def add_to_subscribers(info, subscribed) do + set_subscribers(info, Enum.uniq([subscribed | info.subscribers])) + end + + def remove_from_subscribers(info, subscribed) do + set_subscribers(info, List.delete(info.subscribers, subscribed)) + end + def set_domain_blocks(info, domain_blocks) do params = %{domain_blocks: domain_blocks} @@ -259,4 +296,23 @@ def remove_pinnned_activity(info, %Pleroma.Activity{id: id}) do cast(info, params, [:pinned_activities]) end + + def roles(%Info{is_moderator: is_moderator, is_admin: is_admin}) do + %{ + admin: is_admin, + moderator: is_moderator + } + end + + def add_reblog_mute(info, ap_id) do + params = %{muted_reblogs: info.muted_reblogs ++ [ap_id]} + + cast(info, params, [:muted_reblogs]) + end + + def remove_reblog_mute(info, ap_id) do + params = %{muted_reblogs: List.delete(info.muted_reblogs, ap_id)} + + cast(info, params, [:muted_reblogs]) + end end diff --git a/lib/pleroma/user/welcome_message.ex b/lib/pleroma/user/welcome_message.ex index 8018ac22f..2ba65b75a 100644 --- a/lib/pleroma/user/welcome_message.ex +++ b/lib/pleroma/user/welcome_message.ex @@ -14,7 +14,7 @@ def post_welcome_message_to_user(user) do end end - defp welcome_user() do + defp welcome_user do with nickname when is_binary(nickname) <- Pleroma.Config.get([:instance, :welcome_user_nickname]), %User{local: true} = user <- User.get_cached_by_nickname(nickname) do @@ -24,7 +24,7 @@ defp welcome_user() do end end - defp welcome_message() do + defp welcome_message do Pleroma.Config.get([:instance, :welcome_message]) end end diff --git a/lib/pleroma/user_invite_token.ex b/lib/pleroma/user_invite_token.ex index 5a448114c..86f0a5486 100644 --- a/lib/pleroma/user_invite_token.ex +++ b/lib/pleroma/user_invite_token.ex @@ -6,40 +6,119 @@ defmodule Pleroma.UserInviteToken do use Ecto.Schema import Ecto.Changeset - - alias Pleroma.UserInviteToken + import Ecto.Query alias Pleroma.Repo + alias Pleroma.UserInviteToken + + @type t :: %__MODULE__{} + @type token :: String.t() schema "user_invite_tokens" do field(:token, :string) field(:used, :boolean, default: false) + field(:max_use, :integer) + field(:expires_at, :date) + field(:uses, :integer, default: 0) + field(:invite_type, :string) timestamps() end - def 
create_token do + @spec create_invite(map()) :: UserInviteToken.t() + def create_invite(params \\ %{}) do + %UserInviteToken{} + |> cast(params, [:max_use, :expires_at]) + |> add_token() + |> assign_type() + |> Repo.insert() + end + + defp add_token(changeset) do token = :crypto.strong_rand_bytes(32) |> Base.url_encode64() - - token = %UserInviteToken{ - used: false, - token: token - } - - Repo.insert(token) + put_change(changeset, :token, token) end - def used_changeset(struct) do - struct - |> cast(%{}, []) - |> put_change(:used, true) + defp assign_type(%{changes: %{max_use: _max_use, expires_at: _expires_at}} = changeset) do + put_change(changeset, :invite_type, "reusable_date_limited") end - def mark_as_used(token) do - with %{used: false} = token <- Repo.get_by(UserInviteToken, %{token: token}), - {:ok, token} <- Repo.update(used_changeset(token)) do - {:ok, token} - else - _e -> {:error, token} + defp assign_type(%{changes: %{expires_at: _expires_at}} = changeset) do + put_change(changeset, :invite_type, "date_limited") + end + + defp assign_type(%{changes: %{max_use: _max_use}} = changeset) do + put_change(changeset, :invite_type, "reusable") + end + + defp assign_type(changeset), do: put_change(changeset, :invite_type, "one_time") + + @spec list_invites() :: [UserInviteToken.t()] + def list_invites do + query = from(u in UserInviteToken, order_by: u.id) + Repo.all(query) + end + + @spec update_invite!(UserInviteToken.t(), map()) :: UserInviteToken.t() | no_return() + def update_invite!(invite, changes) do + change(invite, changes) |> Repo.update!() + end + + @spec update_invite(UserInviteToken.t(), map()) :: + {:ok, UserInviteToken.t()} | {:error, Changeset.t()} + def update_invite(invite, changes) do + change(invite, changes) |> Repo.update() + end + + @spec find_by_token!(token()) :: UserInviteToken.t() | no_return() + def find_by_token!(token), do: Repo.get_by!(UserInviteToken, token: token) + + @spec find_by_token(token()) :: {:ok, UserInviteToken.t()} | nil + def find_by_token(token) do + with invite <- Repo.get_by(UserInviteToken, token: token) do + {:ok, invite} end end + + @spec valid_invite?(UserInviteToken.t()) :: boolean() + def valid_invite?(%{invite_type: "one_time"} = invite) do + not invite.used + end + + def valid_invite?(%{invite_type: "date_limited"} = invite) do + not_overdue_date?(invite) and not invite.used + end + + def valid_invite?(%{invite_type: "reusable"} = invite) do + invite.uses < invite.max_use and not invite.used + end + + def valid_invite?(%{invite_type: "reusable_date_limited"} = invite) do + not_overdue_date?(invite) and invite.uses < invite.max_use and not invite.used + end + + defp not_overdue_date?(%{expires_at: expires_at}) do + Date.compare(Date.utc_today(), expires_at) in [:lt, :eq] + end + + @spec update_usage!(UserInviteToken.t()) :: nil | UserInviteToken.t() | no_return() + def update_usage!(%{invite_type: "date_limited"}), do: nil + + def update_usage!(%{invite_type: "one_time"} = invite), + do: update_invite!(invite, %{used: true}) + + def update_usage!(%{invite_type: invite_type} = invite) + when invite_type == "reusable" or invite_type == "reusable_date_limited" do + changes = %{ + uses: invite.uses + 1 + } + + changes = + if changes.uses >= invite.max_use do + Map.put(changes, :used, true) + else + changes + end + + update_invite!(invite, changes) + end end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index aa20990f3..dd51d63c8 100644 --- 
a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -4,17 +4,17 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do alias Pleroma.Activity - alias Pleroma.Repo + alias Pleroma.Instances + alias Pleroma.Notification alias Pleroma.Object + alias Pleroma.Repo alias Pleroma.Upload alias Pleroma.User - alias Pleroma.Notification - alias Pleroma.Instances - alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.MRF - alias Pleroma.Web.WebFinger + alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.Federator alias Pleroma.Web.OStatus + alias Pleroma.Web.WebFinger import Ecto.Query import Pleroma.Web.ActivityPub.Utils @@ -89,15 +89,39 @@ def decrease_note_count_if_public(actor, object) do if is_public?(object), do: User.decrease_note_count(actor), else: {:ok, actor} end - def insert(map, local \\ true) when is_map(map) do + def increase_replies_count_if_reply(%{ + "object" => + %{"inReplyTo" => reply_ap_id, "inReplyToStatusId" => reply_status_id} = object, + "type" => "Create" + }) do + if is_public?(object) do + Activity.increase_replies_count(reply_status_id) + Object.increase_replies_count(reply_ap_id) + end + end + + def increase_replies_count_if_reply(_create_data), do: :noop + + def decrease_replies_count_if_reply(%Object{ + data: %{"inReplyTo" => reply_ap_id, "inReplyToStatusId" => reply_status_id} = object + }) do + if is_public?(object) do + Activity.decrease_replies_count(reply_status_id) + Object.decrease_replies_count(reply_ap_id) + end + end + + def decrease_replies_count_if_reply(_object), do: :noop + + def insert(map, local \\ true, fake \\ false) when is_map(map) do with nil <- Activity.normalize(map), - map <- lazy_put_activity_defaults(map), + map <- lazy_put_activity_defaults(map, fake), :ok <- check_actor_is_active(map["actor"]), {_, true} <- {:remote_limit_error, check_remote_limit(map)}, {:ok, map} <- MRF.filter(map), - :ok <- insert_full_object(map) do - {recipients, _, _} = get_recipients(map) - + {recipients, _, _} = get_recipients(map), + {:fake, false, map, recipients} <- {:fake, fake, map, recipients}, + {:ok, object} <- insert_full_object(map) do {:ok, activity} = Repo.insert(%Activity{ data: map, @@ -106,6 +130,14 @@ def insert(map, local \\ true) when is_map(map) do recipients: recipients }) + # Splice in the child object if we have one. 
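# Editor's aside (illustrative, not part of this patch): when `fake` is true the
# activity is still normalised and passed through MRF exactly as above, but the
# `{:fake, ...}` tuple short-circuits before anything is persisted and the caller
# gets back a struct whose id is the placeholder "pleroma:fakeid". A hypothetical
# preview call, where `user`, `to`, `context` and `object` are whatever the caller
# already built, would be:
#
#     {:ok, preview} =
#       Pleroma.Web.ActivityPub.ActivityPub.create(
#         %{to: to, actor: user, context: context, object: object},
#         true
#       )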
+ activity = + if !is_nil(object) do + Map.put(activity, :object, object) + else + activity + end + Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end) @@ -114,8 +146,23 @@ def insert(map, local \\ true) when is_map(map) do stream_out(activity) {:ok, activity} else - %Activity{} = activity -> {:ok, activity} - error -> {:error, error} + %Activity{} = activity -> + {:ok, activity} + + {:fake, true, map, recipients} -> + activity = %Activity{ + data: map, + local: local, + actor: map["actor"], + recipients: recipients, + id: "pleroma:fakeid" + } + + Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) + {:ok, activity} + + error -> + {:error, error} end end @@ -158,7 +205,7 @@ def stream_out(activity) do end end - def create(%{to: to, actor: actor, context: context, object: object} = params) do + def create(%{to: to, actor: actor, context: context, object: object} = params, fake \\ false) do additional = params[:additional] || %{} # only accept false as false value local = !(params[:local] == false) @@ -169,11 +216,17 @@ def create(%{to: to, actor: actor, context: context, object: object} = params) d %{to: to, actor: actor, published: published, context: context, object: object}, additional ), - {:ok, activity} <- insert(create_data, local), - # Changing note count prior to enqueuing federation task in order to avoid race conditions on updating user.info + {:ok, activity} <- insert(create_data, local, fake), + {:fake, false, activity} <- {:fake, fake, activity}, + _ <- increase_replies_count_if_reply(create_data), + # Changing note count prior to enqueuing federation task in order to avoid + # race conditions on updating user.info {:ok, _actor} <- increase_note_count_if_public(actor, activity), :ok <- maybe_federate(activity) do {:ok, activity} + else + {:fake, true, activity} -> + {:ok, activity} end end @@ -309,17 +362,20 @@ def unfollow(follower, followed, activity_id \\ nil, local \\ true) do def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ true) do user = User.get_cached_by_ap_id(actor) + to = (object.data["to"] || []) ++ (object.data["cc"] || []) - data = %{ - "type" => "Delete", - "actor" => actor, - "object" => id, - "to" => [user.follower_address, "https://www.w3.org/ns/activitystreams#Public"] - } - - with {:ok, _} <- Object.delete(object), + with {:ok, object, activity} <- Object.delete(object), + data <- %{ + "type" => "Delete", + "actor" => actor, + "object" => id, + "to" => to, + "deleted_activity_id" => activity && activity.id + }, {:ok, activity} <- insert(data, local), - # Changing note count prior to enqueuing federation task in order to avoid race conditions on updating user.info + _ <- decrease_replies_count_if_reply(object), + # Changing note count prior to enqueuing federation task in order to avoid + # race conditions on updating user.info {:ok, _actor} <- decrease_note_count_if_public(user, object), :ok <- maybe_federate(activity) do {:ok, activity} @@ -367,20 +423,38 @@ def flag( content: content } = params ) do - additional = params[:additional] || %{} - # only accept false as false value local = !(params[:local] == false) + forward = !(params[:forward] == false) - %{ + additional = params[:additional] || %{} + + params = %{ actor: actor, context: context, account: account, statuses: statuses, content: content } - |> make_flag_data(additional) - |> insert(local) + + additional = + if forward do + Map.merge(additional, %{"to" => [], "cc" => [account.ap_id]}) + else + Map.merge(additional, %{"to" 
=> [], "cc" => []}) + end + + with flag_data <- make_flag_data(params, additional), + {:ok, activity} <- insert(flag_data, local), + :ok <- maybe_federate(activity) do + Enum.each(User.all_superusers(), fn superuser -> + superuser + |> Pleroma.AdminEmail.report(actor, account, statuses, content) + |> Pleroma.Mailer.deliver_async() + end) + + {:ok, activity} + end end def fetch_activities_for_context(context, opts \\ %{}) do @@ -409,6 +483,7 @@ def fetch_activities_for_context(context, opts \\ %{}) do ), order_by: [desc: :id] ) + |> Activity.with_preloaded_object() Repo.all(query) end @@ -501,7 +576,7 @@ defp restrict_tag_reject(query, %{"tag_reject" => tag_reject}) when is_list(tag_reject) and tag_reject != [] do from( activity in query, - where: fragment("(not (? #> '{\"object\",\"tag\"}') \\?| ?)", activity.data, ^tag_reject) + where: fragment(~s(\(not \(? #> '{"object","tag"}'\) \\?| ?\)), activity.data, ^tag_reject) ) end @@ -511,7 +586,7 @@ defp restrict_tag_all(query, %{"tag_all" => tag_all}) when is_list(tag_all) and tag_all != [] do from( activity in query, - where: fragment("(? #> '{\"object\",\"tag\"}') \\?& ?", activity.data, ^tag_all) + where: fragment(~s(\(? #> '{"object","tag"}'\) \\?& ?), activity.data, ^tag_all) ) end @@ -520,14 +595,14 @@ defp restrict_tag_all(query, _), do: query defp restrict_tag(query, %{"tag" => tag}) when is_list(tag) do from( activity in query, - where: fragment("(? #> '{\"object\",\"tag\"}') \\?| ?", activity.data, ^tag) + where: fragment(~s(\(? #> '{"object","tag"}'\) \\?| ?), activity.data, ^tag) ) end defp restrict_tag(query, %{"tag" => tag}) when is_binary(tag) do from( activity in query, - where: fragment("? <@ (? #> '{\"object\",\"tag\"}')", ^tag, activity.data) + where: fragment(~s(? <@ (? #> '{"object","tag"}'\)), ^tag, activity.data) ) end @@ -600,7 +675,7 @@ defp restrict_type(query, _), do: query defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do from( activity in query, - where: fragment("? <@ (? #> '{\"object\",\"likes\"}')", ^ap_id, activity.data) + where: fragment(~s(? <@ (? #> '{"object","likes"}'\)), ^ap_id, activity.data) ) end @@ -609,7 +684,7 @@ defp restrict_favorited_by(query, _), do: query defp restrict_media(query, %{"only_media" => val}) when val == "true" or val == "1" do from( activity in query, - where: fragment("not (? #> '{\"object\",\"attachment\"}' = ?)", activity.data, ^[]) + where: fragment(~s(not (? #> '{"object","attachment"}' = ?\)), activity.data, ^[]) ) end @@ -676,6 +751,30 @@ defp restrict_pinned(query, %{"pinned" => "true", "pinned_activity_ids" => ids}) defp restrict_pinned(query, _), do: query + defp restrict_muted_reblogs(query, %{"muting_user" => %User{info: info}}) do + muted_reblogs = info.muted_reblogs || [] + + from( + activity in query, + where: + fragment( + "not ( ?->>'type' = 'Announce' and ? 
= ANY(?))", + activity.data, + activity.actor, + ^muted_reblogs + ) + ) + end + + defp restrict_muted_reblogs(query, _), do: query + + defp maybe_preload_objects(query, %{"skip_preload" => true}), do: query + + defp maybe_preload_objects(query, _) do + query + |> Activity.with_preloaded_object() + end + def fetch_activities_query(recipients, opts \\ %{}) do base_query = from( @@ -685,6 +784,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do ) base_query + |> maybe_preload_objects(opts) |> restrict_recipients(recipients, opts["user"]) |> restrict_tag(opts) |> restrict_tag_reject(opts) @@ -703,6 +803,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do |> restrict_replies(opts) |> restrict_reblogs(opts) |> restrict_pinned(opts) + |> restrict_muted_reblogs(opts) |> Activity.restrict_disabled_users() end @@ -907,7 +1008,7 @@ def fetch_object_from_id(id) do }, :ok <- Transmogrifier.contain_origin(id, params), {:ok, activity} <- Transmogrifier.handle_incoming(params) do - {:ok, Object.normalize(activity.data["object"])} + {:ok, Object.normalize(activity)} else {:error, {:reject, nil}} -> {:reject, nil} @@ -919,7 +1020,7 @@ def fetch_object_from_id(id) do Logger.info("Couldn't get object via AP, trying out OStatus fetching...") case OStatus.fetch_activity_from_url(id) do - {:ok, [activity | _]} -> {:ok, Object.normalize(activity.data["object"])} + {:ok, [activity | _]} -> {:ok, Object.normalize(activity)} e -> e end end diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex index ff924a536..7091d6927 100644 --- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex +++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex @@ -6,15 +6,15 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do use Pleroma.Web, :controller alias Pleroma.Activity - alias Pleroma.User alias Pleroma.Object - alias Pleroma.Web.ActivityPub.ObjectView - alias Pleroma.Web.ActivityPub.UserView + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.ActivityPub.Visibility + alias Pleroma.Web.ActivityPub.ObjectView alias Pleroma.Web.ActivityPub.Relay alias Pleroma.Web.ActivityPub.Transmogrifier + alias Pleroma.Web.ActivityPub.UserView alias Pleroma.Web.ActivityPub.Utils + alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.Federator require Logger diff --git a/lib/pleroma/web/activity_pub/mrf.ex b/lib/pleroma/web/activity_pub/mrf.ex index eebea207c..1aaa20050 100644 --- a/lib/pleroma/web/activity_pub/mrf.ex +++ b/lib/pleroma/web/activity_pub/mrf.ex @@ -16,7 +16,7 @@ def filter(object) do end) end - def get_policies() do + def get_policies do Application.get_env(:pleroma, :instance, []) |> Keyword.get(:rewrite_policy, []) |> get_policies() diff --git a/lib/pleroma/web/activity_pub/mrf/anti_followbot_policy.ex b/lib/pleroma/web/activity_pub/mrf/anti_followbot_policy.ex index 7c6ad582a..34665a3a6 100644 --- a/lib/pleroma/web/activity_pub/mrf/anti_followbot_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/anti_followbot_policy.ex @@ -23,15 +23,21 @@ defp score_displayname("fedibot"), do: 1.0 defp score_displayname(_), do: 0.0 defp determine_if_followbot(%User{nickname: nickname, name: displayname}) do + # nickname will always be a binary string because it's generated by Pleroma. nick_score = nickname |> String.downcase() |> score_nickname() + # displayname will either be a binary string or nil, if a displayname isn't set. 
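# Editor's aside (illustrative, not part of this patch): like the other MRF policies
# touched in this changeset, this module only runs when it is listed in the instance's
# :rewrite_policy setting, which MRF.get_policies/0 reads, e.g.:
#
#     config :pleroma, :instance,
#       rewrite_policy: [Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicy]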
name_score = - displayname - |> String.downcase() - |> score_displayname() + if is_binary(displayname) do + displayname + |> String.downcase() + |> score_displayname() + else + 0.0 + end nick_score + name_score end diff --git a/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex b/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex index 5fdc03414..e8dfba672 100644 --- a/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/keyword_policy.ex @@ -4,6 +4,10 @@ defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do @behaviour Pleroma.Web.ActivityPub.MRF + defp string_matches?(string, _) when not is_binary(string) do + false + end + defp string_matches?(string, pattern) when is_binary(pattern) do String.contains?(string, pattern) end @@ -44,14 +48,29 @@ defp check_ftl_removal( end defp check_replace(%{"object" => %{"content" => content, "summary" => summary}} = message) do + content = + if is_binary(content) do + content + else + "" + end + + summary = + if is_binary(summary) do + summary + else + "" + end + {content, summary} = - Enum.reduce(Pleroma.Config.get([:mrf_keyword, :replace]), {content, summary}, fn {pattern, - replacement}, - {content_acc, - summary_acc} -> - {String.replace(content_acc, pattern, replacement), - String.replace(summary_acc, pattern, replacement)} - end) + Enum.reduce( + Pleroma.Config.get([:mrf_keyword, :replace]), + {content, summary}, + fn {pattern, replacement}, {content_acc, summary_acc} -> + {String.replace(content_acc, pattern, replacement), + String.replace(summary_acc, pattern, replacement)} + end + ) {:ok, message @@ -59,11 +78,6 @@ defp check_replace(%{"object" => %{"content" => content, "summary" => summary}} |> put_in(["object", "summary"], summary)} end - @impl true - def filter(%{"object" => %{"content" => nil}} = message) do - {:ok, message} - end - @impl true def filter(%{"type" => "Create", "object" => %{"content" => _content}} = message) do with {:ok, message} <- check_reject(message), diff --git a/lib/pleroma/web/activity_pub/relay.ex b/lib/pleroma/web/activity_pub/relay.ex index c496063ea..a7a20ca37 100644 --- a/lib/pleroma/web/activity_pub/relay.ex +++ b/lib/pleroma/web/activity_pub/relay.ex @@ -3,9 +3,9 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.ActivityPub.Relay do - alias Pleroma.User - alias Pleroma.Object alias Pleroma.Activity + alias Pleroma.Object + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub require Logger @@ -41,7 +41,7 @@ def unfollow(target_instance) do def publish(%Activity{data: %{"type" => "Create"}} = activity) do with %User{} = user <- get_actor(), - %Object{} = object <- Object.normalize(activity.data["object"]["id"]) do + %Object{} = object <- Object.normalize(activity) do ActivityPub.announce(user, object, nil, true, false) else e -> Logger.error("error: #{inspect(e)}") diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 88007aa16..49ea73204 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -7,9 +7,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do A module to handle coding from internal to wire ActivityPub and back. 
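For instance, the `fix_*` helpers below normalise incoming objects field by field; an editor-added sketch of the summary handling introduced in this changeset (values chosen purely for illustration):

      iex> Pleroma.Web.ActivityPub.Transmogrifier.fix_summary(%{"summary" => nil})
      %{"summary" => ""}

      iex> Pleroma.Web.ActivityPub.Transmogrifier.fix_summary(%{"content" => "hi"})
      %{"content" => "hi", "summary" => ""}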
""" alias Pleroma.Activity - alias Pleroma.User alias Pleroma.Object alias Pleroma.Repo + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Visibility @@ -83,14 +83,34 @@ def fix_object(object) do |> fix_content_map |> fix_likes |> fix_addressing + |> fix_summary + end + + def fix_summary(%{"summary" => nil} = object) do + object + |> Map.put("summary", "") + end + + def fix_summary(%{"summary" => _} = object) do + # summary is present, nothing to do + object + end + + def fix_summary(object) do + object + |> Map.put("summary", "") end def fix_addressing_list(map, field) do - if is_binary(map[field]) do - map - |> Map.put(field, [map[field]]) - else - map + cond do + is_binary(map[field]) -> + Map.put(map, field, [map[field]]) + + is_nil(map[field]) -> + Map.put(map, field, []) + + true -> + map end end @@ -128,13 +148,42 @@ def fix_explicit_addressing(object) do |> fix_explicit_addressing(explicit_mentions) end + # if as:Public is addressed, then make sure the followers collection is also addressed + # so that the activities will be delivered to local users. + def fix_implicit_addressing(%{"to" => to, "cc" => cc} = object, followers_collection) do + recipients = to ++ cc + + if followers_collection not in recipients do + cond do + "https://www.w3.org/ns/activitystreams#Public" in cc -> + to = to ++ [followers_collection] + Map.put(object, "to", to) + + "https://www.w3.org/ns/activitystreams#Public" in to -> + cc = cc ++ [followers_collection] + Map.put(object, "cc", cc) + + true -> + object + end + else + object + end + end + + def fix_implicit_addressing(object, _), do: object + def fix_addressing(object) do + %User{} = user = User.get_or_fetch_by_ap_id(object["actor"]) + followers_collection = User.ap_followers(user) + object |> fix_addressing_list("to") |> fix_addressing_list("cc") |> fix_addressing_list("bto") |> fix_addressing_list("bcc") |> fix_explicit_addressing + |> fix_implicit_addressing(followers_collection) end def fix_actor(%{"attributedTo" => actor} = object) do @@ -355,6 +404,40 @@ defp get_follow_activity(follow_object, followed) do end end + # Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them + # with nil ID. + def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data) do + with context <- data["context"] || Utils.generate_context_id(), + content <- data["content"] || "", + %User{} = actor <- User.get_cached_by_ap_id(actor), + + # Reduce the object list to find the reported user. + %User{} = account <- + Enum.reduce_while(objects, nil, fn ap_id, _ -> + with %User{} = user <- User.get_cached_by_ap_id(ap_id) do + {:halt, user} + else + _ -> {:cont, nil} + end + end), + + # Remove the reported user from the object list. 
+ statuses <- Enum.filter(objects, fn ap_id -> ap_id != account.ap_id end) do + params = %{ + actor: actor, + context: context, + account: account, + statuses: statuses, + content: content, + additional: %{ + "cc" => [account.ap_id] + } + } + + ActivityPub.flag(params) + end + end + # disallow objects with bogus IDs def handle_incoming(%{"id" => nil}), do: :error def handle_incoming(%{"id" => ""}), do: :error @@ -650,10 +733,10 @@ def get_obj_helper(id) do if object = Object.normalize(id), do: {:ok, object}, else: nil end - def set_reply_to_uri(%{"inReplyTo" => inReplyTo} = object) when is_binary(inReplyTo) do - with false <- String.starts_with?(inReplyTo, "http"), - {:ok, %{data: replied_to_object}} <- get_obj_helper(inReplyTo) do - Map.put(object, "inReplyTo", replied_to_object["external_url"] || inReplyTo) + def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do + with false <- String.starts_with?(in_reply_to, "http"), + {:ok, %{data: replied_to_object}} <- get_obj_helper(in_reply_to) do + Map.put(object, "inReplyTo", replied_to_object["external_url"] || in_reply_to) else _e -> object end @@ -736,6 +819,7 @@ def prepare_outgoing(%{"type" => "Reject"} = data) do def prepare_outgoing(%{"type" => _type} = data) do data = data + |> strip_internal_fields |> maybe_fix_object_url |> Map.merge(Utils.make_json_ld_header()) @@ -829,10 +913,10 @@ def set_sensitive(object) do end def add_attributed_to(object) do - attributedTo = object["attributedTo"] || object["actor"] + attributed_to = object["attributedTo"] || object["actor"] object - |> Map.put("attributedTo", attributedTo) + |> Map.put("attributedTo", attributed_to) end def add_likes(%{"id" => id, "like_count" => likes} = object) do @@ -870,7 +954,8 @@ defp strip_internal_fields(object) do "announcements", "announcement_count", "emoji", - "context_id" + "context_id", + "deleted_activity_id" ]) end @@ -885,8 +970,9 @@ defp strip_internal_tags(%{"tag" => tags} = object) do defp strip_internal_tags(object), do: object - defp user_upgrade_task(user) do - old_follower_address = User.ap_followers(user) + def perform(:user_upgrade, user) do + # we pass a fake user so that the followers collection is stripped away + old_follower_address = User.ap_followers(%User{nickname: user.nickname}) q = from( @@ -929,28 +1015,18 @@ defp user_upgrade_task(user) do Repo.update_all(q, []) end - def upgrade_user_from_ap_id(ap_id, async \\ true) do + def upgrade_user_from_ap_id(ap_id) do with %User{local: false} = user <- User.get_by_ap_id(ap_id), - {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id) do - already_ap = User.ap_enabled?(user) - - {:ok, user} = - User.upgrade_changeset(user, data) - |> Repo.update() - - if !already_ap do - # This could potentially take a long time, do it in the background - if async do - Task.start(fn -> - user_upgrade_task(user) - end) - else - user_upgrade_task(user) - end + {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id), + already_ap <- User.ap_enabled?(user), + {:ok, user} <- user |> User.upgrade_changeset(data) |> User.update_and_set_cache() do + unless already_ap do + PleromaJobQueue.enqueue(:transmogrifier, __MODULE__, [:user_upgrade, user]) end {:ok, user} else + %User{} = user -> {:ok, user} e -> e end end diff --git a/lib/pleroma/web/activity_pub/utils.ex b/lib/pleroma/web/activity_pub/utils.ex index 88f4779c8..0b53f71c3 100644 --- a/lib/pleroma/web/activity_pub/utils.ex +++ b/lib/pleroma/web/activity_pub/utils.ex @@ -3,16 +3,17 @@ # 
SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.ActivityPub.Utils do - alias Pleroma.Repo - alias Pleroma.Web - alias Pleroma.Object - alias Pleroma.Activity - alias Pleroma.User - alias Pleroma.Notification - alias Pleroma.Web.Router.Helpers - alias Pleroma.Web.Endpoint alias Ecto.Changeset alias Ecto.UUID + alias Pleroma.Activity + alias Pleroma.Notification + alias Pleroma.Object + alias Pleroma.Repo + alias Pleroma.User + alias Pleroma.Web + alias Pleroma.Web.ActivityPub.Visibility + alias Pleroma.Web.Endpoint + alias Pleroma.Web.Router.Helpers import Ecto.Query @@ -98,7 +99,10 @@ def make_json_ld_header do %{ "@context" => [ "https://www.w3.org/ns/activitystreams", - "#{Web.base_url()}/schemas/litepub-0.1.jsonld" + "#{Web.base_url()}/schemas/litepub-0.1.jsonld", + %{ + "@language" => "und" + } ] } end @@ -174,18 +178,26 @@ def maybe_federate(_), do: :ok Adds an id and a published data if they aren't there, also adds it to an included object """ - def lazy_put_activity_defaults(map) do - %{data: %{"id" => context}, id: context_id} = create_context(map["context"]) - + def lazy_put_activity_defaults(map, fake \\ false) do map = - map - |> Map.put_new_lazy("id", &generate_activity_id/0) - |> Map.put_new_lazy("published", &make_date/0) - |> Map.put_new("context", context) - |> Map.put_new("context_id", context_id) + unless fake do + %{data: %{"id" => context}, id: context_id} = create_context(map["context"]) + + map + |> Map.put_new_lazy("id", &generate_activity_id/0) + |> Map.put_new_lazy("published", &make_date/0) + |> Map.put_new("context", context) + |> Map.put_new("context_id", context_id) + else + map + |> Map.put_new("id", "pleroma:fakeid") + |> Map.put_new_lazy("published", &make_date/0) + |> Map.put_new("context", "pleroma:fakecontext") + |> Map.put_new("context_id", -1) + end if is_map(map["object"]) do - object = lazy_put_object_defaults(map["object"], map) + object = lazy_put_object_defaults(map["object"], map, fake) %{map | "object" => object} else map @@ -195,7 +207,18 @@ def lazy_put_activity_defaults(map) do @doc """ Adds an id and published date if they aren't there. 
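For a fake (preview) activity the defaults added by the clause below are deterministic placeholders; an editor-added sketch (the "published" date is left out because it is generated at call time):

      iex> %{}
      ...> |> Pleroma.Web.ActivityPub.Utils.lazy_put_object_defaults(%{"context" => "ctx", "context_id" => 1}, true)
      ...> |> Map.take(["id", "context", "context_id", "fake"])
      %{"context" => "ctx", "context_id" => 1, "fake" => true, "id" => "pleroma:fake_object_id"}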
""" - def lazy_put_object_defaults(map, activity \\ %{}) do + def lazy_put_object_defaults(map, activity \\ %{}, fake) + + def lazy_put_object_defaults(map, activity, true = _fake) do + map + |> Map.put_new_lazy("published", &make_date/0) + |> Map.put_new("id", "pleroma:fake_object_id") + |> Map.put_new("context", activity["context"]) + |> Map.put_new("fake", true) + |> Map.put_new("context_id", activity["context_id"]) + end + + def lazy_put_object_defaults(map, activity, _fake) do map |> Map.put_new_lazy("id", &generate_object_id/0) |> Map.put_new_lazy("published", &make_date/0) @@ -208,12 +231,12 @@ def lazy_put_object_defaults(map, activity \\ %{}) do """ def insert_full_object(%{"object" => %{"type" => type} = object_data}) when is_map(object_data) and type in @supported_object_types do - with {:ok, _} <- Object.create(object_data) do - :ok + with {:ok, object} <- Object.create(object_data) do + {:ok, object} end end - def insert_full_object(_), do: :ok + def insert_full_object(_), do: {:ok, nil} def update_object_in_activities(%{data: %{"id" => id}} = object) do # TODO @@ -274,13 +297,31 @@ def get_object_likes(%{data: %{"id" => id}}) do Repo.all(query) end - def make_like_data(%User{ap_id: ap_id} = actor, %{data: %{"id" => id}} = object, activity_id) do + def make_like_data( + %User{ap_id: ap_id} = actor, + %{data: %{"actor" => object_actor_id, "id" => id}} = object, + activity_id + ) do + object_actor = User.get_cached_by_ap_id(object_actor_id) + + to = + if Visibility.is_public?(object) do + [actor.follower_address, object.data["actor"]] + else + [object.data["actor"]] + end + + cc = + (object.data["to"] ++ (object.data["cc"] || [])) + |> List.delete(actor.ap_id) + |> List.delete(object_actor.follower_address) + data = %{ "type" => "Like", "actor" => ap_id, "object" => id, - "to" => [actor.follower_address, object.data["actor"]], - "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "to" => to, + "cc" => cc, "context" => object.data["context"] } @@ -335,7 +376,7 @@ def update_follow_state( [state, actor, object] ) - activity = Repo.get(Activity, activity.id) + activity = Activity.get_by_id(activity.id) {:ok, activity} rescue e -> @@ -385,13 +426,15 @@ def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do activity.data ), where: activity.actor == ^follower_id, + # this is to use the index where: fragment( - "? @> ?", + "coalesce((?)->'object'->>'id', (?)->>'object') = ?", activity.data, - ^%{object: followed_id} + activity.data, + ^followed_id ), - order_by: [desc: :id], + order_by: [fragment("? desc nulls last", activity.id)], limit: 1 ) @@ -548,13 +591,15 @@ def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do activity.data ), where: activity.actor == ^blocker_id, + # this is to use the index where: fragment( - "? @> ?", + "coalesce((?)->'object'->>'id', (?)->>'object') = ?", activity.data, - ^%{object: blocked_id} + activity.data, + ^blocked_id ), - order_by: [desc: :id], + order_by: [fragment("? 
desc nulls last", activity.id)], limit: 1 ) @@ -602,7 +647,13 @@ def make_create_data(params, additional) do #### Flag-related helpers def make_flag_data(params, additional) do - status_ap_ids = Enum.map(params.statuses || [], & &1.data["id"]) + status_ap_ids = + Enum.map(params.statuses || [], fn + %Activity{} = act -> act.data["id"] + act when is_map(act) -> act["id"] + act when is_binary(act) -> act + end) + object = [params.account.ap_id] ++ status_ap_ids %{ @@ -614,4 +665,43 @@ def make_flag_data(params, additional) do } |> Map.merge(additional) end + + @doc """ + Fetches the OrderedCollection/OrderedCollectionPage from `from`, limiting the amount of pages fetched after + the first one to `pages_left` pages. + If the amount of pages is higher than the collection has, it returns whatever was there. + """ + def fetch_ordered_collection(from, pages_left, acc \\ []) do + with {:ok, response} <- Tesla.get(from), + {:ok, collection} <- Poison.decode(response.body) do + case collection["type"] do + "OrderedCollection" -> + # If we've encountered the OrderedCollection and not the page, + # just call the same function on the page address + fetch_ordered_collection(collection["first"], pages_left) + + "OrderedCollectionPage" -> + if pages_left > 0 do + # There are still more pages + if Map.has_key?(collection, "next") do + # There are still more pages, go deeper saving what we have into the accumulator + fetch_ordered_collection( + collection["next"], + pages_left - 1, + acc ++ collection["orderedItems"] + ) + else + # No more pages left, just return whatever we already have + acc ++ collection["orderedItems"] + end + else + # Got the amount of pages needed, add them all to the accumulator + acc ++ collection["orderedItems"] + end + + _ -> + {:error, "Not an OrderedCollection or OrderedCollectionPage"} + end + end + end end diff --git a/lib/pleroma/web/activity_pub/views/object_view.ex b/lib/pleroma/web/activity_pub/views/object_view.ex index 84fa94e32..6028b773c 100644 --- a/lib/pleroma/web/activity_pub/views/object_view.ex +++ b/lib/pleroma/web/activity_pub/views/object_view.ex @@ -17,7 +17,7 @@ def render("object.json", %{object: %Object{} = object}) do def render("object.json", %{object: %Activity{data: %{"type" => "Create"}} = activity}) do base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header() - object = Object.normalize(activity.data["object"]) + object = Object.normalize(activity) additional = Transmogrifier.prepare_object(activity.data) @@ -28,7 +28,7 @@ def render("object.json", %{object: %Activity{data: %{"type" => "Create"}} = act def render("object.json", %{object: %Activity{} = activity}) do base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header() - object = Object.normalize(activity.data["object"]) + object = Object.normalize(activity) additional = Transmogrifier.prepare_object(activity.data) diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex index 415cbd47a..5926a3294 100644 --- a/lib/pleroma/web/activity_pub/views/user_view.ex +++ b/lib/pleroma/web/activity_pub/views/user_view.ex @@ -5,15 +5,15 @@ defmodule Pleroma.Web.ActivityPub.UserView do use Pleroma.Web, :view - alias Pleroma.Web.WebFinger - alias Pleroma.Web.Salmon - alias Pleroma.User alias Pleroma.Repo + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Utils - alias Pleroma.Web.Router.Helpers alias Pleroma.Web.Endpoint + alias Pleroma.Web.Router.Helpers + alias 
Pleroma.Web.Salmon + alias Pleroma.Web.WebFinger import Ecto.Query @@ -87,16 +87,10 @@ def render("user.json", %{user: user}) do "publicKeyPem" => public_key }, "endpoints" => endpoints, - "icon" => %{ - "type" => "Image", - "url" => User.avatar_url(user) - }, - "image" => %{ - "type" => "Image", - "url" => User.banner_url(user) - }, "tag" => user.info.source_data["tag"] || [] } + |> Map.merge(maybe_make_image(&User.avatar_url/2, "icon", user)) + |> Map.merge(maybe_make_image(&User.banner_url/2, "image", user)) |> Map.merge(Utils.make_json_ld_header()) end @@ -294,4 +288,17 @@ def collection(collection, iri, page, show_items \\ true, total \\ nil) do map end end + + defp maybe_make_image(func, key, user) do + if image = func.(user, no_default: true) do + %{ + key => %{ + "type" => "Image", + "url" => image + } + } + else + %{} + end + end end diff --git a/lib/pleroma/web/admin_api/admin_api_controller.ex b/lib/pleroma/web/admin_api/admin_api_controller.ex index 1b94f0609..fb43d0b01 100644 --- a/lib/pleroma/web/admin_api/admin_api_controller.ex +++ b/lib/pleroma/web/admin_api/admin_api_controller.ex @@ -3,17 +3,19 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.AdminAPI.AdminAPIController do - @users_page_size 50 - use Pleroma.Web, :controller alias Pleroma.User + alias Pleroma.UserInviteToken alias Pleroma.Web.ActivityPub.Relay - alias Pleroma.Web.MastodonAPI.Admin.AccountView + alias Pleroma.Web.AdminAPI.AccountView + alias Pleroma.Web.AdminAPI.Search import Pleroma.Web.ControllerHelper, only: [json_response: 3] require Logger + @users_page_size 50 + action_fallback(:errors) def user_delete(conn, %{"nickname" => nickname}) do @@ -24,6 +26,26 @@ def user_delete(conn, %{"nickname" => nickname}) do |> json(nickname) end + def user_follow(conn, %{"follower" => follower_nick, "followed" => followed_nick}) do + with %User{} = follower <- User.get_by_nickname(follower_nick), + %User{} = followed <- User.get_by_nickname(followed_nick) do + User.follow(follower, followed) + end + + conn + |> json("ok") + end + + def user_unfollow(conn, %{"follower" => follower_nick, "followed" => followed_nick}) do + with %User{} = follower <- User.get_by_nickname(follower_nick), + %User{} = followed <- User.get_by_nickname(followed_nick) do + User.unfollow(follower, followed) + end + + conn + |> json("ok") + end + def user_create( conn, %{"nickname" => nickname, "email" => email, "password" => password} @@ -44,6 +66,15 @@ def user_create( |> json(user.nickname) end + def user_show(conn, %{"nickname" => nickname}) do + with %User{} = user <- User.get_by_nickname(nickname) do + conn + |> json(AccountView.render("show.json", %{user: user})) + else + _ -> {:error, :not_found} + end + end + def user_toggle_disabled(conn, %{"nickname" => nickname}) do user = User.get_by_nickname(nickname) @@ -75,8 +106,15 @@ def untag_users(conn, %{"nicknames" => nicknames, "tags" => tags}) do def list_users(conn, params) do {page, page_size} = page_params(params) + filters = maybe_parse_filters(params["filters"]) - with {:ok, users, count} <- User.all_for_admin(page, page_size), + search_params = %{ + query: params["query"], + page: page, + page_size: page_size + } + + with {:ok, users, count} <- Search.user(Map.merge(search_params, filters)), do: conn |> json( @@ -88,25 +126,17 @@ def list_users(conn, params) do ) end - def search_users(%{assigns: %{user: admin}} = conn, %{"query" => query} = params) do - {page, page_size} = page_params(params) + @filters ~w(local external active deactivated) - with {:ok, users, 
count} <- - User.search_for_admin(query, %{ - admin: admin, - local: params["local"] == "true", - page: page, - page_size: page_size - }), - do: - conn - |> json( - AccountView.render("index.json", - users: users, - count: count, - page_size: page_size - ) - ) + defp maybe_parse_filters(filters) when is_nil(filters) or filters == "", do: %{} + + @spec maybe_parse_filters(String.t()) :: %{required(String.t()) => true} | %{} + defp maybe_parse_filters(filters) do + filters + |> String.split(",") + |> Enum.filter(&Enum.member?(@filters, &1)) + |> Enum.map(&String.to_atom(&1)) + |> Enum.into(%{}, &{&1, true}) end def right_add(conn, %{"permission_group" => permission_group, "nickname" => nickname}) @@ -216,7 +246,7 @@ def email_invite(%{assigns: %{user: user}} = conn, %{"email" => email} = params) with true <- Pleroma.Config.get([:instance, :invites_enabled]) && !Pleroma.Config.get([:instance, :registrations_open]), - {:ok, invite_token} <- Pleroma.UserInviteToken.create_token(), + {:ok, invite_token} <- UserInviteToken.create_invite(), email <- Pleroma.UserEmail.user_invitation_email(user, invite_token, email, params["name"]), {:ok, _} <- Pleroma.Mailer.deliver(email) do @@ -225,11 +255,29 @@ def email_invite(%{assigns: %{user: user}} = conn, %{"email" => email} = params) end @doc "Get a account registeration invite token (base64 string)" - def get_invite_token(conn, _params) do - {:ok, token} = Pleroma.UserInviteToken.create_token() + def get_invite_token(conn, params) do + options = params["invite"] || %{} + {:ok, invite} = UserInviteToken.create_invite(options) conn - |> json(token.token) + |> json(invite.token) + end + + @doc "Get list of created invites" + def invites(conn, _params) do + invites = UserInviteToken.list_invites() + + conn + |> json(AccountView.render("invites.json", %{invites: invites})) + end + + @doc "Revokes invite by token" + def revoke_invite(conn, %{"token" => token}) do + invite = UserInviteToken.find_by_token!(token) + {:ok, updated_invite} = UserInviteToken.update_invite(invite, %{used: true}) + + conn + |> json(AccountView.render("invite.json", %{invite: updated_invite})) end @doc "Get a password reset token (base64 string) for given nickname" @@ -241,6 +289,12 @@ def get_password_reset(conn, %{"nickname" => nickname}) do |> json(token.token) end + def errors(conn, {:error, :not_found}) do + conn + |> put_status(404) + |> json("Not found") + end + def errors(conn, {:param_cast, _}) do conn |> put_status(400) diff --git a/lib/pleroma/web/admin_api/search.ex b/lib/pleroma/web/admin_api/search.ex new file mode 100644 index 000000000..9a8e41c2a --- /dev/null +++ b/lib/pleroma/web/admin_api/search.ex @@ -0,0 +1,54 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.AdminAPI.Search do + import Ecto.Query + + alias Pleroma.Repo + alias Pleroma.User + + @page_size 50 + + def user(%{query: term} = params) when is_nil(term) or term == "" do + query = maybe_filtered_query(params) + + paginated_query = + maybe_filtered_query(params) + |> paginate(params[:page] || 1, params[:page_size] || @page_size) + + count = query |> Repo.aggregate(:count, :id) + + results = Repo.all(paginated_query) + + {:ok, results, count} + end + + def user(%{query: term} = params) when is_binary(term) do + search_query = from(u in maybe_filtered_query(params), where: ilike(u.nickname, ^"%#{term}%")) + + count = search_query |> Repo.aggregate(:count, :id) + + results = + search_query + |> 
paginate(params[:page] || 1, params[:page_size] || @page_size) + |> Repo.all() + + {:ok, results, count} + end + + defp maybe_filtered_query(params) do + from(u in User, order_by: u.nickname) + |> User.maybe_local_user_query(params[:local]) + |> User.maybe_external_user_query(params[:external]) + |> User.maybe_active_user_query(params[:active]) + |> User.maybe_deactivated_user_query(params[:deactivated]) + end + + defp paginate(query, page, page_size) do + from(u in query, + limit: ^page_size, + offset: ^((page - 1) * page_size) + ) + end +end diff --git a/lib/pleroma/web/admin_api/views/account_view.ex b/lib/pleroma/web/admin_api/views/account_view.ex new file mode 100644 index 000000000..28bb667d8 --- /dev/null +++ b/lib/pleroma/web/admin_api/views/account_view.ex @@ -0,0 +1,47 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.AdminAPI.AccountView do + use Pleroma.Web, :view + + alias Pleroma.User.Info + alias Pleroma.Web.AdminAPI.AccountView + + def render("index.json", %{users: users, count: count, page_size: page_size}) do + %{ + users: render_many(users, AccountView, "show.json", as: :user), + count: count, + page_size: page_size + } + end + + def render("show.json", %{user: user}) do + %{ + "id" => user.id, + "nickname" => user.nickname, + "deactivated" => user.info.deactivated, + "local" => user.local, + "roles" => Info.roles(user.info), + "tags" => user.tags || [] + } + end + + def render("invite.json", %{invite: invite}) do + %{ + "id" => invite.id, + "token" => invite.token, + "used" => invite.used, + "expires_at" => invite.expires_at, + "uses" => invite.uses, + "max_use" => invite.max_use, + "invite_type" => invite.invite_type + } + end + + def render("invites.json", %{invites: invites}) do + %{ + invites: render_many(invites, AccountView, "invite.json", as: :invite) + } + end +end diff --git a/lib/pleroma/web/auth/authenticator.ex b/lib/pleroma/web/auth/authenticator.ex index 82267c595..89d88af32 100644 --- a/lib/pleroma/web/auth/authenticator.ex +++ b/lib/pleroma/web/auth/authenticator.ex @@ -3,6 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Auth.Authenticator do + alias Pleroma.Registration alias Pleroma.User def implementation do @@ -12,14 +13,33 @@ def implementation do ) end - @callback get_user(Plug.Conn.t()) :: {:ok, User.t()} | {:error, any()} - def get_user(plug), do: implementation().get_user(plug) + @callback get_user(Plug.Conn.t(), Map.t()) :: {:ok, User.t()} | {:error, any()} + def get_user(plug, params), do: implementation().get_user(plug, params) + + @callback create_from_registration(Plug.Conn.t(), Map.t(), Registration.t()) :: + {:ok, User.t()} | {:error, any()} + def create_from_registration(plug, params, registration), + do: implementation().create_from_registration(plug, params, registration) + + @callback get_registration(Plug.Conn.t(), Map.t()) :: + {:ok, Registration.t()} | {:error, any()} + def get_registration(plug, params), + do: implementation().get_registration(plug, params) @callback handle_error(Plug.Conn.t(), any()) :: any() def handle_error(plug, error), do: implementation().handle_error(plug, error) @callback auth_template() :: String.t() | nil def auth_template do - implementation().auth_template() || Pleroma.Config.get(:auth_template, "show.html") + # Note: `config :pleroma, :auth_template, "..."` support is deprecated + implementation().auth_template() || + Pleroma.Config.get([:auth, :auth_template], 
Pleroma.Config.get(:auth_template)) || + "show.html" + end + + @callback oauth_consumer_template() :: String.t() | nil + def oauth_consumer_template do + implementation().oauth_consumer_template() || + Pleroma.Config.get([:auth, :oauth_consumer_template], "consumer.html") end end diff --git a/lib/pleroma/web/auth/ldap_authenticator.ex b/lib/pleroma/web/auth/ldap_authenticator.ex new file mode 100644 index 000000000..8b6d5a77f --- /dev/null +++ b/lib/pleroma/web/auth/ldap_authenticator.ex @@ -0,0 +1,150 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.Auth.LDAPAuthenticator do + alias Pleroma.User + + require Logger + + @behaviour Pleroma.Web.Auth.Authenticator + @base Pleroma.Web.Auth.PleromaAuthenticator + + @connection_timeout 10_000 + @search_timeout 10_000 + + defdelegate get_registration(conn, params), to: @base + + defdelegate create_from_registration(conn, params, registration), to: @base + + def get_user(%Plug.Conn{} = conn, params) do + if Pleroma.Config.get([:ldap, :enabled]) do + {name, password} = + case params do + %{"authorization" => %{"name" => name, "password" => password}} -> + {name, password} + + %{"grant_type" => "password", "username" => name, "password" => password} -> + {name, password} + end + + case ldap_user(name, password) do + %User{} = user -> + {:ok, user} + + {:error, {:ldap_connection_error, _}} -> + # When LDAP is unavailable, try default authenticator + @base.get_user(conn, params) + + error -> + error + end + else + # Fall back to default authenticator + @base.get_user(conn, params) + end + end + + def handle_error(%Plug.Conn{} = _conn, error) do + error + end + + def auth_template, do: nil + + def oauth_consumer_template, do: nil + + defp ldap_user(name, password) do + ldap = Pleroma.Config.get(:ldap, []) + host = Keyword.get(ldap, :host, "localhost") + port = Keyword.get(ldap, :port, 389) + ssl = Keyword.get(ldap, :ssl, false) + sslopts = Keyword.get(ldap, :sslopts, []) + + options = + [{:port, port}, {:ssl, ssl}, {:timeout, @connection_timeout}] ++ + if sslopts != [], do: [{:sslopts, sslopts}], else: [] + + case :eldap.open([to_charlist(host)], options) do + {:ok, connection} -> + try do + if Keyword.get(ldap, :tls, false) do + :application.ensure_all_started(:ssl) + + case :eldap.start_tls( + connection, + Keyword.get(ldap, :tlsopts, []), + @connection_timeout + ) do + :ok -> + :ok + + error -> + Logger.error("Could not start TLS: #{inspect(error)}") + end + end + + bind_user(connection, ldap, name, password) + after + :eldap.close(connection) + end + + {:error, error} -> + Logger.error("Could not open LDAP connection: #{inspect(error)}") + {:error, {:ldap_connection_error, error}} + end + end + + defp bind_user(connection, ldap, name, password) do + uid = Keyword.get(ldap, :uid, "cn") + base = Keyword.get(ldap, :base) + + case :eldap.simple_bind(connection, "#{uid}=#{name},#{base}", password) do + :ok -> + case User.get_by_nickname_or_email(name) do + %User{} = user -> + user + + _ -> + register_user(connection, base, uid, name, password) + end + + error -> + error + end + end + + defp register_user(connection, base, uid, name, password) do + case :eldap.search(connection, [ + {:base, to_charlist(base)}, + {:filter, :eldap.equalityMatch(to_charlist(uid), to_charlist(name))}, + {:scope, :eldap.wholeSubtree()}, + {:attributes, ['mail', 'email']}, + {:timeout, @search_timeout} + ]) do + {:ok, {:eldap_search_result, [{:eldap_entry, _, 
attributes}], _}} -> + with {_, [mail]} <- List.keyfind(attributes, 'mail', 0) do + params = %{ + email: :erlang.list_to_binary(mail), + name: name, + nickname: name, + password: password, + password_confirmation: password + } + + changeset = User.register_changeset(%User{}, params) + + case User.register(changeset) do + {:ok, user} -> user + error -> error + end + else + _ -> + Logger.error("Could not find LDAP attribute mail: #{inspect(attributes)}") + {:error, :ldap_registration_missing_attributes} + end + + error -> + error + end + end +end diff --git a/lib/pleroma/web/auth/pleroma_authenticator.ex b/lib/pleroma/web/auth/pleroma_authenticator.ex index 3cc19af01..c826adb4c 100644 --- a/lib/pleroma/web/auth/pleroma_authenticator.ex +++ b/lib/pleroma/web/auth/pleroma_authenticator.ex @@ -3,13 +3,22 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Auth.PleromaAuthenticator do - alias Pleroma.User alias Comeonin.Pbkdf2 + alias Pleroma.Registration + alias Pleroma.Repo + alias Pleroma.User @behaviour Pleroma.Web.Auth.Authenticator - def get_user(%Plug.Conn{} = conn) do - %{"authorization" => %{"name" => name, "password" => password}} = conn.params + def get_user(%Plug.Conn{} = _conn, params) do + {name, password} = + case params do + %{"authorization" => %{"name" => name, "password" => password}} -> + {name, password} + + %{"grant_type" => "password", "username" => name, "password" => password} -> + {name, password} + end with {_, %User{} = user} <- {:user, User.get_by_nickname_or_email(name)}, {_, true} <- {:checkpw, Pbkdf2.checkpw(password, user.password_hash)} do @@ -20,9 +29,69 @@ def get_user(%Plug.Conn{} = conn) do end end + def get_registration( + %Plug.Conn{assigns: %{ueberauth_auth: %{provider: provider, uid: uid} = auth}}, + _params + ) do + registration = Registration.get_by_provider_uid(provider, uid) + + if registration do + {:ok, registration} + else + info = auth.info + + Registration.changeset(%Registration{}, %{ + provider: to_string(provider), + uid: to_string(uid), + info: %{ + "nickname" => info.nickname, + "email" => info.email, + "name" => info.name, + "description" => info.description + } + }) + |> Repo.insert() + end + end + + def get_registration(%Plug.Conn{} = _conn, _params), do: {:error, :missing_credentials} + + def create_from_registration(_conn, params, registration) do + nickname = value([params["nickname"], Registration.nickname(registration)]) + email = value([params["email"], Registration.email(registration)]) + name = value([params["name"], Registration.name(registration)]) || nickname + bio = value([params["bio"], Registration.description(registration)]) + + random_password = :crypto.strong_rand_bytes(64) |> Base.encode64() + + with {:ok, new_user} <- + User.register_changeset( + %User{}, + %{ + email: email, + nickname: nickname, + name: name, + bio: bio, + password: random_password, + password_confirmation: random_password + }, + external: true, + confirmed: true + ) + |> Repo.insert(), + {:ok, _} <- + Registration.changeset(registration, %{user_id: new_user.id}) |> Repo.update() do + {:ok, new_user} + end + end + + defp value(list), do: Enum.find(list, &(to_string(&1) != "")) + def handle_error(%Plug.Conn{} = _conn, error) do error end def auth_template, do: nil + + def oauth_consumer_template, do: nil end diff --git a/lib/pleroma/web/channels/user_socket.ex b/lib/pleroma/web/channels/user_socket.ex index aed8475fd..6503979a1 100644 --- a/lib/pleroma/web/channels/user_socket.ex +++ b/lib/pleroma/web/channels/user_socket.ex @@ -23,8 
+23,8 @@ defmodule Pleroma.Web.UserSocket do # performing token verification on connect. def connect(%{"token" => token}, socket) do with true <- Pleroma.Config.get([:chat, :enabled]), - {:ok, user_id} <- Phoenix.Token.verify(socket, "user socket", token, max_age: 84600), - %User{} = user <- Pleroma.Repo.get(User, user_id) do + {:ok, user_id} <- Phoenix.Token.verify(socket, "user socket", token, max_age: 84_600), + %User{} = user <- Pleroma.User.get_by_id(user_id) do {:ok, assign(socket, :user_name, user.nickname)} else _e -> :error diff --git a/lib/pleroma/web/chat_channel.ex b/lib/pleroma/web/chat_channel.ex index fe63ede66..f63f4bda1 100644 --- a/lib/pleroma/web/chat_channel.ex +++ b/lib/pleroma/web/chat_channel.ex @@ -4,8 +4,8 @@ defmodule Pleroma.Web.ChatChannel do use Phoenix.Channel - alias Pleroma.Web.ChatChannel.ChatChannelState alias Pleroma.User + alias Pleroma.Web.ChatChannel.ChatChannelState def join("chat:public", _message, socket) do send(self(), :after_join) @@ -48,7 +48,7 @@ def add_message(message) do end) end - def messages() do + def messages do Agent.get(__MODULE__, fn state -> state[:messages] |> Enum.reverse() end) end end diff --git a/lib/pleroma/web/common_api/common_api.ex b/lib/pleroma/web/common_api/common_api.ex index 55a9c2572..74babdf14 100644 --- a/lib/pleroma/web/common_api/common_api.ex +++ b/lib/pleroma/web/common_api/common_api.ex @@ -3,14 +3,13 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.CommonAPI do - alias Pleroma.User - alias Pleroma.Repo alias Pleroma.Activity + alias Pleroma.Formatter alias Pleroma.Object alias Pleroma.ThreadMute + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Utils - alias Pleroma.Formatter import Pleroma.Web.CommonAPI.Utils @@ -27,10 +26,47 @@ def follow(follower, followed) do end end + def unfollow(follower, unfollowed) do + with {:ok, follower, _follow_activity} <- User.unfollow(follower, unfollowed), + {:ok, _activity} <- ActivityPub.unfollow(follower, unfollowed) do + {:ok, follower} + end + end + + def accept_follow_request(follower, followed) do + with {:ok, follower} <- User.maybe_follow(follower, followed), + %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), + {:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "accept"), + {:ok, _activity} <- + ActivityPub.accept(%{ + to: [follower.ap_id], + actor: followed, + object: follow_activity.data["id"], + type: "Accept" + }) do + {:ok, follower} + end + end + + def reject_follow_request(follower, followed) do + with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), + {:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "reject"), + {:ok, _activity} <- + ActivityPub.reject(%{ + to: [follower.ap_id], + actor: followed, + object: follow_activity.data["id"], + type: "Reject" + }) do + {:ok, follower} + end + end + def delete(activity_id, user) do - with %Activity{data: %{"object" => %{"id" => object_id}}} <- Repo.get(Activity, activity_id), - %Object{} = object <- Object.normalize(object_id), - true <- user.info.is_moderator || user.ap_id == object.data["actor"], + with %Activity{data: %{"object" => _}} = activity <- + Activity.get_by_id_with_object(activity_id), + %Object{} = object <- Object.normalize(activity), + true <- User.superuser?(user) || user.ap_id == object.data["actor"], {:ok, _} <- unpin(activity_id, user), {:ok, delete} <- ActivityPub.delete(object) do {:ok, delete} @@ -39,7 +75,7 @@ def delete(activity_id, 
user) do def repeat(id_or_ap_id, user) do with %Activity{} = activity <- get_by_id_or_ap_id(id_or_ap_id), - object <- Object.normalize(activity.data["object"]["id"]), + object <- Object.normalize(activity), nil <- Utils.get_existing_announce(user.ap_id, object) do ActivityPub.announce(user, object) else @@ -50,7 +86,7 @@ def repeat(id_or_ap_id, user) do def unrepeat(id_or_ap_id, user) do with %Activity{} = activity <- get_by_id_or_ap_id(id_or_ap_id), - object <- Object.normalize(activity.data["object"]["id"]) do + object <- Object.normalize(activity) do ActivityPub.unannounce(user, object) else _ -> @@ -60,7 +96,7 @@ def unrepeat(id_or_ap_id, user) do def favorite(id_or_ap_id, user) do with %Activity{} = activity <- get_by_id_or_ap_id(id_or_ap_id), - object <- Object.normalize(activity.data["object"]["id"]), + object <- Object.normalize(activity), nil <- Utils.get_existing_like(user.ap_id, object) do ActivityPub.like(user, object) else @@ -71,7 +107,7 @@ def favorite(id_or_ap_id, user) do def unfavorite(id_or_ap_id, user) do with %Activity{} = activity <- get_by_id_or_ap_id(id_or_ap_id), - object <- Object.normalize(activity.data["object"]["id"]) do + object <- Object.normalize(activity) do ActivityPub.unlike(user, object) else _ -> @@ -88,8 +124,8 @@ def get_visibility(%{"in_reply_to_status_id" => status_id}) when not is_nil(stat nil -> "public" - inReplyTo -> - Pleroma.Web.MastodonAPI.StatusView.get_visibility(inReplyTo.data["object"]) + in_reply_to -> + Pleroma.Web.MastodonAPI.StatusView.get_visibility(in_reply_to.data["object"]) end end @@ -101,15 +137,16 @@ def post(user, %{"status" => status} = data) do with status <- String.trim(status), attachments <- attachments_from_ids(data), - inReplyTo <- get_replied_to_activity(data["in_reply_to_status_id"]), + in_reply_to <- get_replied_to_activity(data["in_reply_to_status_id"]), {content_html, mentions, tags} <- make_content_html( status, attachments, - data + data, + visibility ), - {to, cc} <- to_for_user_and_mentions(user, mentions, inReplyTo, visibility), - context <- make_context(inReplyTo), + {to, cc} <- to_for_user_and_mentions(user, mentions, in_reply_to, visibility), + context <- make_context(in_reply_to), cw <- data["spoiler_text"], full_payload <- String.trim(status <> (data["spoiler_text"] || "")), length when length in 1..limit <- String.length(full_payload), @@ -120,7 +157,7 @@ def post(user, %{"status" => status} = data) do context, content_html, attachments, - inReplyTo, + in_reply_to, tags, cw, cc @@ -130,18 +167,21 @@ def post(user, %{"status" => status} = data) do object, "emoji", (Formatter.get_emoji(status) ++ Formatter.get_emoji(data["spoiler_text"])) - |> Enum.reduce(%{}, fn {name, file}, acc -> + |> Enum.reduce(%{}, fn {name, file, _}, acc -> Map.put(acc, name, "#{Pleroma.Web.Endpoint.static_url()}#{file}") end) ) do res = - ActivityPub.create(%{ - to: to, - actor: user, - context: context, - object: object, - additional: %{"cc" => cc, "directMessage" => visibility == "direct"} - }) + ActivityPub.create( + %{ + to: to, + actor: user, + context: context, + object: object, + additional: %{"cc" => cc, "directMessage" => visibility == "direct"} + }, + Pleroma.Web.ControllerHelper.truthy_param?(data["preview"]) || false + ) res end @@ -248,14 +288,9 @@ def report(user, data) do actor: user, account: account, statuses: statuses, - content: content_html + content: content_html, + forward: data["forward"] || false }) do - Enum.each(User.all_superusers(), fn superuser -> - superuser - |> Pleroma.AdminEmail.report(user, 
account, statuses, content_html) - |> Pleroma.Mailer.deliver_async() - end) - {:ok, activity} else {:error, err} -> {:error, err} @@ -263,4 +298,24 @@ def report(user, data) do {:account, nil} -> {:error, "Account not found"} end end + + def hide_reblogs(user, muted) do + ap_id = muted.ap_id + + if ap_id not in user.info.muted_reblogs do + info_changeset = User.Info.add_reblog_mute(user.info, ap_id) + changeset = Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_changeset) + User.update_and_set_cache(changeset) + end + end + + def show_reblogs(user, muted) do + ap_id = muted.ap_id + + if ap_id in user.info.muted_reblogs do + info_changeset = User.Info.remove_reblog_mute(user.info, ap_id) + changeset = Ecto.Changeset.change(user) |> Ecto.Changeset.put_embed(:info, info_changeset) + User.update_and_set_cache(changeset) + end + end end diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex index 60d1185d3..7b9f0ea06 100644 --- a/lib/pleroma/web/common_api/utils.ex +++ b/lib/pleroma/web/common_api/utils.ex @@ -6,24 +6,28 @@ defmodule Pleroma.Web.CommonAPI.Utils do alias Calendar.Strftime alias Comeonin.Pbkdf2 alias Pleroma.Activity + alias Pleroma.Config alias Pleroma.Formatter alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User - alias Pleroma.Config + alias Pleroma.Web.ActivityPub.Utils + alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.Endpoint alias Pleroma.Web.MediaProxy - alias Pleroma.Web.ActivityPub.Utils + + require Logger # This is a hack for twidere. def get_by_id_or_ap_id(id) do - activity = Activity.get_by_id(id) || Activity.get_create_by_object_ap_id(id) + activity = + Activity.get_by_id_with_object(id) || Activity.get_create_by_object_ap_id_with_object(id) activity && if activity.data["type"] == "Create" do activity else - Activity.get_create_by_object_ap_id(activity.data["object"]) + Activity.get_create_by_object_ap_id_with_object(activity.data["object"]) end end @@ -101,7 +105,8 @@ def to_for_user_and_mentions(_user, mentions, inReplyTo, "direct") do def make_content_html( status, attachments, - data + data, + visibility ) do no_attachment_links = data @@ -110,8 +115,15 @@ def make_content_html( content_type = get_content_type(data["content_type"]) + options = + if visibility == "direct" && Config.get([:instance, :safe_dm_mentions]) do + [safe_mention: true] + else + [] + end + status - |> format_input(content_type) + |> format_input(content_type, options) |> maybe_add_attachments(attachments, no_attachment_links) |> maybe_add_nsfw_tag(data) end @@ -231,15 +243,21 @@ def format_asctime(date) do Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y") end - def date_to_asctime(date) do - with {:ok, date, _offset} <- date |> DateTime.from_iso8601() do + def date_to_asctime(date) when is_binary(date) do + with {:ok, date, _offset} <- DateTime.from_iso8601(date) do format_asctime(date) else _e -> + Logger.warn("Date #{date} in wrong format, must be ISO 8601") "" end end + def date_to_asctime(date) do + Logger.warn("Date #{date} in wrong format, must be ISO 8601") + "" + end + def to_masto_date(%NaiveDateTime{} = date) do date |> NaiveDateTime.to_iso8601() @@ -266,7 +284,7 @@ defp shortname(name) do end def confirm_current_password(user, password) do - with %User{local: true} = db_user <- Repo.get(User, user.id), + with %User{local: true} = db_user <- User.get_by_id(user.id), true <- Pbkdf2.checkpw(password, db_user.password_hash) do {:ok, db_user} else @@ -276,7 +294,7 @@ def confirm_current_password(user, password) do 
def emoji_from_profile(%{info: _info} = user) do (Formatter.get_emoji(user.bio) ++ Formatter.get_emoji(user.name)) - |> Enum.map(fn {shortcode, url} -> + |> Enum.map(fn {shortcode, url, _} -> %{ "type" => "Emoji", "icon" => %{"type" => "Image", "url" => "#{Endpoint.url()}#{url}"}, @@ -294,10 +312,10 @@ def maybe_notify_to_recipients( def maybe_notify_mentioned_recipients( recipients, - %Activity{data: %{"to" => _to, "type" => type} = data} = _activity + %Activity{data: %{"to" => _to, "type" => type} = data} = activity ) when type == "Create" do - object = Object.normalize(data["object"]) + object = Object.normalize(activity) object_data = cond do @@ -318,6 +336,24 @@ def maybe_notify_mentioned_recipients( def maybe_notify_mentioned_recipients(recipients, _), do: recipients + def maybe_notify_subscribers( + recipients, + %Activity{data: %{"actor" => actor, "type" => type}} = activity + ) + when type == "Create" do + with %User{} = user <- User.get_cached_by_ap_id(actor) do + subscriber_ids = + user + |> User.subscribers() + |> Enum.filter(&Visibility.visible_for_user?(activity, &1)) + |> Enum.map(& &1.ap_id) + + recipients ++ subscriber_ids + end + end + + def maybe_notify_subscribers(recipients, _), do: recipients + def maybe_extract_mentions(%{"tag" => tag}) do tag |> Enum.filter(fn x -> is_map(x) end) @@ -344,4 +380,33 @@ def get_report_statuses(%User{ap_id: actor}, %{"status_ids" => status_ids}) do end def get_report_statuses(_, _), do: {:ok, nil} + + # DEPRECATED mostly, context objects are now created at insertion time. + def context_to_conversation_id(context) do + with %Object{id: id} <- Object.get_cached_by_ap_id(context) do + id + else + _e -> + changeset = Object.context_mapping(context) + + case Repo.insert(changeset) do + {:ok, %{id: id}} -> + id + + # This should be solved by an upsert, but it seems ecto + # has problems accessing the constraint inside the jsonb. 
+ {:error, _} -> + Object.get_cached_by_ap_id(context).id + end + end + end + + def conversation_id_to_context(id) do + with %Object{data: %{"id" => context}} <- Repo.get(Object, id) do + context + else + _e -> + {:error, "No such conversation"} + end + end end diff --git a/lib/pleroma/web/controller_helper.ex b/lib/pleroma/web/controller_helper.ex index 5915ea40e..181483664 100644 --- a/lib/pleroma/web/controller_helper.ex +++ b/lib/pleroma/web/controller_helper.ex @@ -5,8 +5,14 @@ defmodule Pleroma.Web.ControllerHelper do use Pleroma.Web, :controller + # As in MastoAPI, per https://api.rubyonrails.org/classes/ActiveModel/Type/Boolean.html + @falsy_param_values [false, 0, "0", "f", "F", "false", "FALSE", "off", "OFF"] + def truthy_param?(blank_value) when blank_value in [nil, ""], do: nil + def truthy_param?(value), do: value not in @falsy_param_values + def oauth_scopes(params, default) do - # Note: `scopes` is used by Mastodon — supporting it but sticking to OAuth's standard `scope` wherever we control it + # Note: `scopes` is used by Mastodon — supporting it but sticking to + # OAuth's standard `scope` wherever we control it Pleroma.Web.OAuth.parse_scopes(params["scope"] || params["scopes"], default) end diff --git a/lib/pleroma/web/endpoint.ex b/lib/pleroma/web/endpoint.ex index 3eed047ca..1633477c3 100644 --- a/lib/pleroma/web/endpoint.ex +++ b/lib/pleroma/web/endpoint.ex @@ -25,7 +25,8 @@ defmodule Pleroma.Web.Endpoint do at: "/", from: :pleroma, only: - ~w(index.html static finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc) + ~w(index.html robots.txt static finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc) + # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength ) # Code reloading can be explicitly enabled under the @@ -50,11 +51,22 @@ defmodule Pleroma.Web.Endpoint do plug(Plug.MethodOverride) plug(Plug.Head) + secure_cookies = Pleroma.Config.get([__MODULE__, :secure_cookie_flag]) + cookie_name = - if Application.get_env(:pleroma, Pleroma.Web.Endpoint) |> Keyword.get(:secure_cookie_flag), + if secure_cookies, do: "__Host-pleroma_key", else: "pleroma_key" + same_site = + if Pleroma.Config.oauth_consumer_enabled?() do + # Note: "SameSite=Strict" prevents sign in with external OAuth provider + # (there would be no cookies during callback request from OAuth provider) + "SameSite=Lax" + else + "SameSite=Strict" + end + # The session will be stored in the cookie and signed, # this means its contents can be read but not tampered with. # Set :encryption_salt if you would also like to encrypt it. 
@@ -64,11 +76,30 @@ defmodule Pleroma.Web.Endpoint do key: cookie_name, signing_salt: {Pleroma.Config, :get, [[__MODULE__, :signing_salt], "CqaoopA2"]}, http_only: true, - secure: - Application.get_env(:pleroma, Pleroma.Web.Endpoint) |> Keyword.get(:secure_cookie_flag), - extra: "SameSite=Strict" + secure: secure_cookies, + extra: same_site ) + # Note: the plug and its configuration is compile-time this can't be upstreamed yet + if proxies = Pleroma.Config.get([__MODULE__, :reverse_proxies]) do + plug(RemoteIp, proxies: proxies) + end + + defmodule Instrumenter do + use Prometheus.PhoenixInstrumenter + end + + defmodule PipelineInstrumenter do + use Prometheus.PlugPipelineInstrumenter + end + + defmodule MetricsExporter do + use Prometheus.PlugExporter + end + + plug(PipelineInstrumenter) + plug(MetricsExporter) + plug(Pleroma.Web.Router) @doc """ diff --git a/lib/pleroma/web/federator/federator.ex b/lib/pleroma/web/federator/federator.ex index fbfe97dbc..c47328e13 100644 --- a/lib/pleroma/web/federator/federator.ex +++ b/lib/pleroma/web/federator/federator.ex @@ -5,65 +5,64 @@ defmodule Pleroma.Web.Federator do alias Pleroma.Activity alias Pleroma.User - alias Pleroma.Web.WebFinger - alias Pleroma.Web.Websub - alias Pleroma.Web.Salmon alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.ActivityPub.Relay alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Utils + alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.Federator.RetryQueue alias Pleroma.Web.OStatus - alias Pleroma.Jobs + alias Pleroma.Web.Salmon + alias Pleroma.Web.WebFinger + alias Pleroma.Web.Websub require Logger @websub Application.get_env(:pleroma, :websub) @ostatus Application.get_env(:pleroma, :ostatus) - def init() do + def init do # 1 minute - Process.sleep(1000 * 60 * 1) + Process.sleep(1000 * 60) refresh_subscriptions() end # Client API def incoming_doc(doc) do - Jobs.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc]) + PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc]) end def incoming_ap_doc(params) do - Jobs.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params]) + PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params]) end def publish(activity, priority \\ 1) do - Jobs.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority) + PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority) end def publish_single_ap(params) do - Jobs.enqueue(:federator_outgoing, __MODULE__, [:publish_single_ap, params]) + PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish_single_ap, params]) end def publish_single_websub(websub) do - Jobs.enqueue(:federator_outgoing, __MODULE__, [:publish_single_websub, websub]) + PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish_single_websub, websub]) end def verify_websub(websub) do - Jobs.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub]) + PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub]) end def request_subscription(sub) do - Jobs.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub]) + PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub]) end - def refresh_subscriptions() do - Jobs.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions]) + def refresh_subscriptions do + PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions]) 
end def publish_single_salmon(params) do - Jobs.enqueue(:federator_outgoing, __MODULE__, [:publish_single_salmon, params]) + PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish_single_salmon, params]) end # Job Worker Callbacks diff --git a/lib/pleroma/web/federator/retry_queue.ex b/lib/pleroma/web/federator/retry_queue.ex index e0ce251d2..71e49494f 100644 --- a/lib/pleroma/web/federator/retry_queue.ex +++ b/lib/pleroma/web/federator/retry_queue.ex @@ -13,7 +13,7 @@ def init(args) do {:ok, %{args | queue_table: queue_table, running_jobs: :sets.new()}} end - def start_link() do + def start_link do enabled = if Mix.env() == :test, do: true, else: Pleroma.Config.get([__MODULE__, :enabled], false) @@ -39,11 +39,11 @@ def enqueue(data, transport, retries \\ 0) do GenServer.cast(__MODULE__, {:maybe_enqueue, data, transport, retries + 1}) end - def get_stats() do + def get_stats do GenServer.call(__MODULE__, :get_stats) end - def reset_stats() do + def reset_stats do GenServer.call(__MODULE__, :reset_stats) end @@ -55,7 +55,7 @@ def get_retry_params(retries) do end end - def get_retry_timer_interval() do + def get_retry_timer_interval do Pleroma.Config.get([:retry_queue, :interval], 1000) end @@ -231,7 +231,7 @@ defp growth_function(retries) do end end - defp maybe_kickoff_timer() do + defp maybe_kickoff_timer do GenServer.cast(__MODULE__, :kickoff_timer) end end diff --git a/lib/pleroma/web/mastodon_api/mastodon_api.ex b/lib/pleroma/web/mastodon_api/mastodon_api.ex index 8b1378917..382f07e6b 100644 --- a/lib/pleroma/web/mastodon_api/mastodon_api.ex +++ b/lib/pleroma/web/mastodon_api/mastodon_api.ex @@ -1 +1,58 @@ +defmodule Pleroma.Web.MastodonAPI.MastodonAPI do + import Ecto.Query + import Ecto.Changeset + alias Pleroma.Activity + alias Pleroma.Notification + alias Pleroma.Pagination + alias Pleroma.ScheduledActivity + alias Pleroma.User + + def get_followers(user, params \\ %{}) do + user + |> User.get_followers_query() + |> Pagination.fetch_paginated(params) + end + + def get_friends(user, params \\ %{}) do + user + |> User.get_friends_query() + |> Pagination.fetch_paginated(params) + end + + def get_notifications(user, params \\ %{}) do + options = cast_params(params) + + user + |> Notification.for_user_query() + |> restrict(:exclude_types, options) + |> Pagination.fetch_paginated(params) + end + + def get_scheduled_activities(user, params \\ %{}) do + user + |> ScheduledActivity.for_user_query() + |> Pagination.fetch_paginated(params) + end + + defp cast_params(params) do + param_types = %{ + exclude_types: {:array, :string} + } + + changeset = cast({%{}, param_types}, params, Map.keys(param_types)) + changeset.changes + end + + defp restrict(query, :exclude_types, %{exclude_types: mastodon_types = [_ | _]}) do + ap_types = + mastodon_types + |> Enum.map(&Activity.from_mastodon_notification_type/1) + |> Enum.filter(& &1) + + query + |> where([q, a], not fragment("? 
@> ARRAY[?->>'type']::varchar[]", ^ap_types, a.data)) + end + + defp restrict(query, _, _), do: query +end diff --git a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex index 21ebef6b4..ed082abdf 100644 --- a/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex +++ b/lib/pleroma/web/mastodon_api/mastodon_api_controller.ex @@ -4,30 +4,32 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do use Pleroma.Web, :controller + + alias Ecto.Changeset alias Pleroma.Activity alias Pleroma.Config alias Pleroma.Filter alias Pleroma.Notification alias Pleroma.Object alias Pleroma.Repo + alias Pleroma.ScheduledActivity alias Pleroma.Stats alias Pleroma.User alias Pleroma.Web + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.CommonAPI - alias Pleroma.Web.MediaProxy - alias Pleroma.Web.Push - alias Push.Subscription - alias Pleroma.Web.MastodonAPI.AccountView + alias Pleroma.Web.MastodonAPI.AppView alias Pleroma.Web.MastodonAPI.FilterView alias Pleroma.Web.MastodonAPI.ListView + alias Pleroma.Web.MastodonAPI.MastodonAPI alias Pleroma.Web.MastodonAPI.MastodonView - alias Pleroma.Web.MastodonAPI.PushSubscriptionView - alias Pleroma.Web.MastodonAPI.StatusView + alias Pleroma.Web.MastodonAPI.NotificationView alias Pleroma.Web.MastodonAPI.ReportView - alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.ActivityPub.Utils - alias Pleroma.Web.ActivityPub.Visibility + alias Pleroma.Web.MastodonAPI.ScheduledActivityView + alias Pleroma.Web.MastodonAPI.StatusView + alias Pleroma.Web.MediaProxy alias Pleroma.Web.OAuth.App alias Pleroma.Web.OAuth.Authorization alias Pleroma.Web.OAuth.Token @@ -53,16 +55,9 @@ def create_app(conn, params) do with cs <- App.register_changeset(%App{}, app_attrs), false <- cs.changes[:client_name] == @local_mastodon_name, {:ok, app} <- Repo.insert(cs) do - res = %{ - id: app.id |> to_string, - name: app.client_name, - client_id: app.client_id, - client_secret: app.client_secret, - redirect_uri: app.redirect_uris, - website: app.website - } - - json(conn, res) + conn + |> put_view(AppView) + |> render("show.json", %{app: app}) end end @@ -134,8 +129,16 @@ def verify_credentials(%{assigns: %{user: user}} = conn, _) do json(conn, account) end - def user(%{assigns: %{user: for_user}} = conn, %{"id" => id}) do - with %User{} = user <- Repo.get(User, id), + def verify_app_credentials(%{assigns: %{user: _user, token: token}} = conn, _) do + with %Token{app: %App{} = app} <- Repo.preload(token, :app) do + conn + |> put_view(AppView) + |> render("short.json", %{app: app}) + end + end + + def user(%{assigns: %{user: for_user}} = conn, %{"id" => nickname_or_id}) do + with %User{} = user <- User.get_cached_by_nickname_or_id(nickname_or_id), true <- User.auth_active?(user) || user.id == for_user.id || User.superuser?(for_user) do account = AccountView.render("account.json", %{user: user, for: for_user}) json(conn, account) @@ -163,6 +166,9 @@ def masto_instance(conn, _params) do }, stats: Stats.get_stats(), thumbnail: Web.base_url() <> "/instance/thumbnail.jpeg", + languages: ["en"], + registrations: Pleroma.Config.get([:instance, :registrations_open]), + # Extra (not present in Mastodon): max_toot_chars: Keyword.get(instance, :limit) } @@ -175,14 +181,15 @@ def peers(conn, _params) do defp mastodonized_emoji do Pleroma.Emoji.get_all() - |> Enum.map(fn {shortcode, relative_url} -> + |> Enum.map(fn {shortcode, relative_url, tags} -> url = 
to_string(URI.merge(Web.base_url(), relative_url)) %{ "shortcode" => shortcode, "static_url" => url, "visible_in_picker" => true, - "url" => url + "url" => url, + "tags" => String.split(tags, ",") } end) end @@ -193,6 +200,11 @@ def custom_emojis(conn, _params) do end defp add_link_headers(conn, method, activities, param \\ nil, params \\ %{}) do + params = + conn.params + |> Map.drop(["since_id", "max_id"]) + |> Map.merge(params) + last = List.last(activities) first = List.first(activities) @@ -277,7 +289,7 @@ def public_timeline(%{assigns: %{user: user}} = conn, params) do end def user_statuses(%{assigns: %{user: reading_user}} = conn, params) do - with %User{} = user <- Repo.get(User, params["id"]) do + with %User{} = user <- User.get_by_id(params["id"]) do activities = ActivityPub.fetch_user_activities(user, reading_user, params) conn @@ -300,7 +312,8 @@ def dm_timeline(%{assigns: %{user: user}} = conn, params) do |> Map.put(:visibility, "direct") activities = - ActivityPub.fetch_activities_query([user.ap_id], params) + [user.ap_id] + |> ActivityPub.fetch_activities_query(params) |> Repo.all() conn @@ -355,6 +368,55 @@ def get_context(%{assigns: %{user: user}} = conn, %{"id" => id}) do end end + def scheduled_statuses(%{assigns: %{user: user}} = conn, params) do + with scheduled_activities <- MastodonAPI.get_scheduled_activities(user, params) do + conn + |> add_link_headers(:scheduled_statuses, scheduled_activities) + |> put_view(ScheduledActivityView) + |> render("index.json", %{scheduled_activities: scheduled_activities}) + end + end + + def show_scheduled_status(%{assigns: %{user: user}} = conn, %{"id" => scheduled_activity_id}) do + with %ScheduledActivity{} = scheduled_activity <- + ScheduledActivity.get(user, scheduled_activity_id) do + conn + |> put_view(ScheduledActivityView) + |> render("show.json", %{scheduled_activity: scheduled_activity}) + else + _ -> {:error, :not_found} + end + end + + def update_scheduled_status( + %{assigns: %{user: user}} = conn, + %{"id" => scheduled_activity_id} = params + ) do + with %ScheduledActivity{} = scheduled_activity <- + ScheduledActivity.get(user, scheduled_activity_id), + {:ok, scheduled_activity} <- ScheduledActivity.update(scheduled_activity, params) do + conn + |> put_view(ScheduledActivityView) + |> render("show.json", %{scheduled_activity: scheduled_activity}) + else + nil -> {:error, :not_found} + error -> error + end + end + + def delete_scheduled_status(%{assigns: %{user: user}} = conn, %{"id" => scheduled_activity_id}) do + with %ScheduledActivity{} = scheduled_activity <- + ScheduledActivity.get(user, scheduled_activity_id), + {:ok, scheduled_activity} <- ScheduledActivity.delete(scheduled_activity) do + conn + |> put_view(ScheduledActivityView) + |> render("show.json", %{scheduled_activity: scheduled_activity}) + else + nil -> {:error, :not_found} + error -> error + end + end + def post_status(conn, %{"status" => "", "media_ids" => media_ids} = params) when length(media_ids) > 0 do params = @@ -375,12 +437,27 @@ def post_status(%{assigns: %{user: user}} = conn, %{"status" => _} = params) do _ -> Ecto.UUID.generate() end - {:ok, activity} = - Cachex.fetch!(:idempotency_cache, idempotency_key, fn _ -> CommonAPI.post(user, params) end) + scheduled_at = params["scheduled_at"] - conn - |> put_view(StatusView) - |> try_render("status.json", %{activity: activity, for: user, as: :activity}) + if scheduled_at && ScheduledActivity.far_enough?(scheduled_at) do + with {:ok, scheduled_activity} <- + ScheduledActivity.create(user, 
%{"params" => params, "scheduled_at" => scheduled_at}) do + conn + |> put_view(ScheduledActivityView) + |> render("show.json", %{scheduled_activity: scheduled_activity}) + end + else + params = Map.drop(params, ["scheduled_at"]) + + {:ok, activity} = + Cachex.fetch!(:idempotency_cache, idempotency_key, fn _ -> + CommonAPI.post(user, params) + end) + + conn + |> put_view(StatusView) + |> try_render("status.json", %{activity: activity, for: user, as: :activity}) + end end def delete_status(%{assigns: %{user: user}} = conn, %{"id" => id}) do @@ -498,21 +575,19 @@ def unmute_conversation(%{assigns: %{user: user}} = conn, %{"id" => id}) do end def notifications(%{assigns: %{user: user}} = conn, params) do - notifications = Notification.for_user(user, params) - - result = - notifications - |> Enum.map(fn x -> render_notification(user, x) end) - |> Enum.filter(& &1) + notifications = MastodonAPI.get_notifications(user, params) conn |> add_link_headers(:notifications, notifications) - |> json(result) + |> put_view(NotificationView) + |> render("index.json", %{notifications: notifications, for: user}) end def get_notification(%{assigns: %{user: user}} = conn, %{"id" => id} = _params) do with {:ok, notification} <- Notification.get(user, id) do - json(conn, render_notification(user, notification)) + conn + |> put_view(NotificationView) + |> render("show.json", %{notification: notification, for: user}) else {:error, reason} -> conn @@ -649,9 +724,9 @@ def hashtag_timeline(%{assigns: %{user: user}} = conn, params) do |> render("index.json", %{activities: activities, for: user, as: :activity}) end - def followers(%{assigns: %{user: for_user}} = conn, %{"id" => id}) do - with %User{} = user <- Repo.get(User, id), - {:ok, followers} <- User.get_followers(user) do + def followers(%{assigns: %{user: for_user}} = conn, %{"id" => id} = params) do + with %User{} = user <- User.get_by_id(id), + followers <- MastodonAPI.get_followers(user, params) do followers = cond do for_user && user.id == for_user.id -> followers @@ -660,14 +735,15 @@ def followers(%{assigns: %{user: for_user}} = conn, %{"id" => id}) do end conn + |> add_link_headers(:followers, followers, user) |> put_view(AccountView) |> render("accounts.json", %{users: followers, as: :user}) end end - def following(%{assigns: %{user: for_user}} = conn, %{"id" => id}) do - with %User{} = user <- Repo.get(User, id), - {:ok, followers} <- User.get_friends(user) do + def following(%{assigns: %{user: for_user}} = conn, %{"id" => id} = params) do + with %User{} = user <- User.get_by_id(id), + followers <- MastodonAPI.get_friends(user, params) do followers = cond do for_user && user.id == for_user.id -> followers @@ -676,6 +752,7 @@ def following(%{assigns: %{user: for_user}} = conn, %{"id" => id}) do end conn + |> add_link_headers(:following, followers, user) |> put_view(AccountView) |> render("accounts.json", %{users: followers, as: :user}) end @@ -690,17 +767,8 @@ def follow_requests(%{assigns: %{user: followed}} = conn, _params) do end def authorize_follow_request(%{assigns: %{user: followed}} = conn, %{"id" => id}) do - with %User{} = follower <- Repo.get(User, id), - {:ok, follower} <- User.maybe_follow(follower, followed), - %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), - {:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "accept"), - {:ok, _activity} <- - ActivityPub.accept(%{ - to: [follower.ap_id], - actor: followed, - object: follow_activity.data["id"], - type: "Accept" - }) do + with %User{} = 
follower <- User.get_by_id(id), + {:ok, follower} <- CommonAPI.accept_follow_request(follower, followed) do conn |> put_view(AccountView) |> render("relationship.json", %{user: followed, target: follower}) @@ -713,16 +781,8 @@ def authorize_follow_request(%{assigns: %{user: followed}} = conn, %{"id" => id} end def reject_follow_request(%{assigns: %{user: followed}} = conn, %{"id" => id}) do - with %User{} = follower <- Repo.get(User, id), - %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), - {:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "reject"), - {:ok, _activity} <- - ActivityPub.reject(%{ - to: [follower.ap_id], - actor: followed, - object: follow_activity.data["id"], - type: "Reject" - }) do + with %User{} = follower <- User.get_by_id(id), + {:ok, follower} <- CommonAPI.reject_follow_request(follower, followed) do conn |> put_view(AccountView) |> render("relationship.json", %{user: followed, target: follower}) @@ -735,12 +795,26 @@ def reject_follow_request(%{assigns: %{user: followed}} = conn, %{"id" => id}) d end def follow(%{assigns: %{user: follower}} = conn, %{"id" => id}) do - with %User{} = followed <- Repo.get(User, id), + with %User{} = followed <- User.get_by_id(id), + false <- User.following?(follower, followed), {:ok, follower, followed, _} <- CommonAPI.follow(follower, followed) do conn |> put_view(AccountView) |> render("relationship.json", %{user: follower, target: followed}) else + true -> + followed = User.get_cached_by_id(id) + + {:ok, follower} = + case conn.params["reblogs"] do + true -> CommonAPI.show_reblogs(follower, followed) + false -> CommonAPI.hide_reblogs(follower, followed) + end + + conn + |> put_view(AccountView) + |> render("relationship.json", %{user: follower, target: followed}) + {:error, message} -> conn |> put_resp_content_type("application/json") @@ -749,7 +823,7 @@ def follow(%{assigns: %{user: follower}} = conn, %{"id" => id}) do end def follow(%{assigns: %{user: follower}} = conn, %{"uri" => uri}) do - with %User{} = followed <- Repo.get_by(User, nickname: uri), + with %User{} = followed <- User.get_by_nickname(uri), {:ok, follower, followed, _} <- CommonAPI.follow(follower, followed) do conn |> put_view(AccountView) @@ -763,9 +837,8 @@ def follow(%{assigns: %{user: follower}} = conn, %{"uri" => uri}) do end def unfollow(%{assigns: %{user: follower}} = conn, %{"id" => id}) do - with %User{} = followed <- Repo.get(User, id), - {:ok, _activity} <- ActivityPub.unfollow(follower, followed), - {:ok, follower, _} <- User.unfollow(follower, followed) do + with %User{} = followed <- User.get_by_id(id), + {:ok, follower} <- CommonAPI.unfollow(follower, followed) do conn |> put_view(AccountView) |> render("relationship.json", %{user: follower, target: followed}) @@ -773,7 +846,7 @@ def unfollow(%{assigns: %{user: follower}} = conn, %{"id" => id}) do end def mute(%{assigns: %{user: muter}} = conn, %{"id" => id}) do - with %User{} = muted <- Repo.get(User, id), + with %User{} = muted <- User.get_by_id(id), {:ok, muter} <- User.mute(muter, muted) do conn |> put_view(AccountView) @@ -787,7 +860,7 @@ def mute(%{assigns: %{user: muter}} = conn, %{"id" => id}) do end def unmute(%{assigns: %{user: muter}} = conn, %{"id" => id}) do - with %User{} = muted <- Repo.get(User, id), + with %User{} = muted <- User.get_by_id(id), {:ok, muter} <- User.unmute(muter, muted) do conn |> put_view(AccountView) @@ -808,7 +881,7 @@ def mutes(%{assigns: %{user: user}} = conn, _) do end def block(%{assigns: %{user: blocker}} = 
conn, %{"id" => id}) do - with %User{} = blocked <- Repo.get(User, id), + with %User{} = blocked <- User.get_by_id(id), {:ok, blocker} <- User.block(blocker, blocked), {:ok, _activity} <- ActivityPub.block(blocker, blocked) do conn @@ -823,7 +896,7 @@ def block(%{assigns: %{user: blocker}} = conn, %{"id" => id}) do end def unblock(%{assigns: %{user: blocker}} = conn, %{"id" => id}) do - with %User{} = blocked <- Repo.get(User, id), + with %User{} = blocked <- User.get_by_id(id), {:ok, blocker} <- User.unblock(blocker, blocked), {:ok, _activity} <- ActivityPub.unblock(blocker, blocked) do conn @@ -858,6 +931,34 @@ def unblock_domain(%{assigns: %{user: blocker}} = conn, %{"domain" => domain}) d json(conn, %{}) end + def subscribe(%{assigns: %{user: user}} = conn, %{"id" => id}) do + with %User{} = subscription_target <- User.get_cached_by_id(id), + {:ok, subscription_target} = User.subscribe(user, subscription_target) do + conn + |> put_view(AccountView) + |> render("relationship.json", %{user: user, target: subscription_target}) + else + {:error, message} -> + conn + |> put_resp_content_type("application/json") + |> send_resp(403, Jason.encode!(%{"error" => message})) + end + end + + def unsubscribe(%{assigns: %{user: user}} = conn, %{"id" => id}) do + with %User{} = subscription_target <- User.get_cached_by_id(id), + {:ok, subscription_target} = User.unsubscribe(user, subscription_target) do + conn + |> put_view(AccountView) + |> render("relationship.json", %{user: user, target: subscription_target}) + else + {:error, message} -> + conn + |> put_resp_content_type("application/json") + |> send_resp(403, Jason.encode!(%{"error" => message})) + end + end + def status_search(user, query) do fetched = if Regex.match?(~r/https?:/, query) do @@ -944,12 +1045,14 @@ def account_search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) d end def favourites(%{assigns: %{user: user}} = conn, params) do - activities = + params = params |> Map.put("type", "Create") |> Map.put("favorited_by", user.ap_id) |> Map.put("blocking_user", user) - |> ActivityPub.fetch_public_activities() + + activities = + ActivityPub.fetch_activities([], params) |> Enum.reverse() conn @@ -959,7 +1062,7 @@ def favourites(%{assigns: %{user: user}} = conn, params) do end def bookmarks(%{assigns: %{user: user}} = conn, _) do - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) activities = user.bookmarks @@ -1016,7 +1119,7 @@ def add_to_list(%{assigns: %{user: user}} = conn, %{"id" => id, "account_ids" => accounts |> Enum.each(fn account_id -> with %Pleroma.List{} = list <- Pleroma.List.get(id, user), - %User{} = followed <- Repo.get(User, account_id) do + %User{} = followed <- User.get_by_id(account_id) do Pleroma.List.follow(list, followed) end end) @@ -1028,7 +1131,7 @@ def remove_from_list(%{assigns: %{user: user}} = conn, %{"id" => id, "account_id accounts |> Enum.each(fn account_id -> with %Pleroma.List{} = list <- Pleroma.List.get(id, user), - %User{} = followed <- Repo.get(Pleroma.User, account_id) do + %User{} = followed <- Pleroma.User.get_by_id(account_id) do Pleroma.List.unfollow(list, followed) end end) @@ -1084,9 +1187,7 @@ def list_timeline(%{assigns: %{user: user}} = conn, %{"list_id" => id} = params) end def index(%{assigns: %{user: user}} = conn, _params) do - token = - conn - |> get_session(:oauth_token) + token = get_session(conn, :oauth_token) if user && token do mastodon_emoji = mastodonized_emoji() @@ -1114,7 +1215,8 @@ def index(%{assigns: %{user: user}} = conn, _params) do 
auto_play_gif: false, display_sensitive_media: false, reduce_motion: false, - max_toot_chars: limit + max_toot_chars: limit, + mascot: "/images/pleroma-fox-tan-smol.png" }, rights: %{ delete_others_notice: present?(user.info.is_moderator), @@ -1123,7 +1225,8 @@ def index(%{assigns: %{user: user}} = conn, _params) do compose: %{ me: "#{user.id}", default_privacy: user.info.default_scope, - default_sensitive: false + default_sensitive: false, + allow_content_types: Config.get([:instance, :allowed_post_formats]) }, media_attachments: %{ accept_content_types: [ @@ -1185,6 +1288,7 @@ def index(%{assigns: %{user: user}} = conn, _params) do |> render("index.html", %{initial_state: initial_state, flavour: flavour}) else conn + |> put_session(:return_to, conn.request_path) |> redirect(to: "/web/login") end end @@ -1241,16 +1345,22 @@ defp get_user_flavour(_) do "glitch" end - def login(conn, %{"code" => code}) do + def login(%{assigns: %{user: %User{}}} = conn, _params) do + redirect(conn, to: local_mastodon_root_path(conn)) + end + + @doc "Local Mastodon FE login init action" + def login(conn, %{"code" => auth_token}) do with {:ok, app} <- get_or_make_app(), - %Authorization{} = auth <- Repo.get_by(Authorization, token: code, app_id: app.id), + %Authorization{} = auth <- Repo.get_by(Authorization, token: auth_token, app_id: app.id), {:ok, token} <- Token.exchange_token(app, auth) do conn |> put_session(:oauth_token, token.token) - |> redirect(to: "/web/getting-started") + |> redirect(to: local_mastodon_root_path(conn)) end end + @doc "Local Mastodon FE callback action" def login(conn, _) do with {:ok, app} <- get_or_make_app() do path = @@ -1263,12 +1373,22 @@ def login(conn, _) do scope: Enum.join(app.scopes, " ") ) - conn - |> redirect(to: path) + redirect(conn, to: path) end end - defp get_or_make_app() do + defp local_mastodon_root_path(conn) do + case get_session(conn, :return_to) do + nil -> + mastodon_api_path(conn, :index, ["getting-started"]) + + return_to -> + delete_session(conn, :return_to) + return_to + end + end + + defp get_or_make_app do find_attrs = %{client_name: @local_mastodon_name, redirect_uris: "."} scopes = ["read", "write", "follow", "push"] @@ -1304,7 +1424,7 @@ def logout(conn, _) do def relationship_noop(%{assigns: %{user: user}} = conn, %{"id" => id}) do Logger.debug("Unimplemented, returning unmodified relationship") - with %User{} = target <- Repo.get(User, id) do + with %User{} = target <- User.get_by_id(id) do conn |> put_view(AccountView) |> render("relationship.json", %{user: user, target: target}) @@ -1321,45 +1441,6 @@ def empty_object(conn, _) do json(conn, %{}) end - def render_notification(user, %{id: id, activity: activity, inserted_at: created_at} = _params) do - actor = User.get_cached_by_ap_id(activity.data["actor"]) - parent_activity = Activity.get_create_by_object_ap_id(activity.data["object"]) - mastodon_type = Activity.mastodon_notification_type(activity) - - response = %{ - id: to_string(id), - type: mastodon_type, - created_at: CommonAPI.Utils.to_masto_date(created_at), - account: AccountView.render("account.json", %{user: actor, for: user}) - } - - case mastodon_type do - "mention" -> - response - |> Map.merge(%{ - status: StatusView.render("status.json", %{activity: activity, for: user}) - }) - - "favourite" -> - response - |> Map.merge(%{ - status: StatusView.render("status.json", %{activity: parent_activity, for: user}) - }) - - "reblog" -> - response - |> Map.merge(%{ - status: StatusView.render("status.json", %{activity: parent_activity, for: 
user}) - }) - - "follow" -> - response - - _ -> - nil - end - end - def get_filters(%{assigns: %{user: user}} = conn, _) do filters = Filter.get_filters(user) res = FilterView.render("filters.json", filters: filters) @@ -1419,35 +1500,23 @@ def delete_filter(%{assigns: %{user: user}} = conn, %{"id" => filter_id}) do json(conn, %{}) end - def create_push_subscription(%{assigns: %{user: user, token: token}} = conn, params) do - true = Push.enabled() - Subscription.delete_if_exists(user, token) - {:ok, subscription} = Subscription.create(user, token, params) - view = PushSubscriptionView.render("push_subscription.json", subscription: subscription) - json(conn, view) + # fallback action + # + def errors(conn, {:error, %Changeset{} = changeset}) do + error_message = + changeset + |> Changeset.traverse_errors(fn {message, _opt} -> message end) + |> Enum.map_join(", ", fn {_k, v} -> v end) + + conn + |> put_status(422) + |> json(%{error: error_message}) end - def get_push_subscription(%{assigns: %{user: user, token: token}} = conn, _params) do - true = Push.enabled() - subscription = Subscription.get(user, token) - view = PushSubscriptionView.render("push_subscription.json", subscription: subscription) - json(conn, view) - end - - def update_push_subscription( - %{assigns: %{user: user, token: token}} = conn, - params - ) do - true = Push.enabled() - {:ok, subscription} = Subscription.update(user, token, params) - view = PushSubscriptionView.render("push_subscription.json", subscription: subscription) - json(conn, view) - end - - def delete_push_subscription(%{assigns: %{user: user, token: token}} = conn, _params) do - true = Push.enabled() - {:ok, _response} = Subscription.delete(user, token) - json(conn, %{}) + def errors(conn, {:error, :not_found}) do + conn + |> put_status(404) + |> json(%{error: "Record not found"}) end def errors(conn, _) do @@ -1478,7 +1547,6 @@ def suggestions(%{assigns: %{user: user}} = conn, _) do url, [], adapter: [ - timeout: timeout, recv_timeout: timeout, pool: :default ] @@ -1515,7 +1583,7 @@ def suggestions(%{assigns: %{user: user}} = conn, _) do end def status_card(%{assigns: %{user: user}} = conn, %{"id" => status_id}) do - with %Activity{} = activity <- Repo.get(Activity, status_id), + with %Activity{} = activity <- Activity.get_by_id(status_id), true <- Visibility.visible_for_user?(activity, user) do data = StatusView.render( diff --git a/lib/pleroma/web/mastodon_api/subscription_controller.ex b/lib/pleroma/web/mastodon_api/subscription_controller.ex new file mode 100644 index 000000000..b6c8ff808 --- /dev/null +++ b/lib/pleroma/web/mastodon_api/subscription_controller.ex @@ -0,0 +1,71 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.MastodonAPI.SubscriptionController do + @moduledoc "The module represents functions to manage user subscriptions." 
+ use Pleroma.Web, :controller + + alias Pleroma.Web.Push + alias Pleroma.Web.Push.Subscription + alias Pleroma.Web.MastodonAPI.PushSubscriptionView, as: View + + action_fallback(:errors) + + # Creates PushSubscription + # POST /api/v1/push/subscription + # + def create(%{assigns: %{user: user, token: token}} = conn, params) do + with true <- Push.enabled(), + {:ok, _} <- Subscription.delete_if_exists(user, token), + {:ok, subscription} <- Subscription.create(user, token, params) do + view = View.render("push_subscription.json", subscription: subscription) + json(conn, view) + end + end + + # Gets PushSubscription + # GET /api/v1/push/subscription + # + def get(%{assigns: %{user: user, token: token}} = conn, _params) do + with true <- Push.enabled(), + {:ok, subscription} <- Subscription.get(user, token) do + view = View.render("push_subscription.json", subscription: subscription) + json(conn, view) + end + end + + # Updates PushSubscription + # PUT /api/v1/push/subscription + # + def update(%{assigns: %{user: user, token: token}} = conn, params) do + with true <- Push.enabled(), + {:ok, subscription} <- Subscription.update(user, token, params) do + view = View.render("push_subscription.json", subscription: subscription) + json(conn, view) + end + end + + # Deletes PushSubscription + # DELETE /api/v1/push/subscription + # + def delete(%{assigns: %{user: user, token: token}} = conn, _params) do + with true <- Push.enabled(), + {:ok, _response} <- Subscription.delete(user, token), + do: json(conn, %{}) + end + + # fallback action + # + def errors(conn, {:error, :not_found}) do + conn + |> put_status(404) + |> json("Not found") + end + + def errors(conn, _) do + conn + |> put_status(500) + |> json("Something went wrong") + end +end diff --git a/lib/pleroma/web/mastodon_api/views/account_view.ex b/lib/pleroma/web/mastodon_api/views/account_view.ex index c32f27be2..af56c4149 100644 --- a/lib/pleroma/web/mastodon_api/views/account_view.ex +++ b/lib/pleroma/web/mastodon_api/views/account_view.ex @@ -53,9 +53,10 @@ def render("relationship.json", %{user: %User{} = user, target: %User{} = target blocking: User.blocks?(user, target), muting: User.mutes?(user, target), muting_notifications: false, + subscribing: User.subscribed_to?(user, target), requested: requested, domain_blocking: false, - showing_reblogs: false, + showing_reblogs: User.showing_reblogs?(user, target), endorsed: false } end @@ -117,13 +118,15 @@ defp do_render("account.json", %{user: user} = opts) do }, # Pleroma extension - pleroma: %{ - confirmation_pending: user_info.confirmation_pending, - tags: user.tags, - is_moderator: user.info.is_moderator, - is_admin: user.info.is_admin, - relationship: relationship - } + pleroma: + %{ + confirmation_pending: user_info.confirmation_pending, + tags: user.tags, + is_moderator: user.info.is_moderator, + is_admin: user.info.is_admin, + relationship: relationship + } + |> with_notification_settings(user, opts[:for]) } end @@ -132,4 +135,10 @@ defp username_from_nickname(string) when is_binary(string) do end defp username_from_nickname(_), do: nil + + defp with_notification_settings(data, %User{id: user_id} = user, %User{id: user_id}) do + Map.put(data, :notification_settings, user.info.notification_settings) + end + + defp with_notification_settings(data, _, _), do: data end diff --git a/lib/pleroma/web/mastodon_api/views/admin/account_view.ex b/lib/pleroma/web/mastodon_api/views/admin/account_view.ex deleted file mode 100644 index 74ca13564..000000000 --- 
a/lib/pleroma/web/mastodon_api/views/admin/account_view.ex +++ /dev/null @@ -1,25 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.MastodonAPI.Admin.AccountView do - use Pleroma.Web, :view - - alias Pleroma.Web.MastodonAPI.Admin.AccountView - - def render("index.json", %{users: users, count: count, page_size: page_size}) do - %{ - users: render_many(users, AccountView, "show.json", as: :user), - count: count, - page_size: page_size - } - end - - def render("show.json", %{user: user}) do - %{ - "id" => user.id, - "nickname" => user.nickname, - "deactivated" => user.info.deactivated - } - end -end diff --git a/lib/pleroma/web/mastodon_api/views/app_view.ex b/lib/pleroma/web/mastodon_api/views/app_view.ex new file mode 100644 index 000000000..f52b693a6 --- /dev/null +++ b/lib/pleroma/web/mastodon_api/views/app_view.ex @@ -0,0 +1,41 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.MastodonAPI.AppView do + use Pleroma.Web, :view + + alias Pleroma.Web.OAuth.App + + @vapid_key :web_push_encryption + |> Application.get_env(:vapid_details, []) + |> Keyword.get(:public_key) + + def render("show.json", %{app: %App{} = app}) do + %{ + id: app.id |> to_string, + name: app.client_name, + client_id: app.client_id, + client_secret: app.client_secret, + redirect_uri: app.redirect_uris, + website: app.website + } + |> with_vapid_key() + end + + def render("short.json", %{app: %App{website: webiste, client_name: name}}) do + %{ + name: name, + website: webiste + } + |> with_vapid_key() + end + + defp with_vapid_key(data) do + if @vapid_key do + Map.put(data, "vapid_key", @vapid_key) + else + data + end + end +end diff --git a/lib/pleroma/web/mastodon_api/views/notification_view.ex b/lib/pleroma/web/mastodon_api/views/notification_view.ex new file mode 100644 index 000000000..27e9cab06 --- /dev/null +++ b/lib/pleroma/web/mastodon_api/views/notification_view.ex @@ -0,0 +1,64 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.MastodonAPI.NotificationView do + use Pleroma.Web, :view + + alias Pleroma.Activity + alias Pleroma.Notification + alias Pleroma.User + alias Pleroma.Web.CommonAPI + alias Pleroma.Web.MastodonAPI.AccountView + alias Pleroma.Web.MastodonAPI.NotificationView + alias Pleroma.Web.MastodonAPI.StatusView + + def render("index.json", %{notifications: notifications, for: user}) do + render_many(notifications, NotificationView, "show.json", %{for: user}) + end + + def render("show.json", %{ + notification: %Notification{activity: activity} = notification, + for: user + }) do + actor = User.get_cached_by_ap_id(activity.data["actor"]) + parent_activity = Activity.get_create_by_object_ap_id(activity.data["object"]) + mastodon_type = Activity.mastodon_notification_type(activity) + + response = %{ + id: to_string(notification.id), + type: mastodon_type, + created_at: CommonAPI.Utils.to_masto_date(notification.inserted_at), + account: AccountView.render("account.json", %{user: actor, for: user}), + pleroma: %{ + is_seen: notification.seen + } + } + + case mastodon_type do + "mention" -> + response + |> Map.merge(%{ + status: StatusView.render("status.json", %{activity: activity, for: user}) + }) + + "favourite" -> + response + |> Map.merge(%{ + status: 
StatusView.render("status.json", %{activity: parent_activity, for: user}) + }) + + "reblog" -> + response + |> Map.merge(%{ + status: StatusView.render("status.json", %{activity: parent_activity, for: user}) + }) + + "follow" -> + response + + _ -> + nil + end + end +end diff --git a/lib/pleroma/web/mastodon_api/views/push_subscription_view.ex b/lib/pleroma/web/mastodon_api/views/push_subscription_view.ex index e86b789c5..021489711 100644 --- a/lib/pleroma/web/mastodon_api/views/push_subscription_view.ex +++ b/lib/pleroma/web/mastodon_api/views/push_subscription_view.ex @@ -4,6 +4,7 @@ defmodule Pleroma.Web.MastodonAPI.PushSubscriptionView do use Pleroma.Web, :view + alias Pleroma.Web.Push def render("push_subscription.json", %{subscription: subscription}) do %{ @@ -14,7 +15,5 @@ def render("push_subscription.json", %{subscription: subscription}) do } end - defp server_key do - Keyword.get(Application.get_env(:web_push_encryption, :vapid_details), :public_key) - end + defp server_key, do: Keyword.get(Push.vapid_config(), :public_key) end diff --git a/lib/pleroma/web/mastodon_api/views/scheduled_activity_view.ex b/lib/pleroma/web/mastodon_api/views/scheduled_activity_view.ex new file mode 100644 index 000000000..0aae15ab9 --- /dev/null +++ b/lib/pleroma/web/mastodon_api/views/scheduled_activity_view.ex @@ -0,0 +1,57 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.MastodonAPI.ScheduledActivityView do + use Pleroma.Web, :view + + alias Pleroma.ScheduledActivity + alias Pleroma.Web.CommonAPI + alias Pleroma.Web.MastodonAPI.ScheduledActivityView + alias Pleroma.Web.MastodonAPI.StatusView + + def render("index.json", %{scheduled_activities: scheduled_activities}) do + render_many(scheduled_activities, ScheduledActivityView, "show.json") + end + + def render("show.json", %{scheduled_activity: %ScheduledActivity{} = scheduled_activity}) do + %{ + id: to_string(scheduled_activity.id), + scheduled_at: CommonAPI.Utils.to_masto_date(scheduled_activity.scheduled_at), + params: status_params(scheduled_activity.params) + } + |> with_media_attachments(scheduled_activity) + end + + defp with_media_attachments(data, %{params: %{"media_attachments" => media_attachments}}) do + try do + attachments = render_many(media_attachments, StatusView, "attachment.json", as: :attachment) + Map.put(data, :media_attachments, attachments) + rescue + _ -> data + end + end + + defp with_media_attachments(data, _), do: data + + defp status_params(params) do + data = %{ + text: params["status"], + sensitive: params["sensitive"], + spoiler_text: params["spoiler_text"], + visibility: params["visibility"], + scheduled_at: params["scheduled_at"], + poll: params["poll"], + in_reply_to_id: params["in_reply_to_id"] + } + + data = + if media_ids = params["media_ids"] do + Map.put(data, :media_ids, media_ids) + else + data + end + + data + end +end diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 3468c0e1c..d4a8e4fff 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -46,6 +46,14 @@ defp get_user(ap_id) do end end + defp get_context_id(%{data: %{"context_id" => context_id}}) when not is_nil(context_id), + do: context_id + + defp get_context_id(%{data: %{"context" => context}}) when is_binary(context), + do: Utils.context_to_conversation_id(context) + + defp get_context_id(_), do: 
nil + def render("index.json", opts) do replied_to_activities = get_replied_to_activities(opts.activities) @@ -102,7 +110,10 @@ def render( website: nil }, language: nil, - emojis: [] + emojis: [], + pleroma: %{ + local: activity.local + } } end @@ -136,10 +147,37 @@ def render("status.json", %{activity: %{data: %{"object" => object}} = activity} content = object |> render_content() - |> HTML.get_cached_scrubbed_html_for_object( + + content_html = + content + |> HTML.get_cached_scrubbed_html_for_activity( User.html_filter_policy(opts[:for]), activity, - __MODULE__ + "mastoapi:content" + ) + + content_plaintext = + content + |> HTML.get_cached_stripped_html_for_activity( + activity, + "mastoapi:content" + ) + + summary = object["summary"] || "" + + summary_html = + summary + |> HTML.get_cached_scrubbed_html_for_activity( + User.html_filter_policy(opts[:for]), + activity, + "mastoapi:summary" + ) + + summary_plaintext = + summary + |> HTML.get_cached_stripped_html_for_activity( + activity, + "mastoapi:summary" ) card = render("card.json", Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)) @@ -160,10 +198,10 @@ def render("status.json", %{activity: %{data: %{"object" => object}} = activity} in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id), reblog: nil, card: card, - content: content, + content: content_html, created_at: created_at, reblogs_count: announcement_count, - replies_count: 0, + replies_count: object["repliesCount"] || 0, favourites_count: like_count, reblogged: present?(repeated), favourited: present?(favorited), @@ -171,7 +209,7 @@ def render("status.json", %{activity: %{data: %{"object" => object}} = activity} muted: CommonAPI.thread_muted?(user, activity) || User.mutes?(opts[:for], user), pinned: pinned?(activity, user), sensitive: sensitive, - spoiler_text: object["summary"] || "", + spoiler_text: summary_html, visibility: get_visibility(object), media_attachments: attachments, mentions: mentions, @@ -181,7 +219,13 @@ def render("status.json", %{activity: %{data: %{"object" => object}} = activity} website: nil }, language: nil, - emojis: build_emojis(activity.data["object"]["emoji"]) + emojis: build_emojis(activity.data["object"]["emoji"]), + pleroma: %{ + local: activity.local, + conversation_id: get_context_id(activity), + content: %{"text/plain" => content_plaintext}, + spoiler_text: %{"text/plain" => summary_plaintext} + } } end @@ -251,7 +295,8 @@ def render("attachment.json", %{attachment: attachment}) do preview_url: href, text_url: href, type: type, - description: attachment["name"] + description: attachment["name"], + pleroma: %{mime_type: media_type} } end diff --git a/lib/pleroma/web/mastodon_api/websocket_handler.ex b/lib/pleroma/web/mastodon_api/websocket_handler.ex index 8efe2efd5..1b3721e2b 100644 --- a/lib/pleroma/web/mastodon_api/websocket_handler.ex +++ b/lib/pleroma/web/mastodon_api/websocket_handler.ex @@ -5,9 +5,9 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do require Logger - alias Pleroma.Web.OAuth.Token alias Pleroma.Repo alias Pleroma.User + alias Pleroma.Web.OAuth.Token @behaviour :cowboy_websocket @@ -90,7 +90,7 @@ defp allow_request(stream, nil) when stream in @anonymous_streams do # Authenticated streams. 
defp allow_request(stream, {"access_token", access_token}) when stream in @streams do with %Token{user_id: user_id} <- Repo.get_by(Token, token: access_token), - user = %User{} <- Repo.get(User, user_id) do + user = %User{} <- User.get_by_id(user_id) do {:ok, user} else _ -> {:error, 403} diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex index 39a725a69..3bd2affe9 100644 --- a/lib/pleroma/web/media_proxy/media_proxy.ex +++ b/lib/pleroma/web/media_proxy/media_proxy.ex @@ -19,7 +19,8 @@ def url(url) do else secret = Application.get_env(:pleroma, Pleroma.Web.Endpoint)[:secret_key_base] - # Must preserve `%2F` for compatibility with S3 (https://git.pleroma.social/pleroma/pleroma/issues/580) + # Must preserve `%2F` for compatibility with S3 + # https://git.pleroma.social/pleroma/pleroma/issues/580 replacement = get_replacement(url, ":2F:") # The URL is url-decoded and encoded again to ensure it is correctly encoded and not twice. diff --git a/lib/pleroma/web/metadata/opengraph.ex b/lib/pleroma/web/metadata/opengraph.ex index cafb8134b..357b80a2d 100644 --- a/lib/pleroma/web/metadata/opengraph.ex +++ b/lib/pleroma/web/metadata/opengraph.ex @@ -88,7 +88,7 @@ defp build_attachments(%{data: %{"attachment" => attachments}}) do # TODO: Add additional properties to objects when we have the data available. # Also, Whatsapp only wants JPEG or PNGs. It seems that if we add a second og:image - # object when a Video or GIF is attached it will display that in the Whatsapp Rich Preview. + # object when a Video or GIF is attached it will display that in Whatsapp Rich Preview. case media_type do "audio" -> [ diff --git a/lib/pleroma/web/metadata/twitter_card.ex b/lib/pleroma/web/metadata/twitter_card.ex index a0be383e5..040b872e7 100644 --- a/lib/pleroma/web/metadata/twitter_card.ex +++ b/lib/pleroma/web/metadata/twitter_card.ex @@ -97,7 +97,8 @@ defp build_attachments(id, %{data: %{"attachment" => attachments}}) do | acc ] - # TODO: Need the true width and height values here or Twitter renders an iFrame with a bad aspect ratio + # TODO: Need the true width and height values here or Twitter renders an iFrame with + # a bad aspect ratio "video" -> [ {:meta, [property: "twitter:card", content: "player"], []}, diff --git a/lib/pleroma/web/metadata/utils.ex b/lib/pleroma/web/metadata/utils.ex index a166800d4..58385a3d1 100644 --- a/lib/pleroma/web/metadata/utils.ex +++ b/lib/pleroma/web/metadata/utils.ex @@ -1,10 +1,10 @@ # Pleroma: A lightweight social networking server -# Copyright \xc2\xa9 2017-2019 Pleroma Authors +# Copyright © 2017-2019 Pleroma Authors # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Metadata.Utils do - alias Pleroma.HTML alias Pleroma.Formatter + alias Pleroma.HTML alias Pleroma.Web.MediaProxy def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do @@ -12,19 +12,19 @@ def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do # html content comes from DB already encoded, decode first and scrub after |> HtmlEntities.decode() |> String.replace(~r//, " ") - |> HTML.get_cached_stripped_html_for_object(object, __MODULE__) + |> HTML.get_cached_stripped_html_for_activity(object, "metadata") |> Formatter.demojify() |> Formatter.truncate() end - def scrub_html_and_truncate(content) when is_binary(content) do + def scrub_html_and_truncate(content, max_length \\ 200) when is_binary(content) do content # html content comes from DB already encoded, decode first and scrub after |> HtmlEntities.decode() |> 
String.replace(~r//, " ") |> HTML.strip_tags() |> Formatter.demojify() - |> Formatter.truncate() + |> Formatter.truncate(max_length) end def attachment_url(url) do diff --git a/lib/pleroma/web/nodeinfo/nodeinfo.ex b/lib/pleroma/web/nodeinfo/nodeinfo.ex deleted file mode 100644 index 8b1378917..000000000 --- a/lib/pleroma/web/nodeinfo/nodeinfo.ex +++ /dev/null @@ -1 +0,0 @@ - diff --git a/lib/pleroma/web/nodeinfo/nodeinfo_controller.ex b/lib/pleroma/web/nodeinfo/nodeinfo_controller.ex index f4867d05b..216a962bd 100644 --- a/lib/pleroma/web/nodeinfo/nodeinfo_controller.ex +++ b/lib/pleroma/web/nodeinfo/nodeinfo_controller.ex @@ -6,7 +6,6 @@ defmodule Pleroma.Web.Nodeinfo.NodeinfoController do use Pleroma.Web, :controller alias Pleroma.Config - alias Pleroma.Repo alias Pleroma.Stats alias Pleroma.User alias Pleroma.Web @@ -86,8 +85,7 @@ def raw_nodeinfo do end staff_accounts = - User.moderator_user_query() - |> Repo.all() + User.all_superusers() |> Enum.map(fn u -> u.ap_id end) mrf_user_allowlist = @@ -126,6 +124,9 @@ def raw_nodeinfo do end, if Keyword.get(instance, :allow_relay) do "relay" + end, + if Keyword.get(instance, :safe_dm_mentions) do + "safe_dm_mentions" end ] |> Enum.filter(& &1) diff --git a/lib/pleroma/web/oauth/authorization.ex b/lib/pleroma/web/oauth/authorization.ex index d37c2cb83..3461f9983 100644 --- a/lib/pleroma/web/oauth/authorization.ex +++ b/lib/pleroma/web/oauth/authorization.ex @@ -5,10 +5,10 @@ defmodule Pleroma.Web.OAuth.Authorization do use Ecto.Schema - alias Pleroma.User alias Pleroma.Repo - alias Pleroma.Web.OAuth.Authorization + alias Pleroma.User alias Pleroma.Web.OAuth.App + alias Pleroma.Web.OAuth.Authorization import Ecto.Changeset import Ecto.Query @@ -16,7 +16,7 @@ defmodule Pleroma.Web.OAuth.Authorization do schema "oauth_authorizations" do field(:token, :string) field(:scopes, {:array, :string}, default: []) - field(:valid_until, :naive_datetime) + field(:valid_until, :naive_datetime_usec) field(:used, :boolean, default: false) belongs_to(:user, Pleroma.User, type: Pleroma.FlakeId) belongs_to(:app, App) diff --git a/lib/pleroma/web/oauth/fallback_controller.ex b/lib/pleroma/web/oauth/fallback_controller.ex index f0fe3b578..afaa00242 100644 --- a/lib/pleroma/web/oauth/fallback_controller.ex +++ b/lib/pleroma/web/oauth/fallback_controller.ex @@ -6,8 +6,21 @@ defmodule Pleroma.Web.OAuth.FallbackController do use Pleroma.Web, :controller alias Pleroma.Web.OAuth.OAuthController - # No user/password - def call(conn, _) do + def call(conn, {:register, :generic_error}) do + conn + |> put_status(:internal_server_error) + |> put_flash(:error, "Unknown error, please check the details and try again.") + |> OAuthController.registration_details(conn.params) + end + + def call(conn, {:register, _error}) do + conn + |> put_status(:unauthorized) + |> put_flash(:error, "Invalid Username/Password") + |> OAuthController.registration_details(conn.params) + end + + def call(conn, _error) do conn |> put_status(:unauthorized) |> put_flash(:error, "Invalid Username/Password") diff --git a/lib/pleroma/web/oauth/oauth_controller.ex b/lib/pleroma/web/oauth/oauth_controller.ex index 36318d69b..bee7084ad 100644 --- a/lib/pleroma/web/oauth/oauth_controller.ex +++ b/lib/pleroma/web/oauth/oauth_controller.ex @@ -5,22 +5,46 @@ defmodule Pleroma.Web.OAuth.OAuthController do use Pleroma.Web, :controller - alias Pleroma.Web.Auth.Authenticator - alias Pleroma.Web.OAuth.Authorization - alias Pleroma.Web.OAuth.Token - alias Pleroma.Web.OAuth.App + alias Pleroma.Registration alias 
Pleroma.Repo alias Pleroma.User - alias Comeonin.Pbkdf2 + alias Pleroma.Web.Auth.Authenticator + alias Pleroma.Web.ControllerHelper + alias Pleroma.Web.OAuth.App + alias Pleroma.Web.OAuth.Authorization + alias Pleroma.Web.OAuth.Token import Pleroma.Web.ControllerHelper, only: [oauth_scopes: 2] + if Pleroma.Config.oauth_consumer_enabled?(), do: plug(Ueberauth) + plug(:fetch_session) plug(:fetch_flash) action_fallback(Pleroma.Web.OAuth.FallbackController) - def authorize(conn, params) do + def authorize(%{assigns: %{token: %Token{} = token}} = conn, params) do + if ControllerHelper.truthy_param?(params["force_login"]) do + do_authorize(conn, params) + else + redirect_uri = + if is_binary(params["redirect_uri"]) do + params["redirect_uri"] + else + app = Repo.preload(token, :app).app + + app.redirect_uris + |> String.split() + |> Enum.at(0) + end + + redirect(conn, external: redirect_uri(conn, redirect_uri)) + end + end + + def authorize(conn, params), do: do_authorize(conn, params) + + defp do_authorize(conn, params) do app = Repo.get_by(App, client_id: params["client_id"]) available_scopes = (app && app.scopes) || [] scopes = oauth_scopes(params, nil) || available_scopes @@ -36,75 +60,73 @@ def authorize(conn, params) do }) end - def create_authorization(conn, %{ - "authorization" => - %{ - "client_id" => client_id, - "redirect_uri" => redirect_uri - } = auth_params - }) do - with {_, {:ok, %User{} = user}} <- {:get_user, Authenticator.get_user(conn)}, - %App{} = app <- Repo.get_by(App, client_id: client_id), - true <- redirect_uri in String.split(app.redirect_uris), - scopes <- oauth_scopes(auth_params, []), - {:unsupported_scopes, []} <- {:unsupported_scopes, scopes -- app.scopes}, - # Note: `scope` param is intentionally not optional in this context - {:missing_scopes, false} <- {:missing_scopes, scopes == []}, - {:auth_active, true} <- {:auth_active, User.auth_active?(user)}, - {:ok, auth} <- Authorization.create_authorization(app, user, scopes) do - redirect_uri = - if redirect_uri == "." do - # Special case: Local MastodonFE - mastodon_api_url(conn, :login) + def create_authorization( + conn, + %{"authorization" => auth_params} = params, + opts \\ [] + ) do + with {:ok, auth} <- do_create_authorization(conn, params, opts[:user]) do + after_create_authorization(conn, auth, auth_params) + else + error -> + handle_create_authorization_error(conn, error, auth_params) + end + end + + def after_create_authorization(conn, auth, %{"redirect_uri" => redirect_uri} = auth_params) do + redirect_uri = redirect_uri(conn, redirect_uri) + + if redirect_uri == "urn:ietf:wg:oauth:2.0:oob" do + render(conn, "results.html", %{ + auth: auth + }) + else + connector = if String.contains?(redirect_uri, "?"), do: "&", else: "?" + url = "#{redirect_uri}#{connector}" + url_params = %{:code => auth.token} + + url_params = + if auth_params["state"] do + Map.put(url_params, :state, auth_params["state"]) else - redirect_uri + url_params end - cond do - redirect_uri == "urn:ietf:wg:oauth:2.0:oob" -> - render(conn, "results.html", %{ - auth: auth - }) + url = "#{url}#{Plug.Conn.Query.encode(url_params)}" - true -> - connector = if String.contains?(redirect_uri, "?"), do: "&", else: "?" 
- url = "#{redirect_uri}#{connector}" - url_params = %{:code => auth.token} - - url_params = - if auth_params["state"] do - Map.put(url_params, :state, auth_params["state"]) - else - url_params - end - - url = "#{url}#{Plug.Conn.Query.encode(url_params)}" - - redirect(conn, external: url) - end - else - {scopes_issue, _} when scopes_issue in [:unsupported_scopes, :missing_scopes] -> - conn - |> put_flash(:error, "Permissions not specified.") - |> put_status(:unauthorized) - |> authorize(auth_params) - - {:auth_active, false} -> - conn - |> put_flash(:error, "Account confirmation pending.") - |> put_status(:forbidden) - |> authorize(auth_params) - - error -> - Authenticator.handle_error(conn, error) + redirect(conn, external: url) end end + defp handle_create_authorization_error(conn, {scopes_issue, _}, auth_params) + when scopes_issue in [:unsupported_scopes, :missing_scopes] do + # Per https://github.com/tootsuite/mastodon/blob/ + # 51e154f5e87968d6bb115e053689767ab33e80cd/app/controllers/api/base_controller.rb#L39 + conn + |> put_flash(:error, "This action is outside the authorized scopes") + |> put_status(:unauthorized) + |> authorize(auth_params) + end + + defp handle_create_authorization_error(conn, {:auth_active, false}, auth_params) do + # Per https://github.com/tootsuite/mastodon/blob/ + # 51e154f5e87968d6bb115e053689767ab33e80cd/app/controllers/api/base_controller.rb#L76 + conn + |> put_flash(:error, "Your login is missing a confirmed e-mail address") + |> put_status(:forbidden) + |> authorize(auth_params) + end + + defp handle_create_authorization_error(conn, error, _auth_params) do + Authenticator.handle_error(conn, error) + end + def token_exchange(conn, %{"grant_type" => "authorization_code"} = params) do with %App{} = app <- get_app_from_request(conn, params), fixed_token = fix_padding(params["code"]), %Authorization{} = auth <- Repo.get_by(Authorization, token: fixed_token, app_id: app.id), + %User{} = user <- User.get_by_id(auth.user_id), {:ok, token} <- Token.exchange_token(app, auth), {:ok, inserted_at} <- DateTime.from_naive(token.inserted_at, "Etc/UTC") do response = %{ @@ -113,7 +135,8 @@ def token_exchange(conn, %{"grant_type" => "authorization_code"} = params) do refresh_token: token.refresh_token, created_at: DateTime.to_unix(inserted_at), expires_in: 60 * 10, - scope: Enum.join(token.scopes, " ") + scope: Enum.join(token.scopes, " "), + me: user.ap_id } json(conn, response) @@ -126,12 +149,12 @@ def token_exchange(conn, %{"grant_type" => "authorization_code"} = params) do def token_exchange( conn, - %{"grant_type" => "password", "username" => name, "password" => password} = params + %{"grant_type" => "password"} = params ) do - with %App{} = app <- get_app_from_request(conn, params), - %User{} = user <- User.get_by_nickname_or_email(name), - true <- Pbkdf2.checkpw(password, user.password_hash), + with {_, {:ok, %User{} = user}} <- {:get_user, Authenticator.get_user(conn, params)}, + %App{} = app <- get_app_from_request(conn, params), {:auth_active, true} <- {:auth_active, User.auth_active?(user)}, + {:user_active, true} <- {:user_active, !user.info.deactivated}, scopes <- oauth_scopes(params, app.scopes), [] <- scopes -- app.scopes, true <- Enum.any?(scopes), @@ -142,15 +165,23 @@ def token_exchange( access_token: token.token, refresh_token: token.refresh_token, expires_in: 60 * 10, - scope: Enum.join(token.scopes, " ") + scope: Enum.join(token.scopes, " "), + me: user.ap_id } json(conn, response) else {:auth_active, false} -> + # Per 
https://github.com/tootsuite/mastodon/blob/ + # 51e154f5e87968d6bb115e053689767ab33e80cd/app/controllers/api/base_controller.rb#L76 conn |> put_status(:forbidden) - |> json(%{error: "Account confirmation pending"}) + |> json(%{error: "Your login is missing a confirmed e-mail address"}) + + {:user_active, false} -> + conn + |> put_status(:forbidden) + |> json(%{error: "Your account is currently disabled"}) _error -> put_status(conn, 400) @@ -182,6 +213,184 @@ def token_revoke(conn, %{"token" => token} = params) do end end + @doc "Prepares OAuth request to provider for Ueberauth" + def prepare_request(conn, %{"provider" => provider} = params) do + scope = + oauth_scopes(params, []) + |> Enum.join(" ") + + state = + params + |> Map.delete("scopes") + |> Map.put("scope", scope) + |> Poison.encode!() + + params = + params + |> Map.drop(~w(scope scopes client_id redirect_uri)) + |> Map.put("state", state) + + # Handing the request to Ueberauth + redirect(conn, to: o_auth_path(conn, :request, provider, params)) + end + + def request(conn, params) do + message = + if params["provider"] do + "Unsupported OAuth provider: #{params["provider"]}." + else + "Bad OAuth request." + end + + conn + |> put_flash(:error, message) + |> redirect(to: "/") + end + + def callback(%{assigns: %{ueberauth_failure: failure}} = conn, params) do + params = callback_params(params) + messages = for e <- Map.get(failure, :errors, []), do: e.message + message = Enum.join(messages, "; ") + + conn + |> put_flash(:error, "Failed to authenticate: #{message}.") + |> redirect(external: redirect_uri(conn, params["redirect_uri"])) + end + + def callback(conn, params) do + params = callback_params(params) + + with {:ok, registration} <- Authenticator.get_registration(conn, params) do + user = Repo.preload(registration, :user).user + auth_params = Map.take(params, ~w(client_id redirect_uri scope scopes state)) + + if user do + create_authorization( + conn, + %{"authorization" => auth_params}, + user: user + ) + else + registration_params = + Map.merge(auth_params, %{ + "nickname" => Registration.nickname(registration), + "email" => Registration.email(registration) + }) + + conn + |> put_session(:registration_id, registration.id) + |> registration_details(registration_params) + end + else + _ -> + conn + |> put_flash(:error, "Failed to set up user account.") + |> redirect(external: redirect_uri(conn, params["redirect_uri"])) + end + end + + defp callback_params(%{"state" => state} = params) do + Map.merge(params, Poison.decode!(state)) + end + + def registration_details(conn, params) do + render(conn, "register.html", %{ + client_id: params["client_id"], + redirect_uri: params["redirect_uri"], + state: params["state"], + scopes: oauth_scopes(params, []), + nickname: params["nickname"], + email: params["email"] + }) + end + + def register(conn, %{"op" => "connect"} = params) do + authorization_params = Map.put(params, "name", params["auth_name"]) + create_authorization_params = %{"authorization" => authorization_params} + + with registration_id when not is_nil(registration_id) <- get_session_registration_id(conn), + %Registration{} = registration <- Repo.get(Registration, registration_id), + {_, {:ok, auth}} <- + {:create_authorization, do_create_authorization(conn, create_authorization_params)}, + %User{} = user <- Repo.preload(auth, :user).user, + {:ok, _updated_registration} <- Registration.bind_to_user(registration, user) do + conn + |> put_session_registration_id(nil) + |> after_create_authorization(auth, authorization_params) + 
else + {:create_authorization, error} -> + {:register, handle_create_authorization_error(conn, error, create_authorization_params)} + + _ -> + {:register, :generic_error} + end + end + + def register(conn, %{"op" => "register"} = params) do + with registration_id when not is_nil(registration_id) <- get_session_registration_id(conn), + %Registration{} = registration <- Repo.get(Registration, registration_id), + {:ok, user} <- Authenticator.create_from_registration(conn, params, registration) do + conn + |> put_session_registration_id(nil) + |> create_authorization( + %{ + "authorization" => %{ + "client_id" => params["client_id"], + "redirect_uri" => params["redirect_uri"], + "scopes" => oauth_scopes(params, nil) + } + }, + user: user + ) + else + {:error, changeset} -> + message = + Enum.map(changeset.errors, fn {field, {error, _}} -> + "#{field} #{error}" + end) + |> Enum.join("; ") + + message = + String.replace( + message, + "ap_id has already been taken", + "nickname has already been taken" + ) + + conn + |> put_status(:forbidden) + |> put_flash(:error, "Error: #{message}.") + |> registration_details(params) + + _ -> + {:register, :generic_error} + end + end + + defp do_create_authorization( + conn, + %{ + "authorization" => + %{ + "client_id" => client_id, + "redirect_uri" => redirect_uri + } = auth_params + } = params, + user \\ nil + ) do + with {_, {:ok, %User{} = user}} <- + {:get_user, (user && {:ok, user}) || Authenticator.get_user(conn, params)}, + %App{} = app <- Repo.get_by(App, client_id: client_id), + true <- redirect_uri in String.split(app.redirect_uris), + scopes <- oauth_scopes(auth_params, []), + {:unsupported_scopes, []} <- {:unsupported_scopes, scopes -- app.scopes}, + # Note: `scope` param is intentionally not optional in this context + {:missing_scopes, false} <- {:missing_scopes, scopes == []}, + {:auth_active, true} <- {:auth_active, User.auth_active?(user)} do + Authorization.create_authorization(app, user, scopes) + end + end + # XXX - for whatever reason our token arrives urlencoded, but Plug.Conn should be # decoding it. Investigate sometime. 
defp fix_padding(token) do @@ -214,4 +423,14 @@ defp get_app_from_request(conn, params) do nil end end + + # Special case: Local MastodonFE + defp redirect_uri(conn, "."), do: mastodon_api_url(conn, :login) + + defp redirect_uri(_conn, redirect_uri), do: redirect_uri + + defp get_session_registration_id(conn), do: get_session(conn, :registration_id) + + defp put_session_registration_id(conn, registration_id), + do: put_session(conn, :registration_id, registration_id) end diff --git a/lib/pleroma/web/oauth/token.ex b/lib/pleroma/web/oauth/token.ex index ca67632ba..2b5ad9b94 100644 --- a/lib/pleroma/web/oauth/token.ex +++ b/lib/pleroma/web/oauth/token.ex @@ -7,17 +7,17 @@ defmodule Pleroma.Web.OAuth.Token do import Ecto.Query - alias Pleroma.User alias Pleroma.Repo - alias Pleroma.Web.OAuth.Token + alias Pleroma.User alias Pleroma.Web.OAuth.App alias Pleroma.Web.OAuth.Authorization + alias Pleroma.Web.OAuth.Token schema "oauth_tokens" do field(:token, :string) field(:refresh_token, :string) field(:scopes, {:array, :string}, default: []) - field(:valid_until, :naive_datetime) + field(:valid_until, :naive_datetime_usec) belongs_to(:user, Pleroma.User, type: Pleroma.FlakeId) belongs_to(:app, App) @@ -27,7 +27,7 @@ defmodule Pleroma.Web.OAuth.Token do def exchange_token(app, auth) do with {:ok, auth} <- Authorization.use_token(auth), true <- auth.app_id == app.id do - create_token(app, Repo.get(User, auth.user_id), auth.scopes) + create_token(app, User.get_by_id(auth.user_id), auth.scopes) end end diff --git a/lib/pleroma/web/ostatus/activity_representer.ex b/lib/pleroma/web/ostatus/activity_representer.ex index 9e1f24bc4..1a1b74bb0 100644 --- a/lib/pleroma/web/ostatus/activity_representer.ex +++ b/lib/pleroma/web/ostatus/activity_representer.ex @@ -4,8 +4,8 @@ defmodule Pleroma.Web.OStatus.ActivityRepresenter do alias Pleroma.Activity - alias Pleroma.User alias Pleroma.Object + alias Pleroma.User alias Pleroma.Web.OStatus.UserRepresenter require Logger diff --git a/lib/pleroma/web/ostatus/feed_representer.ex b/lib/pleroma/web/ostatus/feed_representer.ex index 025d4731c..b7b97e505 100644 --- a/lib/pleroma/web/ostatus/feed_representer.ex +++ b/lib/pleroma/web/ostatus/feed_representer.ex @@ -4,8 +4,8 @@ defmodule Pleroma.Web.OStatus.FeedRepresenter do alias Pleroma.User - alias Pleroma.Web.OStatus alias Pleroma.Web.MediaProxy + alias Pleroma.Web.OStatus alias Pleroma.Web.OStatus.ActivityRepresenter alias Pleroma.Web.OStatus.UserRepresenter diff --git a/lib/pleroma/web/ostatus/handlers/delete_handler.ex b/lib/pleroma/web/ostatus/handlers/delete_handler.ex index 01b52f08f..b2f9f3946 100644 --- a/lib/pleroma/web/ostatus/handlers/delete_handler.ex +++ b/lib/pleroma/web/ostatus/handlers/delete_handler.ex @@ -4,9 +4,9 @@ defmodule Pleroma.Web.OStatus.DeleteHandler do require Logger - alias Pleroma.Web.XML alias Pleroma.Object alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.XML def handle_delete(entry, _doc \\ nil) do with id <- XML.string_from_xpath("//id", entry), diff --git a/lib/pleroma/web/ostatus/handlers/follow_handler.ex b/lib/pleroma/web/ostatus/handlers/follow_handler.ex index 91ad4bc40..263d3b2dc 100644 --- a/lib/pleroma/web/ostatus/handlers/follow_handler.ex +++ b/lib/pleroma/web/ostatus/handlers/follow_handler.ex @@ -3,10 +3,10 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.OStatus.FollowHandler do - alias Pleroma.Web.XML - alias Pleroma.Web.OStatus - alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub + 
alias Pleroma.Web.OStatus + alias Pleroma.Web.XML def handle(entry, doc) do with {:ok, actor} <- OStatus.find_make_or_update_user(doc), diff --git a/lib/pleroma/web/ostatus/handlers/note_handler.ex b/lib/pleroma/web/ostatus/handlers/note_handler.ex index c2e585cac..db995ec77 100644 --- a/lib/pleroma/web/ostatus/handlers/note_handler.ex +++ b/lib/pleroma/web/ostatus/handlers/note_handler.ex @@ -4,13 +4,14 @@ defmodule Pleroma.Web.OStatus.NoteHandler do require Logger - alias Pleroma.Web.OStatus - alias Pleroma.Web.XML + alias Pleroma.Activity alias Pleroma.Object alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.CommonAPI + alias Pleroma.Web.OStatus + alias Pleroma.Web.XML @doc """ Get the context for this note. Uses this: @@ -18,13 +19,13 @@ defmodule Pleroma.Web.OStatus.NoteHandler do 2. The conversation reference in the ostatus xml 3. A newly generated context id. """ - def get_context(entry, inReplyTo) do + def get_context(entry, in_reply_to) do context = (XML.string_from_xpath("//ostatus:conversation[1]", entry) || XML.string_from_xpath("//ostatus:conversation[1]/@ref", entry) || "") |> String.trim() - with %{data: %{"context" => context}} <- Object.get_cached_by_ap_id(inReplyTo) do + with %{data: %{"context" => context}} <- Object.get_cached_by_ap_id(in_reply_to) do context else _e -> @@ -87,14 +88,14 @@ def add_external_url(note, entry) do Map.put(note, "external_url", url) end - def fetch_replied_to_activity(entry, inReplyTo) do - with %Activity{} = activity <- Activity.get_create_by_object_ap_id(inReplyTo) do + def fetch_replied_to_activity(entry, in_reply_to) do + with %Activity{} = activity <- Activity.get_create_by_object_ap_id(in_reply_to) do activity else _e -> - with inReplyToHref when not is_nil(inReplyToHref) <- + with in_reply_to_href when not is_nil(in_reply_to_href) <- XML.string_from_xpath("//thr:in-reply-to[1]/@href", entry), - {:ok, [activity | _]} <- OStatus.fetch_activity_from_url(inReplyToHref) do + {:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href) do activity else _e -> nil @@ -105,16 +106,17 @@ def fetch_replied_to_activity(entry, inReplyTo) do # TODO: Clean this up a bit. 
def handle_note(entry, doc \\ nil) do with id <- XML.string_from_xpath("//id", entry), - activity when is_nil(activity) <- Activity.get_create_by_object_ap_id(id), + activity when is_nil(activity) <- Activity.get_create_by_object_ap_id_with_object(id), [author] <- :xmerl_xpath.string('//author[1]', doc), {:ok, actor} <- OStatus.find_make_or_update_user(author), content_html <- OStatus.get_content(entry), cw <- OStatus.get_cw(entry), - inReplyTo <- XML.string_from_xpath("//thr:in-reply-to[1]/@ref", entry), - inReplyToActivity <- fetch_replied_to_activity(entry, inReplyTo), - inReplyTo <- (inReplyToActivity && inReplyToActivity.data["object"]["id"]) || inReplyTo, + in_reply_to <- XML.string_from_xpath("//thr:in-reply-to[1]/@ref", entry), + in_reply_to_activity <- fetch_replied_to_activity(entry, in_reply_to), + in_reply_to <- + (in_reply_to_activity && in_reply_to_activity.data["object"]["id"]) || in_reply_to, attachments <- OStatus.get_attachments(entry), - context <- get_context(entry, inReplyTo), + context <- get_context(entry, in_reply_to), tags <- OStatus.get_tags(entry), mentions <- get_mentions(entry), to <- make_to_list(actor, mentions), @@ -128,7 +130,7 @@ def handle_note(entry, doc \\ nil) do context, content_html, attachments, - inReplyToActivity, + in_reply_to_activity, [], cw ), @@ -140,8 +142,8 @@ def handle_note(entry, doc \\ nil) do # TODO: Handle this case in make_note_data note <- if( - inReplyTo && !inReplyToActivity, - do: note |> Map.put("inReplyTo", inReplyTo), + in_reply_to && !in_reply_to_activity, + do: note |> Map.put("inReplyTo", in_reply_to), else: note ) do ActivityPub.create(%{ diff --git a/lib/pleroma/web/ostatus/handlers/unfollow_handler.ex b/lib/pleroma/web/ostatus/handlers/unfollow_handler.ex index c9085894d..6596ada3b 100644 --- a/lib/pleroma/web/ostatus/handlers/unfollow_handler.ex +++ b/lib/pleroma/web/ostatus/handlers/unfollow_handler.ex @@ -3,10 +3,10 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.OStatus.UnfollowHandler do - alias Pleroma.Web.XML - alias Pleroma.Web.OStatus - alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.OStatus + alias Pleroma.Web.XML def handle(entry, doc) do with {:ok, actor} <- OStatus.find_make_or_update_user(doc), diff --git a/lib/pleroma/web/ostatus/ostatus.ex b/lib/pleroma/web/ostatus/ostatus.ex index b4f5761ac..9a34d7ad5 100644 --- a/lib/pleroma/web/ostatus/ostatus.ex +++ b/lib/pleroma/web/ostatus/ostatus.ex @@ -9,22 +9,22 @@ defmodule Pleroma.Web.OStatus do import Pleroma.Web.XML require Logger + alias Pleroma.Activity + alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User alias Pleroma.Web - alias Pleroma.Object - alias Pleroma.Activity alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Transmogrifier + alias Pleroma.Web.OStatus.DeleteHandler + alias Pleroma.Web.OStatus.FollowHandler + alias Pleroma.Web.OStatus.NoteHandler + alias Pleroma.Web.OStatus.UnfollowHandler alias Pleroma.Web.WebFinger alias Pleroma.Web.Websub - alias Pleroma.Web.OStatus.FollowHandler - alias Pleroma.Web.OStatus.UnfollowHandler - alias Pleroma.Web.OStatus.NoteHandler - alias Pleroma.Web.OStatus.DeleteHandler - def is_representable?(%Activity{data: data}) do - object = Object.normalize(data["object"]) + def is_representable?(%Activity{} = activity) do + object = Object.normalize(activity) cond do is_nil(object) -> @@ -119,7 +119,7 @@ def handle_incoming(xml_string) do def make_share(entry, doc, retweeted_activity) do with {:ok, 
actor} <- find_make_or_update_user(doc), - %Object{} = object <- Object.normalize(retweeted_activity.data["object"]), + %Object{} = object <- Object.normalize(retweeted_activity), id when not is_nil(id) <- string_from_xpath("/entry/id", entry), {:ok, activity, _object} = ActivityPub.announce(actor, object, id, false) do {:ok, activity} @@ -137,7 +137,7 @@ def handle_share(entry, doc) do def make_favorite(entry, doc, favorited_activity) do with {:ok, actor} <- find_make_or_update_user(doc), - %Object{} = object <- Object.normalize(favorited_activity.data["object"]), + %Object{} = object <- Object.normalize(favorited_activity), id when not is_nil(id) <- string_from_xpath("/entry/id", entry), {:ok, activity, _object} = ActivityPub.like(actor, object, id, false) do {:ok, activity} @@ -159,7 +159,7 @@ def get_or_try_fetching(entry) do Logger.debug("Trying to get entry from db") with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry), - %Activity{} = activity <- Activity.get_create_by_object_ap_id(id) do + %Activity{} = activity <- Activity.get_create_by_object_ap_id_with_object(id) do {:ok, activity} else _ -> diff --git a/lib/pleroma/web/ostatus/ostatus_controller.ex b/lib/pleroma/web/ostatus/ostatus_controller.ex index 4e963774a..2fb6ce41b 100644 --- a/lib/pleroma/web/ostatus/ostatus_controller.ex +++ b/lib/pleroma/web/ostatus/ostatus_controller.ex @@ -9,13 +9,13 @@ defmodule Pleroma.Web.OStatus.OStatusController do alias Pleroma.Object alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.ActivityPub.ActivityPubController alias Pleroma.Web.ActivityPub.ObjectView - alias Pleroma.Web.OStatus.ActivityRepresenter - alias Pleroma.Web.OStatus.FeedRepresenter + alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.Federator alias Pleroma.Web.OStatus + alias Pleroma.Web.OStatus.ActivityRepresenter + alias Pleroma.Web.OStatus.FeedRepresenter alias Pleroma.Web.XML plug(Pleroma.Web.FederatingPlug when action in [:salmon_incoming]) @@ -102,7 +102,8 @@ def object(conn, %{"uuid" => uuid}) do ActivityPubController.call(conn, :object) else with id <- o_status_url(conn, :object, uuid), - {_, %Activity{} = activity} <- {:activity, Activity.get_create_by_object_ap_id(id)}, + {_, %Activity{} = activity} <- + {:activity, Activity.get_create_by_object_ap_id_with_object(id)}, {_, true} <- {:public?, Visibility.is_public?(activity)}, %User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do case get_format(conn) do @@ -148,13 +149,13 @@ def activity(conn, %{"uuid" => uuid}) do end def notice(conn, %{"id" => id}) do - with {_, %Activity{} = activity} <- {:activity, Activity.get_by_id(id)}, + with {_, %Activity{} = activity} <- {:activity, Activity.get_by_id_with_object(id)}, {_, true} <- {:public?, Visibility.is_public?(activity)}, %User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do case format = get_format(conn) do "html" -> if activity.data["type"] == "Create" do - %Object{} = object = Object.normalize(activity.data["object"]) + %Object{} = object = Object.normalize(activity) Fallback.RedirectController.redirector_with_meta(conn, %{ activity_id: activity.id, @@ -191,9 +192,9 @@ def notice(conn, %{"id" => id}) do # Returns an HTML embedded

    You gotta take whatever bellyrubbing opportunity you can get before she changes her mind 🦁 #mastocats

    ", + "contentMap": { + "en": "

    You gotta take whatever bellyrubbing opportunity you can get before she changes her mind 🦁 #mastocats

    " + }, + "attachment": [ + { + "type": "Document", + "mediaType": "video/mp4", + "url": "https://files.mastodon.social/media_attachments/files/013/049/816/original/e7831178a5e0d6d4.mp4", + "name": null + } + ], + "tag": [ + { + "type": "Hashtag", + "href": "https://mastodon.social/tags/mastocats", + "name": "#mastocats" + } + ], + "replies": { + "id": "https://mastodon.social/users/emelie/statuses/101849165031453009/replies", + "type": "Collection", + "first": { + "type": "CollectionPage", + "partOf": "https://mastodon.social/users/emelie/statuses/101849165031453009/replies", + "items": [] + } + } +} diff --git a/test/fixtures/httpoison_mock/webfinger_emelie.json b/test/fixtures/httpoison_mock/webfinger_emelie.json new file mode 100644 index 000000000..0b61cb618 --- /dev/null +++ b/test/fixtures/httpoison_mock/webfinger_emelie.json @@ -0,0 +1,36 @@ +{ + "aliases": [ + "https://mastodon.social/@emelie", + "https://mastodon.social/users/emelie" + ], + "links": [ + { + "href": "https://mastodon.social/@emelie", + "rel": "http://webfinger.net/rel/profile-page", + "type": "text/html" + }, + { + "href": "https://mastodon.social/users/emelie.atom", + "rel": "http://schemas.google.com/g/2010#updates-from", + "type": "application/atom+xml" + }, + { + "href": "https://mastodon.social/users/emelie", + "rel": "self", + "type": "application/activity+json" + }, + { + "href": "https://mastodon.social/api/salmon/15657", + "rel": "salmon" + }, + { + "href": "data:application/magic-public-key,RSA.u3CWs1oAJPE3ZJ9sj6Ut_Mu-mTE7MOijsQc8_6c73XVVuhIEomiozJIH7l8a7S1n5SYL4UuiwcubSOi7u1bbGpYnp5TYhN-Cxvq_P80V4_ncNIPSQzS49it7nSLeG5pA21lGPDA44huquES1un6p9gSmbTwngVX9oe4MYuUeh0Z7vijjU13Llz1cRq_ZgPQPgfz-2NJf-VeXnvyDZDYxZPVBBlrMl3VoGbu0M5L8SjY35559KCZ3woIvqRolcoHXfgvJMdPcJgSZVYxlCw3dA95q9jQcn6s87CPSUs7bmYEQCrDVn5m5NER5TzwBmP4cgJl9AaDVWQtRd4jFZNTxlQ==.AQAB", + "rel": "magic-public-key" + }, + { + "rel": "http://ostatus.org/schema/1.0/subscribe", + "template": "https://mastodon.social/authorize_interaction?uri={uri}" + } + ], + "subject": "acct:emelie@mastodon.social" +} diff --git a/test/fixtures/image.jpg b/test/fixtures/image.jpg index 09834bb5c..edff6246b 100644 Binary files a/test/fixtures/image.jpg and b/test/fixtures/image.jpg differ diff --git a/test/fixtures/lambadalambda.json b/test/fixtures/lambadalambda.json new file mode 100644 index 000000000..1f09fb591 --- /dev/null +++ b/test/fixtures/lambadalambda.json @@ -0,0 +1,64 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://w3id.org/security/v1", + { + "manuallyApprovesFollowers": "as:manuallyApprovesFollowers", + "toot": "http://joinmastodon.org/ns#", + "featured": { + "@id": "toot:featured", + "@type": "@id" + }, + "alsoKnownAs": { + "@id": "as:alsoKnownAs", + "@type": "@id" + }, + "movedTo": { + "@id": "as:movedTo", + "@type": "@id" + }, + "schema": "http://schema.org#", + "PropertyValue": "schema:PropertyValue", + "value": "schema:value", + "Hashtag": "as:Hashtag", + "Emoji": "toot:Emoji", + "IdentityProof": "toot:IdentityProof", + "focalPoint": { + "@container": "@list", + "@id": "toot:focalPoint" + } + } + ], + "id": "https://mastodon.social/users/lambadalambda", + "type": "Person", + "following": "https://mastodon.social/users/lambadalambda/following", + "followers": "https://mastodon.social/users/lambadalambda/followers", + "inbox": "https://mastodon.social/users/lambadalambda/inbox", + "outbox": "https://mastodon.social/users/lambadalambda/outbox", + "featured": "https://mastodon.social/users/lambadalambda/collections/featured", 
+ "preferredUsername": "lambadalambda", + "name": "Critical Value", + "summary": "\u003cp\u003e\u003c/p\u003e", + "url": "https://mastodon.social/@lambadalambda", + "manuallyApprovesFollowers": false, + "publicKey": { + "id": "https://mastodon.social/users/lambadalambda#main-key", + "owner": "https://mastodon.social/users/lambadalambda", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAw0P/Tq4gb4G/QVuMGbJo\nC/AfMNcv+m7NfrlOwkVzcU47jgESuYI4UtJayissCdBycHUnfVUd9qol+eznSODz\nCJhfJloqEIC+aSnuEPGA0POtWad6DU0E6/Ho5zQn5WAWUwbRQqowbrsm/GHo2+3v\neR5jGenwA6sYhINg/c3QQbksyV0uJ20Umyx88w8+TJuv53twOfmyDWuYNoQ3y5cc\nHKOZcLHxYOhvwg3PFaGfFHMFiNmF40dTXt9K96r7sbzc44iLD+VphbMPJEjkMuf8\nPGEFOBzy8pm3wJZw2v32RNW2VESwMYyqDzwHXGSq1a73cS7hEnc79gXlELsK04L9\nQQIDAQAB\n-----END PUBLIC KEY-----\n" + }, + "tag": [], + "attachment": [], + "endpoints": { + "sharedInbox": "https://mastodon.social/inbox" + }, + "icon": { + "type": "Image", + "mediaType": "image/gif", + "url": "https://files.mastodon.social/accounts/avatars/000/000/264/original/1429214160519.gif" + }, + "image": { + "type": "Image", + "mediaType": "image/gif", + "url": "https://files.mastodon.social/accounts/headers/000/000/264/original/28b26104f83747d2.gif" + } +} diff --git a/test/fixtures/rel_me_anchor_nofollow.html b/test/fixtures/rel_me_anchor_nofollow.html new file mode 100644 index 000000000..c856f0091 --- /dev/null +++ b/test/fixtures/rel_me_anchor_nofollow.html @@ -0,0 +1,14 @@ + + + + + Blog + + + + + diff --git a/test/fixtures/rel_me_null.html b/test/fixtures/rel_me_null.html index 57d424b80..5ab5f10c1 100644 --- a/test/fixtures/rel_me_null.html +++ b/test/fixtures/rel_me_null.html @@ -8,6 +8,7 @@ diff --git a/test/formatter_test.exs b/test/formatter_test.exs index 7d8864bf4..e74985c4e 100644 --- a/test/formatter_test.exs +++ b/test/formatter_test.exs @@ -181,6 +181,31 @@ test "does not give a replacement for single-character local nicknames who don't expected_text = "@a hi" assert {^expected_text, [] = _mentions, [] = _tags} = Formatter.linkify(text) end + + test "given the 'safe_mention' option, it will only mention people in the beginning" do + user = insert(:user) + _other_user = insert(:user) + third_user = insert(:user) + text = " @#{user.nickname} hey dude i hate @#{third_user.nickname}" + {expected_text, mentions, [] = _tags} = Formatter.linkify(text, safe_mention: true) + + assert mentions == [{"@#{user.nickname}", user}] + + assert expected_text == + "@#{user.nickname} hey dude i hate @#{third_user.nickname}" + end + + test "given the 'safe_mention' option, it will still work without any mention" do + text = "A post without any mention" + {expected_text, mentions, [] = _tags} = Formatter.linkify(text, safe_mention: true) + + assert mentions == [] + assert expected_text == text + end end describe ".parse_tags" do @@ -246,7 +271,9 @@ test "it does not add XSS emoji" do test "it returns the emoji used in the text" do text = "I love :moominmamma:" - assert Formatter.get_emoji(text) == [{"moominmamma", "/finmoji/128px/moominmamma-128.png"}] + assert Formatter.get_emoji(text) == [ + {"moominmamma", "/finmoji/128px/moominmamma-128.png", "Finmoji"} + ] end test "it returns a nice empty result when no emojis are present" do diff --git a/test/html_test.exs b/test/html_test.exs index 29cab17f3..0b5d3d892 100644 --- a/test/html_test.exs +++ b/test/html_test.exs @@ -10,6 +10,8 @@ defmodule Pleroma.HTMLTest do this is in bold

this is a paragraph
this is a linebreak
+ this is a link with allowed "rel" attribute:
+ this is a link with not allowed "rel" attribute: example.com
this is an image:
"""
@@ -24,6 +26,8 @@ test "works as expected" do
this is in bold
this is a paragraph
this is a linebreak
+ this is a link with allowed "rel" attribute: example.com
+ this is a link with not allowed "rel" attribute: example.com
this is an image:
alert('hacked')
"""
@@ -44,6 +48,8 @@ test "normalizes HTML as expected" do
this is in bold
this is a paragraph
this is a linebreak
+ this is a link with allowed "rel" attribute:
+ this is a link with not allowed "rel" attribute: example.com
this is an image:
alert('hacked')
"""
@@ -66,6 +72,8 @@ test "normalizes HTML as expected" do
this is in bold
this is a paragraph
this is a linebreak
+ this is a link with allowed "rel" attribute:
+ this is a link with not allowed "rel" attribute: example.com
this is an image:
    alert('hacked') """ diff --git a/test/integration/mastodon_websocket_test.exs b/test/integration/mastodon_websocket_test.exs index 0c513b6e7..b42c9ef07 100644 --- a/test/integration/mastodon_websocket_test.exs +++ b/test/integration/mastodon_websocket_test.exs @@ -7,9 +7,9 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do import Pleroma.Factory + alias Pleroma.Integration.WebsocketClient alias Pleroma.Web.CommonAPI alias Pleroma.Web.OAuth - alias Pleroma.Integration.WebsocketClient alias Pleroma.Web.Streamer @path Pleroma.Web.Endpoint.url() diff --git a/test/jobs_test.exs b/test/jobs_test.exs deleted file mode 100644 index ccb518dec..000000000 --- a/test/jobs_test.exs +++ /dev/null @@ -1,83 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.JobsTest do - use ExUnit.Case, async: true - - alias Pleroma.Jobs - alias Jobs.WorkerMock - - setup do - state = %{ - queues: Enum.into([Jobs.create_queue(:testing)], %{}), - refs: %{} - } - - [state: state] - end - - test "creates queue" do - queue = Jobs.create_queue(:foobar) - - assert {:foobar, set} = queue - assert :set == elem(set, 0) |> elem(0) - end - - test "enqueues an element according to priority" do - queue = [%{item: 1, priority: 2}] - - new_queue = Jobs.enqueue_sorted(queue, 2, 1) - assert new_queue == [%{item: 2, priority: 1}, %{item: 1, priority: 2}] - - new_queue = Jobs.enqueue_sorted(queue, 2, 3) - assert new_queue == [%{item: 1, priority: 2}, %{item: 2, priority: 3}] - end - - test "pop first item" do - queue = [%{item: 2, priority: 1}, %{item: 1, priority: 2}] - - assert {2, [%{item: 1, priority: 2}]} = Jobs.queue_pop(queue) - end - - test "enqueue a job", %{state: state} do - assert {:noreply, new_state} = - Jobs.handle_cast({:enqueue, :testing, WorkerMock, [:test_job, :foo, :bar], 3}, state) - - assert %{queues: %{testing: {running_jobs, []}}, refs: _} = new_state - assert :sets.size(running_jobs) == 1 - assert [ref] = :sets.to_list(running_jobs) - assert %{refs: %{^ref => :testing}} = new_state - end - - test "max jobs setting", %{state: state} do - max_jobs = Pleroma.Config.get([Jobs, :testing, :max_jobs]) - - {:noreply, state} = - Enum.reduce(1..(max_jobs + 1), {:noreply, state}, fn _, {:noreply, state} -> - Jobs.handle_cast({:enqueue, :testing, WorkerMock, [:test_job, :foo, :bar], 3}, state) - end) - - assert %{ - queues: %{ - testing: - {running_jobs, [%{item: {WorkerMock, [:test_job, :foo, :bar]}, priority: 3}]} - } - } = state - - assert :sets.size(running_jobs) == max_jobs - end - - test "remove job after it finished", %{state: state} do - {:noreply, new_state} = - Jobs.handle_cast({:enqueue, :testing, WorkerMock, [:test_job, :foo, :bar], 3}, state) - - %{queues: %{testing: {running_jobs, []}}} = new_state - [ref] = :sets.to_list(running_jobs) - - assert {:noreply, %{queues: %{testing: {running_jobs, []}}, refs: %{}}} = - Jobs.handle_info({:DOWN, ref, :process, nil, nil}, new_state) - - assert :sets.size(running_jobs) == 0 - end -end diff --git a/test/notification_test.exs b/test/notification_test.exs index 755874a3d..c3db77b6c 100644 --- a/test/notification_test.exs +++ b/test/notification_test.exs @@ -4,11 +4,11 @@ defmodule Pleroma.NotificationTest do use Pleroma.DataCase - alias Pleroma.Web.TwitterAPI.TwitterAPI - alias Pleroma.Web.CommonAPI - alias Pleroma.User alias Pleroma.Notification + alias Pleroma.User alias Pleroma.Web.ActivityPub.Transmogrifier + alias Pleroma.Web.CommonAPI + alias 
Pleroma.Web.TwitterAPI.TwitterAPI import Pleroma.Factory describe "create_notifications" do @@ -29,6 +29,18 @@ test "notifies someone when they are directly addressed" do assert notification.activity_id == activity.id assert other_notification.activity_id == activity.id end + + test "it creates a notification for subscribed users" do + user = insert(:user) + subscriber = insert(:user) + + User.subscribe(subscriber, user) + + {:ok, status} = TwitterAPI.create_status(user, %{"status" => "Akariiiin"}) + {:ok, [notification]} = Notification.create_notifications(status) + + assert notification.user_id == subscriber.id + end end describe "create_notification" do @@ -41,6 +53,75 @@ test "it doesn't create a notification for user if the user blocks the activity assert nil == Notification.create_notification(activity, user) end + test "it doesn't create a notificatin for the user if the user mutes the activity author" do + muter = insert(:user) + muted = insert(:user) + {:ok, _} = User.mute(muter, muted) + muter = Repo.get(User, muter.id) + {:ok, activity} = CommonAPI.post(muted, %{"status" => "Hi @#{muter.nickname}"}) + + assert nil == Notification.create_notification(activity, muter) + end + + test "it doesn't create a notification for an activity from a muted thread" do + muter = insert(:user) + other_user = insert(:user) + {:ok, activity} = CommonAPI.post(muter, %{"status" => "hey"}) + CommonAPI.add_mute(muter, activity) + + {:ok, activity} = + CommonAPI.post(other_user, %{ + "status" => "Hi @#{muter.nickname}", + "in_reply_to_status_id" => activity.id + }) + + assert nil == Notification.create_notification(activity, muter) + end + + test "it disables notifications from people on remote instances" do + user = insert(:user, info: %{notification_settings: %{"remote" => false}}) + other_user = insert(:user) + + create_activity = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "type" => "Create", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "actor" => other_user.ap_id, + "object" => %{ + "type" => "Note", + "content" => "Hi @#{user.nickname}", + "attributedTo" => other_user.ap_id + } + } + + {:ok, %{local: false} = activity} = Transmogrifier.handle_incoming(create_activity) + assert nil == Notification.create_notification(activity, user) + end + + test "it disables notifications from people on the local instance" do + user = insert(:user, info: %{notification_settings: %{"local" => false}}) + other_user = insert(:user) + {:ok, activity} = CommonAPI.post(other_user, %{"status" => "hey @#{user.nickname}"}) + assert nil == Notification.create_notification(activity, user) + end + + test "it disables notifications from followers" do + follower = insert(:user) + followed = insert(:user, info: %{notification_settings: %{"followers" => false}}) + User.follow(follower, followed) + {:ok, activity} = CommonAPI.post(follower, %{"status" => "hey @#{followed.nickname}"}) + assert nil == Notification.create_notification(activity, followed) + end + + test "it disables notifications from people the user follows" do + follower = insert(:user, info: %{notification_settings: %{"follows" => false}}) + followed = insert(:user) + User.follow(follower, followed) + follower = Repo.get(User, follower.id) + {:ok, activity} = CommonAPI.post(followed, %{"status" => "hey @#{follower.nickname}"}) + assert nil == Notification.create_notification(activity, follower) + end + test "it doesn't create a notification for user if he is the activity author" do activity = insert(:note_activity) author = 
User.get_by_ap_id(activity.data["actor"]) @@ -84,6 +165,28 @@ test "it doesn't create a notification for repeat-unrepeat-repeat chains" do {:ok, dupe} = TwitterAPI.repeat(user, status.id) assert nil == Notification.create_notification(dupe, retweeted_user) end + + test "it doesn't create duplicate notifications for follow+subscribed users" do + user = insert(:user) + subscriber = insert(:user) + + {:ok, _, _, _} = TwitterAPI.follow(subscriber, %{"user_id" => user.id}) + User.subscribe(subscriber, user) + {:ok, status} = TwitterAPI.create_status(user, %{"status" => "Akariiiin"}) + {:ok, [_notif]} = Notification.create_notifications(status) + end + + test "it doesn't create subscription notifications if the recipient cannot see the status" do + user = insert(:user) + subscriber = insert(:user) + + User.subscribe(subscriber, user) + + {:ok, status} = + TwitterAPI.create_status(user, %{"status" => "inwisible", "visibility" => "direct"}) + + assert {:ok, []} == Notification.create_notifications(status) + end end describe "get notification" do diff --git a/test/object_test.exs b/test/object_test.exs index a820a34ee..911757d57 100644 --- a/test/object_test.exs +++ b/test/object_test.exs @@ -5,8 +5,8 @@ defmodule Pleroma.ObjectTest do use Pleroma.DataCase import Pleroma.Factory - alias Pleroma.Repo alias Pleroma.Object + alias Pleroma.Repo test "returns an object by it's AP id" do object = insert(:note) diff --git a/test/plugs/legacy_authentication_plug_test.exs b/test/plugs/legacy_authentication_plug_test.exs index 302662797..8b0b06772 100644 --- a/test/plugs/legacy_authentication_plug_test.exs +++ b/test/plugs/legacy_authentication_plug_test.exs @@ -47,16 +47,18 @@ test "it authenticates the auth_user if present and password is correct and rese |> assign(:auth_user, user) conn = - with_mock User, - reset_password: fn user, %{password: password, password_confirmation: password} -> - send(self(), :reset_password) - {:ok, user} - end do - conn - |> LegacyAuthenticationPlug.call(%{}) + with_mocks([ + {:crypt, [], [crypt: fn _password, password_hash -> password_hash end]}, + {User, [], + [ + reset_password: fn user, %{password: password, password_confirmation: password} -> + {:ok, user} + end + ]} + ]) do + LegacyAuthenticationPlug.call(conn, %{}) end - assert_received :reset_password assert conn.assigns.user == user end diff --git a/test/plugs/uploaded_media_plug_test.exs b/test/plugs/uploaded_media_plug_test.exs new file mode 100644 index 000000000..49cf5396a --- /dev/null +++ b/test/plugs/uploaded_media_plug_test.exs @@ -0,0 +1,43 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.UploadedMediaPlugTest do + use Pleroma.Web.ConnCase + alias Pleroma.Upload + + defp upload_file(context) do + Pleroma.DataCase.ensure_local_uploader(context) + File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg") + + file = %Plug.Upload{ + content_type: "image/jpg", + path: Path.absname("test/fixtures/image_tmp.jpg"), + filename: "nice_tf.jpg" + } + + {:ok, data} = Upload.store(file) + [%{"href" => attachment_url} | _] = data["url"] + [attachment_url: attachment_url] + end + + setup_all :upload_file + + test "does not send Content-Disposition header when name param is not set", %{ + attachment_url: attachment_url + } do + conn = get(build_conn(), attachment_url) + refute Enum.any?(conn.resp_headers, &(elem(&1, 0) == "content-disposition")) + end + + test "sends Content-Disposition header when name 
param is set", %{ + attachment_url: attachment_url + } do + conn = get(build_conn(), attachment_url <> "?name=\"cofe\".gif") + + assert Enum.any?( + conn.resp_headers, + &(&1 == {"content-disposition", "filename=\"\\\"cofe\\\".gif\""}) + ) + end +end diff --git a/test/registration_test.exs b/test/registration_test.exs new file mode 100644 index 000000000..6143b82c7 --- /dev/null +++ b/test/registration_test.exs @@ -0,0 +1,59 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.RegistrationTest do + use Pleroma.DataCase + + import Pleroma.Factory + + alias Pleroma.Registration + alias Pleroma.Repo + + describe "generic changeset" do + test "requires :provider, :uid" do + registration = build(:registration, provider: nil, uid: nil) + + cs = Registration.changeset(registration, %{}) + refute cs.valid? + + assert [ + provider: {"can't be blank", [validation: :required]}, + uid: {"can't be blank", [validation: :required]} + ] == cs.errors + end + + test "ensures uniqueness of [:provider, :uid]" do + registration = insert(:registration) + registration2 = build(:registration, provider: registration.provider, uid: registration.uid) + + cs = Registration.changeset(registration2, %{}) + assert cs.valid? + + assert {:error, + %Ecto.Changeset{ + errors: [ + uid: + {"has already been taken", + [constraint: :unique, constraint_name: "registrations_provider_uid_index"]} + ] + }} = Repo.insert(cs) + + # Note: multiple :uid values per [:user_id, :provider] are intentionally allowed + cs2 = Registration.changeset(registration2, %{uid: "available.uid"}) + assert cs2.valid? + assert {:ok, _} = Repo.insert(cs2) + + cs3 = Registration.changeset(registration2, %{provider: "provider2"}) + assert cs3.valid? + assert {:ok, _} = Repo.insert(cs3) + end + + test "allows `nil` :user_id (user-unbound registration)" do + registration = build(:registration, user_id: nil) + cs = Registration.changeset(registration, %{}) + assert cs.valid? 
+ assert {:ok, _} = Repo.insert(cs) + end + end +end diff --git a/test/scheduled_activity_test.exs b/test/scheduled_activity_test.exs new file mode 100644 index 000000000..edc7cc3f9 --- /dev/null +++ b/test/scheduled_activity_test.exs @@ -0,0 +1,64 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.ScheduledActivityTest do + use Pleroma.DataCase + alias Pleroma.DataCase + alias Pleroma.ScheduledActivity + import Pleroma.Factory + + setup context do + DataCase.ensure_local_uploader(context) + end + + describe "creation" do + test "when daily user limit is exceeded" do + user = insert(:user) + + today = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(:timer.minutes(6), :millisecond) + |> NaiveDateTime.to_iso8601() + + attrs = %{params: %{}, scheduled_at: today} + {:ok, _} = ScheduledActivity.create(user, attrs) + {:ok, _} = ScheduledActivity.create(user, attrs) + {:error, changeset} = ScheduledActivity.create(user, attrs) + assert changeset.errors == [scheduled_at: {"daily limit exceeded", []}] + end + + test "when total user limit is exceeded" do + user = insert(:user) + + today = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(:timer.minutes(6), :millisecond) + |> NaiveDateTime.to_iso8601() + + tomorrow = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(:timer.hours(36), :millisecond) + |> NaiveDateTime.to_iso8601() + + {:ok, _} = ScheduledActivity.create(user, %{params: %{}, scheduled_at: today}) + {:ok, _} = ScheduledActivity.create(user, %{params: %{}, scheduled_at: today}) + {:ok, _} = ScheduledActivity.create(user, %{params: %{}, scheduled_at: tomorrow}) + {:error, changeset} = ScheduledActivity.create(user, %{params: %{}, scheduled_at: tomorrow}) + assert changeset.errors == [scheduled_at: {"total limit exceeded", []}] + end + + test "when scheduled_at is earlier than 5 minute from now" do + user = insert(:user) + + scheduled_at = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(:timer.minutes(4), :millisecond) + |> NaiveDateTime.to_iso8601() + + attrs = %{params: %{}, scheduled_at: scheduled_at} + {:error, changeset} = ScheduledActivity.create(user, attrs) + assert changeset.errors == [scheduled_at: {"must be at least 5 minutes from now", []}] + end + end +end diff --git a/test/scheduled_activity_worker_test.exs b/test/scheduled_activity_worker_test.exs new file mode 100644 index 000000000..b9c91dda6 --- /dev/null +++ b/test/scheduled_activity_worker_test.exs @@ -0,0 +1,19 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.ScheduledActivityWorkerTest do + use Pleroma.DataCase + alias Pleroma.ScheduledActivity + import Pleroma.Factory + + test "creates a status from the scheduled activity" do + user = insert(:user) + scheduled_activity = insert(:scheduled_activity, user: user, params: %{status: "hi"}) + Pleroma.ScheduledActivityWorker.perform(:execute, scheduled_activity.id) + + refute Repo.get(ScheduledActivity, scheduled_activity.id) + activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id)) + assert activity.data["object"]["content"] == "hi" + end +end diff --git a/test/support/builders/user_builder.ex b/test/support/builders/user_builder.ex index 611a5be18..f58e1b0ad 100644 --- a/test/support/builders/user_builder.ex +++ b/test/support/builders/user_builder.ex @@ -1,6 +1,6 @@ defmodule Pleroma.Builders.UserBuilder do - alias Pleroma.User alias 
Pleroma.Repo + alias Pleroma.User def build(data \\ %{}) do user = %User{ diff --git a/test/support/captcha_mock.ex b/test/support/captcha_mock.ex index 9061f2b45..ef4e68bc5 100644 --- a/test/support/captcha_mock.ex +++ b/test/support/captcha_mock.ex @@ -7,7 +7,7 @@ defmodule Pleroma.Captcha.Mock do @behaviour Service @impl Service - def new(), do: %{type: :mock} + def new, do: %{type: :mock} @impl Service def validate(_token, _captcha, _data), do: :ok diff --git a/test/support/factory.ex b/test/support/factory.ex index d1956d1cd..ea59912cf 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -23,7 +23,7 @@ def user_factory do } end - def note_factory do + def note_factory(attrs \\ %{}) do text = sequence(:text, &"This is :moominmamma: note #{&1}") user = insert(:user) @@ -46,7 +46,7 @@ def note_factory do } %Pleroma.Object{ - data: data + data: merge_attributes(data, Map.get(attrs, :data, %{})) } end @@ -95,8 +95,8 @@ def direct_note_activity_factory do } end - def note_activity_factory do - note = insert(:note) + def note_activity_factory(attrs \\ %{}) do + note = attrs[:note] || insert(:note) data = %{ "id" => Pleroma.Web.ActivityPub.Utils.generate_activity_id(), @@ -135,9 +135,9 @@ def article_activity_factory do } end - def announce_activity_factory do - note_activity = insert(:note_activity) - user = insert(:user) + def announce_activity_factory(attrs \\ %{}) do + note_activity = attrs[:note_activity] || insert(:note_activity) + user = attrs[:user] || insert(:user) data = %{ "type" => "Announce", @@ -216,7 +216,7 @@ def oauth_app_factory do redirect_uris: "https://example.com/callback", scopes: ["read", "write", "follow", "push"], website: "https://example.com", - client_id: "aaabbb==", + client_id: Ecto.UUID.generate(), client_secret: "aaa;/&bbb" } end @@ -229,15 +229,66 @@ def instance_factory do end def oauth_token_factory do - user = insert(:user) oauth_app = insert(:oauth_app) %Pleroma.Web.OAuth.Token{ token: :crypto.strong_rand_bytes(32) |> Base.url_encode64(), refresh_token: :crypto.strong_rand_bytes(32) |> Base.url_encode64(), - user_id: user.id, + user: build(:user), app_id: oauth_app.id, valid_until: NaiveDateTime.add(NaiveDateTime.utc_now(), 60 * 10) } end + + def oauth_authorization_factory do + %Pleroma.Web.OAuth.Authorization{ + token: :crypto.strong_rand_bytes(32) |> Base.url_encode64(padding: false), + scopes: ["read", "write", "follow", "push"], + valid_until: NaiveDateTime.add(NaiveDateTime.utc_now(), 60 * 10), + user: build(:user), + app: build(:oauth_app) + } + end + + def push_subscription_factory do + %Pleroma.Web.Push.Subscription{ + user: build(:user), + token: build(:oauth_token), + endpoint: "https://example.com/example/1234", + key_auth: "8eDyX_uCN0XRhSbY5hs7Hg==", + key_p256dh: + "BCIWgsnyXDv1VkhqL2P7YRBvdeuDnlwAPT2guNhdIoW3IP7GmHh1SMKPLxRf7x8vJy6ZFK3ol2ohgn_-0yP7QQA=", + data: %{} + } + end + + def notification_factory do + %Pleroma.Notification{ + user: build(:user) + } + end + + def scheduled_activity_factory do + %Pleroma.ScheduledActivity{ + user: build(:user), + scheduled_at: NaiveDateTime.add(NaiveDateTime.utc_now(), :timer.minutes(60), :millisecond), + params: build(:note) |> Map.from_struct() |> Map.get(:data) + } + end + + def registration_factory do + user = insert(:user) + + %Pleroma.Registration{ + user: user, + provider: "twitter", + uid: "171799000", + info: %{ + "name" => "John Doe", + "email" => "john@doe.com", + "nickname" => "johndoe", + "description" => "My bio" + } + } + end end diff --git 
a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex index 78e8efc9d..5b355bfe6 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -36,6 +36,43 @@ def get("https://osada.macgirvin.com/channel/mike", _, _, _) do }} end + def get("https://mastodon.social/users/emelie/statuses/101849165031453009", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/httpoison_mock/status.emelie.json") + }} + end + + def get("https://mastodon.social/users/emelie", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/httpoison_mock/emelie.json") + }} + end + + def get( + "https://mastodon.social/.well-known/webfinger?resource=https://mastodon.social/users/emelie", + _, + _, + _ + ) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/httpoison_mock/webfinger_emelie.json") + }} + end + + def get("https://mastodon.social/users/emelie.atom", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/httpoison_mock/emelie.atom") + }} + end + def get( "https://osada.macgirvin.com/.well-known/webfinger?resource=acct:mike@osada.macgirvin.com", _, @@ -679,6 +716,10 @@ def get("https://mastodon.social/users/lambadalambda.atom", _, _, _) do {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/lambadalambda.atom")}} end + def get("https://mastodon.social/users/lambadalambda", _, _, _) do + {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/lambadalambda.json")}} + end + def get("https://social.heldscal.la/user/23211", _, _, Accept: "application/activity+json") do {:ok, Tesla.Mock.json(%{"id" => "https://social.heldscal.la/user/23211"}, status: 200)} end diff --git a/test/support/jobs_worker_mock.ex b/test/support/jobs_worker_mock.ex deleted file mode 100644 index 0fb976d05..000000000 --- a/test/support/jobs_worker_mock.ex +++ /dev/null @@ -1,19 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Jobs.WorkerMock do - require Logger - - def perform(:test_job, arg, arg2) do - Logger.debug({:perform, :test_job, arg, arg2}) - end - - def perform(:test_job, payload) do - Logger.debug({:perform, :test_job, payload}) - end - - def test_job(payload) do - Pleroma.Jobs.enqueue(:testing, __MODULE__, [:test_job, payload]) - end -end diff --git a/test/support/web_push_http_client_mock.ex b/test/support/web_push_http_client_mock.ex new file mode 100644 index 000000000..d8accd21c --- /dev/null +++ b/test/support/web_push_http_client_mock.ex @@ -0,0 +1,23 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.WebPushHttpClientMock do + def get(url, headers \\ [], options \\ []) do + { + res, + %Tesla.Env{status: status} + } = Pleroma.HTTP.request(:get, url, "", headers, options) + + {res, %{status_code: status}} + end + + def post(url, body, headers \\ [], options \\ []) do + { + res, + %Tesla.Env{status: status} + } = Pleroma.HTTP.request(:post, url, body, headers, options) + + {res, %{status_code: status}} + end +end diff --git a/test/tasks/relay_test.exs b/test/tasks/relay_test.exs index 64ff07753..535dc3756 100644 --- a/test/tasks/relay_test.exs +++ b/test/tasks/relay_test.exs @@ -4,10 +4,10 @@ defmodule Mix.Tasks.Pleroma.RelayTest do alias Pleroma.Activity - alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.ActivityPub.Utils - 
alias Pleroma.Web.ActivityPub.Relay alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.ActivityPub.Relay + alias Pleroma.Web.ActivityPub.Utils use Pleroma.DataCase setup_all do @@ -60,7 +60,8 @@ test "relay is unfollowed" do ActivityPub.fetch_activities([], %{ "type" => "Undo", "actor_id" => follower_id, - "limit" => 1 + "limit" => 1, + "skip_preload" => true }) assert undo_activity.data["type"] == "Undo" diff --git a/test/tasks/user_test.exs b/test/tasks/user_test.exs index 7b814d171..242265da5 100644 --- a/test/tasks/user_test.exs +++ b/test/tasks/user_test.exs @@ -245,7 +245,97 @@ test "invite token is generated" do end) =~ "http" assert_received {:mix_shell, :info, [message]} - assert message =~ "Generated" + assert message =~ "Generated user invite token one time" + end + + test "token is generated with expires_at" do + assert capture_io(fn -> + Mix.Tasks.Pleroma.User.run([ + "invite", + "--expires-at", + Date.to_string(Date.utc_today()) + ]) + end) + + assert_received {:mix_shell, :info, [message]} + assert message =~ "Generated user invite token date limited" + end + + test "token is generated with max use" do + assert capture_io(fn -> + Mix.Tasks.Pleroma.User.run([ + "invite", + "--max-use", + "5" + ]) + end) + + assert_received {:mix_shell, :info, [message]} + assert message =~ "Generated user invite token reusable" + end + + test "token is generated with max use and expires date" do + assert capture_io(fn -> + Mix.Tasks.Pleroma.User.run([ + "invite", + "--max-use", + "5", + "--expires-at", + Date.to_string(Date.utc_today()) + ]) + end) + + assert_received {:mix_shell, :info, [message]} + assert message =~ "Generated user invite token reusable date limited" + end + end + + describe "running invites" do + test "invites are listed" do + {:ok, invite} = Pleroma.UserInviteToken.create_invite() + + {:ok, invite2} = + Pleroma.UserInviteToken.create_invite(%{expires_at: Date.utc_today(), max_use: 15}) + + # assert capture_io(fn -> + Mix.Tasks.Pleroma.User.run([ + "invites" + ]) + + # end) + + assert_received {:mix_shell, :info, [message]} + assert_received {:mix_shell, :info, [message2]} + assert_received {:mix_shell, :info, [message3]} + assert message =~ "Invites list:" + assert message2 =~ invite.invite_type + assert message3 =~ invite2.invite_type + end + end + + describe "running revoke_invite" do + test "invite is revoked" do + {:ok, invite} = Pleroma.UserInviteToken.create_invite(%{expires_at: Date.utc_today()}) + + assert capture_io(fn -> + Mix.Tasks.Pleroma.User.run([ + "revoke_invite", + invite.token + ]) + end) + + assert_received {:mix_shell, :info, [message]} + assert message =~ "Invite for token #{invite.token} was revoked." + end + end + + describe "running delete_activities" do + test "activities are deleted" do + %{nickname: nickname} = insert(:user) + + assert :ok == Mix.Tasks.Pleroma.User.run(["delete_activities", nickname]) + assert_received {:mix_shell, :info, [message]} + assert message == "User #{nickname} statuses deleted." 
end end end diff --git a/test/upload_test.exs b/test/upload_test.exs index b2d9eca38..946ebcb5a 100644 --- a/test/upload_test.exs +++ b/test/upload_test.exs @@ -56,7 +56,7 @@ test "copies the file to the configured folder with deduping" do assert List.first(data["url"])["href"] == Pleroma.Web.base_url() <> - "/media/e7a6d0cf595bff76f14c9a98b6c199539559e8b844e02e51e5efcfd1f614a2df.jpg" + "/media/e30397b58d226d6583ab5b8b3c5defb0c682bda5c31ef07a9f57c1c4986e3781.jpg" end test "copies the file to the configured folder without deduping" do @@ -153,19 +153,20 @@ test "escapes invalid characters in url" do assert Path.basename(attachment_url["href"]) == "an%E2%80%A6%20image.jpg" end - test "replaces : (colon) and ? (question-mark) to %3A and %3F (respectively)" do + test "escapes reserved uri characters" do File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg") file = %Plug.Upload{ content_type: "image/jpg", path: Path.absname("test/fixtures/image_tmp.jpg"), - filename: "is:an?image.jpg" + filename: ":?#[]@!$&\\'()*+,;=.jpg" } {:ok, data} = Upload.store(file) [attachment_url | _] = data["url"] - assert Path.basename(attachment_url["href"]) == "is%3Aan%3Fimage.jpg" + assert Path.basename(attachment_url["href"]) == + "%3A%3F%23%5B%5D%40%21%24%26%5C%27%28%29%2A%2B%2C%3B%3D.jpg" end end end diff --git a/test/user_invite_token_test.exs b/test/user_invite_token_test.exs new file mode 100644 index 000000000..276788254 --- /dev/null +++ b/test/user_invite_token_test.exs @@ -0,0 +1,96 @@ +defmodule Pleroma.UserInviteTokenTest do + use ExUnit.Case, async: true + use Pleroma.DataCase + alias Pleroma.UserInviteToken + + describe "valid_invite?/1 one time invites" do + setup do + invite = %UserInviteToken{invite_type: "one_time"} + + {:ok, invite: invite} + end + + test "not used returns true", %{invite: invite} do + invite = %{invite | used: false} + assert UserInviteToken.valid_invite?(invite) + end + + test "used returns false", %{invite: invite} do + invite = %{invite | used: true} + refute UserInviteToken.valid_invite?(invite) + end + end + + describe "valid_invite?/1 reusable invites" do + setup do + invite = %UserInviteToken{ + invite_type: "reusable", + max_use: 5 + } + + {:ok, invite: invite} + end + + test "with less uses then max use returns true", %{invite: invite} do + invite = %{invite | uses: 4} + assert UserInviteToken.valid_invite?(invite) + end + + test "with equal or more uses then max use returns false", %{invite: invite} do + invite = %{invite | uses: 5} + + refute UserInviteToken.valid_invite?(invite) + + invite = %{invite | uses: 6} + + refute UserInviteToken.valid_invite?(invite) + end + end + + describe "valid_token?/1 date limited invites" do + setup do + invite = %UserInviteToken{invite_type: "date_limited"} + {:ok, invite: invite} + end + + test "expires today returns true", %{invite: invite} do + invite = %{invite | expires_at: Date.utc_today()} + assert UserInviteToken.valid_invite?(invite) + end + + test "expires yesterday returns false", %{invite: invite} do + invite = %{invite | expires_at: Date.add(Date.utc_today(), -1)} + invite = Repo.insert!(invite) + refute UserInviteToken.valid_invite?(invite) + end + end + + describe "valid_token?/1 reusable date limited invites" do + setup do + invite = %UserInviteToken{invite_type: "reusable_date_limited", max_use: 5} + {:ok, invite: invite} + end + + test "not overdue date and less uses returns true", %{invite: invite} do + invite = %{invite | expires_at: Date.utc_today(), uses: 4} + assert 
UserInviteToken.valid_invite?(invite) + end + + test "overdue date and less uses returns false", %{invite: invite} do + invite = %{invite | expires_at: Date.add(Date.utc_today(), -1)} + invite = Repo.insert!(invite) + refute UserInviteToken.valid_invite?(invite) + end + + test "not overdue date with more uses returns false", %{invite: invite} do + invite = %{invite | expires_at: Date.utc_today(), uses: 5} + refute UserInviteToken.valid_invite?(invite) + end + + test "overdue date with more uses returns false", %{invite: invite} do + invite = %{invite | expires_at: Date.add(Date.utc_today(), -1), uses: 5} + invite = Repo.insert!(invite) + refute UserInviteToken.valid_invite?(invite) + end + end +end diff --git a/test/user_test.exs b/test/user_test.exs index 3a5f3c240..d2167a970 100644 --- a/test/user_test.exs +++ b/test/user_test.exs @@ -3,11 +3,12 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.UserTest do - alias Pleroma.Builders.UserBuilder alias Pleroma.Activity + alias Pleroma.Builders.UserBuilder alias Pleroma.Repo alias Pleroma.User alias Pleroma.Web.CommonAPI + use Pleroma.DataCase import Pleroma.Factory @@ -121,7 +122,7 @@ test "follow takes a user and another user" do {:ok, user} = User.follow(user, followed) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) followed = User.get_by_ap_id(followed.ap_id) assert followed.info.follower_count == 1 @@ -145,6 +146,15 @@ test "can't follow a user who blocked us" do {:error, _} = User.follow(blockee, blocker) end + test "can't subscribe to a user who blocked us" do + blocker = insert(:user) + blocked = insert(:user) + + {:ok, blocker} = User.block(blocker, blocked) + + {:error, _} = User.subscribe(blocked, blocker) + end + test "local users do not automatically follow local locked accounts" do follower = insert(:user, info: %{locked: true}) followed = insert(:user, info: %{locked: true}) @@ -177,7 +187,7 @@ test "unfollow takes a user and another user" do {:ok, user, _activity} = User.unfollow(user, followed) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert user.following == [] end @@ -187,7 +197,7 @@ test "unfollow doesn't unfollow yourself" do {:error, _} = User.unfollow(user, user) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert user.following == [user.ap_id] end @@ -199,6 +209,13 @@ test "test if a user is following another user" do refute User.following?(followed, user) end + test "fetches correct profile for nickname beginning with number" do + # Use old-style integer ID to try to reproduce the problem + user = insert(:user, %{id: 1080}) + userwithnumbers = insert(:user, %{nickname: "#{user.id}garbage"}) + assert userwithnumbers == User.get_cached_by_nickname_or_id(userwithnumbers.nickname) + end + describe "user registration" do @full_user_data %{ bio: "A guy", @@ -678,7 +695,7 @@ test "blocks tear down cyclical follow relationships" do assert User.following?(blocked, blocker) {:ok, blocker} = User.block(blocker, blocked) - blocked = Repo.get(User, blocked.id) + blocked = User.get_by_id(blocked.id) assert User.blocks?(blocker, blocked) @@ -696,7 +713,7 @@ test "blocks tear down blocker->blocked follow relationships" do refute User.following?(blocked, blocker) {:ok, blocker} = User.block(blocker, blocked) - blocked = Repo.get(User, blocked.id) + blocked = User.get_by_id(blocked.id) assert User.blocks?(blocker, blocked) @@ -714,13 +731,29 @@ test "blocks tear down blocked->blocker follow relationships" do assert User.following?(blocked, blocker) {:ok, 
blocker} = User.block(blocker, blocked) - blocked = Repo.get(User, blocked.id) + blocked = User.get_by_id(blocked.id) assert User.blocks?(blocker, blocked) refute User.following?(blocker, blocked) refute User.following?(blocked, blocker) end + + test "blocks tear down blocked->blocker subscription relationships" do + blocker = insert(:user) + blocked = insert(:user) + + {:ok, blocker} = User.subscribe(blocked, blocker) + + assert User.subscribed_to?(blocked, blocker) + refute User.subscribed_to?(blocker, blocked) + + {:ok, blocker} = User.block(blocker, blocked) + + assert User.blocks?(blocker, blocked) + refute User.subscribed_to?(blocker, blocked) + refute User.subscribed_to?(blocked, blocker) + end end describe "domain blocking" do @@ -791,6 +824,16 @@ test ".deactivate can de-activate then re-activate a user" do assert false == user.info.deactivated end + test ".delete_user_activities deletes all create activities" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{"status" => "2hu"}) + {:ok, _} = User.delete_user_activities(user) + + # TODO: Remove favorites, repeats, delete activities. + refute Activity.get_by_id(activity.id) + end + test ".delete deactivates a user, all follow relationships and all create activities" do user = insert(:user) followed = insert(:user) @@ -808,9 +851,9 @@ test ".delete deactivates a user, all follow relationships and all create activi {:ok, _} = User.delete(user) - followed = Repo.get(User, followed.id) - follower = Repo.get(User, follower.id) - user = Repo.get(User, user.id) + followed = User.get_by_id(followed.id) + follower = User.get_by_id(follower.id) + user = User.get_by_id(user.id) assert user.info.deactivated @@ -819,7 +862,7 @@ test ".delete deactivates a user, all follow relationships and all create activi # TODO: Remove favorites, repeats, delete activities. 
- refute Repo.get(Activity, activity.id) + refute Activity.get_by_id(activity.id) end test "get_public_key_for_ap_id fetches a user that's not in the db" do @@ -879,7 +922,11 @@ test "finds a user by full or partial nickname" do user = insert(:user, %{nickname: "john"}) Enum.each(["john", "jo", "j"], fn query -> - assert user == User.search(query) |> List.first() |> Map.put(:search_rank, nil) + assert user == + User.search(query) + |> List.first() + |> Map.put(:search_rank, nil) + |> Map.put(:search_type, nil) end) end @@ -887,7 +934,11 @@ test "finds a user by full or partial name" do user = insert(:user, %{name: "John Doe"}) Enum.each(["John Doe", "JOHN", "doe", "j d", "j", "d"], fn query -> - assert user == User.search(query) |> List.first() |> Map.put(:search_rank, nil) + assert user == + User.search(query) + |> List.first() + |> Map.put(:search_rank, nil) + |> Map.put(:search_type, nil) end) end @@ -941,6 +992,7 @@ test "finds a user whose name is nil" do User.search("lain@pleroma.soykaf.com") |> List.first() |> Map.put(:search_rank, nil) + |> Map.put(:search_type, nil) end test "does not yield false-positive matches" do @@ -958,7 +1010,7 @@ test "works with URIs" do user = User.get_by_ap_id("http://mastodon.example.org/users/admin") assert length(results) == 1 - assert user == result |> Map.put(:search_rank, nil) + assert user == result |> Map.put(:search_rank, nil) |> Map.put(:search_type, nil) end end @@ -1098,4 +1150,21 @@ test "bookmarks" do assert {:ok, user_state3} = User.bookmark(user, id2) assert user_state3.bookmarks == [id2] end + + test "follower count is updated when a follower is blocked" do + user = insert(:user) + follower = insert(:user) + follower2 = insert(:user) + follower3 = insert(:user) + + {:ok, follower} = Pleroma.User.follow(follower, user) + {:ok, _follower2} = Pleroma.User.follow(follower2, user) + {:ok, _follower3} = Pleroma.User.follow(follower3, user) + + {:ok, _} = Pleroma.User.block(user, follower) + + user_show = Pleroma.Web.TwitterAPI.UserView.render("show.json", %{user: user}) + + assert Map.get(user_show, "followers_count") == 2 + end end diff --git a/test/web/activity_pub/activity_pub_controller_test.exs b/test/web/activity_pub/activity_pub_controller_test.exs index 6bd4493f5..8dd8e7e0a 100644 --- a/test/web/activity_pub/activity_pub_controller_test.exs +++ b/test/web/activity_pub/activity_pub_controller_test.exs @@ -5,13 +5,12 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do use Pleroma.Web.ConnCase import Pleroma.Factory - alias Pleroma.Web.ActivityPub.UserView - alias Pleroma.Web.ActivityPub.ObjectView - alias Pleroma.Object - alias Pleroma.Repo alias Pleroma.Activity - alias Pleroma.User alias Pleroma.Instances + alias Pleroma.Object + alias Pleroma.User + alias Pleroma.Web.ActivityPub.ObjectView + alias Pleroma.Web.ActivityPub.UserView setup_all do Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end) @@ -51,7 +50,7 @@ test "it returns a json representation of the user with accept application/json" |> put_req_header("accept", "application/json") |> get("/users/#{user.nickname}") - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert json_response(conn, 200) == UserView.render("user.json", %{user: user}) end @@ -66,7 +65,7 @@ test "it returns a json representation of the user with accept application/activ |> put_req_header("accept", "application/activity+json") |> get("/users/#{user.nickname}") - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert json_response(conn, 
200) == UserView.render("user.json", %{user: user}) end @@ -84,7 +83,7 @@ test "it returns a json representation of the user with accept application/ld+js ) |> get("/users/#{user.nickname}") - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert json_response(conn, 200) == UserView.render("user.json", %{user: user}) end @@ -543,7 +542,7 @@ test "it works for more than 10 users", %{conn: conn} do user = insert(:user) Enum.each(1..15, fn _ -> - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) other_user = insert(:user) User.follow(user, other_user) end) diff --git a/test/web/activity_pub/activity_pub_test.exs b/test/web/activity_pub/activity_pub_test.exs index f4029896c..17fec05b1 100644 --- a/test/web/activity_pub/activity_pub_test.exs +++ b/test/web/activity_pub/activity_pub_test.exs @@ -4,14 +4,14 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do use Pleroma.DataCase + alias Pleroma.Activity + alias Pleroma.Builders.ActivityBuilder + alias Pleroma.Instances + alias Pleroma.Object + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.CommonAPI - alias Pleroma.Activity - alias Pleroma.Object - alias Pleroma.User - alias Pleroma.Instances - alias Pleroma.Builders.ActivityBuilder import Pleroma.Factory import Tesla.Mock @@ -140,7 +140,7 @@ test "returns the activity if one with the same id is already in" do activity = insert(:note_activity) {:ok, new_activity} = ActivityPub.insert(activity.data) - assert activity == new_activity + assert activity.id == new_activity.id end test "inserts a given map into the activity database, giving it an id if it has none." do @@ -218,20 +218,53 @@ test "increases user note count only for public activities" do user = insert(:user) {:ok, _} = - CommonAPI.post(Repo.get(User, user.id), %{"status" => "1", "visibility" => "public"}) + CommonAPI.post(User.get_by_id(user.id), %{"status" => "1", "visibility" => "public"}) {:ok, _} = - CommonAPI.post(Repo.get(User, user.id), %{"status" => "2", "visibility" => "unlisted"}) + CommonAPI.post(User.get_by_id(user.id), %{"status" => "2", "visibility" => "unlisted"}) {:ok, _} = - CommonAPI.post(Repo.get(User, user.id), %{"status" => "2", "visibility" => "private"}) + CommonAPI.post(User.get_by_id(user.id), %{"status" => "2", "visibility" => "private"}) {:ok, _} = - CommonAPI.post(Repo.get(User, user.id), %{"status" => "3", "visibility" => "direct"}) + CommonAPI.post(User.get_by_id(user.id), %{"status" => "3", "visibility" => "direct"}) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert user.info.note_count == 2 end + + test "increases replies count" do + user = insert(:user) + user2 = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{"status" => "1", "visibility" => "public"}) + ap_id = activity.data["id"] + reply_data = %{"status" => "1", "in_reply_to_status_id" => activity.id} + + # public + {:ok, _} = CommonAPI.post(user2, Map.put(reply_data, "visibility", "public")) + assert %{data: data, object: object} = Activity.get_by_ap_id_with_object(ap_id) + assert data["object"]["repliesCount"] == 1 + assert object.data["repliesCount"] == 1 + + # unlisted + {:ok, _} = CommonAPI.post(user2, Map.put(reply_data, "visibility", "unlisted")) + assert %{data: data, object: object} = Activity.get_by_ap_id_with_object(ap_id) + assert data["object"]["repliesCount"] == 2 + assert object.data["repliesCount"] == 2 + + # private + {:ok, _} = CommonAPI.post(user2, Map.put(reply_data, "visibility", "private")) + 
assert %{data: data, object: object} = Activity.get_by_ap_id_with_object(ap_id) + assert data["object"]["repliesCount"] == 2 + assert object.data["repliesCount"] == 2 + + # direct + {:ok, _} = CommonAPI.post(user2, Map.put(reply_data, "visibility", "direct")) + assert %{data: data, object: object} = Activity.get_by_ap_id_with_object(ap_id) + assert data["object"]["repliesCount"] == 2 + assert object.data["repliesCount"] == 2 + end end describe "fetch activities for recipients" do @@ -270,7 +303,8 @@ test "doesn't return blocked activities" do booster = insert(:user) {:ok, user} = User.block(user, %{ap_id: activity_one.data["actor"]}) - activities = ActivityPub.fetch_activities([], %{"blocking_user" => user}) + activities = + ActivityPub.fetch_activities([], %{"blocking_user" => user, "skip_preload" => true}) assert Enum.member?(activities, activity_two) assert Enum.member?(activities, activity_three) @@ -278,7 +312,8 @@ test "doesn't return blocked activities" do {:ok, user} = User.unblock(user, %{ap_id: activity_one.data["actor"]}) - activities = ActivityPub.fetch_activities([], %{"blocking_user" => user}) + activities = + ActivityPub.fetch_activities([], %{"blocking_user" => user, "skip_preload" => true}) assert Enum.member?(activities, activity_two) assert Enum.member?(activities, activity_three) @@ -287,16 +322,18 @@ test "doesn't return blocked activities" do {:ok, user} = User.block(user, %{ap_id: activity_three.data["actor"]}) {:ok, _announce, %{data: %{"id" => id}}} = CommonAPI.repeat(activity_three.id, booster) %Activity{} = boost_activity = Activity.get_create_by_object_ap_id(id) - activity_three = Repo.get(Activity, activity_three.id) + activity_three = Activity.get_by_id(activity_three.id) - activities = ActivityPub.fetch_activities([], %{"blocking_user" => user}) + activities = + ActivityPub.fetch_activities([], %{"blocking_user" => user, "skip_preload" => true}) assert Enum.member?(activities, activity_two) refute Enum.member?(activities, activity_three) refute Enum.member?(activities, boost_activity) assert Enum.member?(activities, activity_one) - activities = ActivityPub.fetch_activities([], %{"blocking_user" => nil}) + activities = + ActivityPub.fetch_activities([], %{"blocking_user" => nil, "skip_preload" => true}) assert Enum.member?(activities, activity_two) assert Enum.member?(activities, activity_three) @@ -312,14 +349,20 @@ test "doesn't return muted activities" do booster = insert(:user) {:ok, user} = User.mute(user, %User{ap_id: activity_one.data["actor"]}) - activities = ActivityPub.fetch_activities([], %{"muting_user" => user}) + activities = + ActivityPub.fetch_activities([], %{"muting_user" => user, "skip_preload" => true}) assert Enum.member?(activities, activity_two) assert Enum.member?(activities, activity_three) refute Enum.member?(activities, activity_one) # Calling with 'with_muted' will deliver muted activities, too. 
- activities = ActivityPub.fetch_activities([], %{"muting_user" => user, "with_muted" => true}) + activities = + ActivityPub.fetch_activities([], %{ + "muting_user" => user, + "with_muted" => true, + "skip_preload" => true + }) assert Enum.member?(activities, activity_two) assert Enum.member?(activities, activity_three) @@ -327,7 +370,8 @@ test "doesn't return muted activities" do {:ok, user} = User.unmute(user, %User{ap_id: activity_one.data["actor"]}) - activities = ActivityPub.fetch_activities([], %{"muting_user" => user}) + activities = + ActivityPub.fetch_activities([], %{"muting_user" => user, "skip_preload" => true}) assert Enum.member?(activities, activity_two) assert Enum.member?(activities, activity_three) @@ -336,16 +380,17 @@ test "doesn't return muted activities" do {:ok, user} = User.mute(user, %User{ap_id: activity_three.data["actor"]}) {:ok, _announce, %{data: %{"id" => id}}} = CommonAPI.repeat(activity_three.id, booster) %Activity{} = boost_activity = Activity.get_create_by_object_ap_id(id) - activity_three = Repo.get(Activity, activity_three.id) + activity_three = Activity.get_by_id(activity_three.id) - activities = ActivityPub.fetch_activities([], %{"muting_user" => user}) + activities = + ActivityPub.fetch_activities([], %{"muting_user" => user, "skip_preload" => true}) assert Enum.member?(activities, activity_two) refute Enum.member?(activities, activity_three) refute Enum.member?(activities, boost_activity) assert Enum.member?(activities, activity_one) - activities = ActivityPub.fetch_activities([], %{"muting_user" => nil}) + activities = ActivityPub.fetch_activities([], %{"muting_user" => nil, "skip_preload" => true}) assert Enum.member?(activities, activity_two) assert Enum.member?(activities, activity_three) @@ -353,6 +398,20 @@ test "doesn't return muted activities" do assert Enum.member?(activities, activity_one) end + test "does include announces on request" do + activity_three = insert(:note_activity) + user = insert(:user) + booster = insert(:user) + + {:ok, user} = User.follow(user, booster) + + {:ok, announce, _object} = CommonAPI.repeat(activity_three.id, booster) + + [announce_activity] = ActivityPub.fetch_activities([user.ap_id | user.following]) + + assert announce_activity.id == announce.id + end + test "excludes reblogs on request" do user = insert(:user) {:ok, expected_activity} = ActivityBuilder.insert(%{"type" => "Create"}, %{:user => user}) @@ -424,6 +483,33 @@ test "retrieves ids up to max_id" do assert length(activities) == 20 assert last == last_expected end + + test "doesn't return reblogs for users for whom reblogs have been muted" do + activity = insert(:note_activity) + user = insert(:user) + booster = insert(:user) + {:ok, user} = CommonAPI.hide_reblogs(user, booster) + + {:ok, activity, _} = CommonAPI.repeat(activity.id, booster) + + activities = ActivityPub.fetch_activities([], %{"muting_user" => user}) + + refute Enum.any?(activities, fn %{id: id} -> id == activity.id end) + end + + test "returns reblogs for users for whom reblogs have not been muted" do + activity = insert(:note_activity) + user = insert(:user) + booster = insert(:user) + {:ok, user} = CommonAPI.hide_reblogs(user, booster) + {:ok, user} = CommonAPI.show_reblogs(user, booster) + + {:ok, activity, _} = CommonAPI.repeat(activity.id, booster) + + activities = ActivityPub.fetch_activities([], %{"muting_user" => user}) + + assert Enum.any?(activities, fn %{id: id} -> id == activity.id end) + end end describe "like an object" do @@ -473,7 +559,7 @@ test "unliking a previously 
liked object" do {:ok, _, _, object} = ActivityPub.unlike(user, object) assert object.data["like_count"] == 0 - assert Repo.get(Activity, like_activity.id) == nil + assert Activity.get_by_id(like_activity.id) == nil end end @@ -524,7 +610,7 @@ test "unannouncing a previously announced object" do assert unannounce_activity.data["actor"] == user.ap_id assert unannounce_activity.data["context"] == announce_activity.data["context"] - assert Repo.get(Activity, announce_activity.id) == nil + assert Activity.get_by_id(announce_activity.id) == nil end end @@ -549,16 +635,6 @@ test "works with base64 encoded images" do end end - describe "fetch the latest Follow" do - test "fetches the latest Follow activity" do - %Activity{data: %{"type" => "Follow"}} = activity = insert(:follow_activity) - follower = Repo.get_by(User, ap_id: activity.data["actor"]) - followed = Repo.get_by(User, ap_id: activity.data["object"]) - - assert activity == Utils.fetch_latest_follow(follower, followed) - end - end - describe "fetching an object" do test "it fetches an object" do {:ok, object} = @@ -663,7 +739,7 @@ test "it creates a delete activity and deletes the original object" do assert delete.data["actor"] == note.data["actor"] assert delete.data["object"] == note.data["object"]["id"] - assert Repo.get(Activity, delete.id) != nil + assert Activity.get_by_id(delete.id) != nil assert Repo.get(Object, object.id).data["type"] == "Tombstone" end @@ -672,25 +748,80 @@ test "decrements user note count only for public activities" do user = insert(:user, info: %{note_count: 10}) {:ok, a1} = - CommonAPI.post(Repo.get(User, user.id), %{"status" => "yeah", "visibility" => "public"}) + CommonAPI.post(User.get_by_id(user.id), %{"status" => "yeah", "visibility" => "public"}) {:ok, a2} = - CommonAPI.post(Repo.get(User, user.id), %{"status" => "yeah", "visibility" => "unlisted"}) + CommonAPI.post(User.get_by_id(user.id), %{"status" => "yeah", "visibility" => "unlisted"}) {:ok, a3} = - CommonAPI.post(Repo.get(User, user.id), %{"status" => "yeah", "visibility" => "private"}) + CommonAPI.post(User.get_by_id(user.id), %{"status" => "yeah", "visibility" => "private"}) {:ok, a4} = - CommonAPI.post(Repo.get(User, user.id), %{"status" => "yeah", "visibility" => "direct"}) + CommonAPI.post(User.get_by_id(user.id), %{"status" => "yeah", "visibility" => "direct"}) {:ok, _} = a1.data["object"]["id"] |> Object.get_by_ap_id() |> ActivityPub.delete() {:ok, _} = a2.data["object"]["id"] |> Object.get_by_ap_id() |> ActivityPub.delete() {:ok, _} = a3.data["object"]["id"] |> Object.get_by_ap_id() |> ActivityPub.delete() {:ok, _} = a4.data["object"]["id"] |> Object.get_by_ap_id() |> ActivityPub.delete() - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert user.info.note_count == 10 end + + test "it creates a delete activity and checks that it is also sent to users mentioned by the deleted object" do + user = insert(:user) + note = insert(:note_activity) + + {:ok, object} = + Object.get_by_ap_id(note.data["object"]["id"]) + |> Object.change(%{ + data: %{ + "actor" => note.data["object"]["actor"], + "id" => note.data["object"]["id"], + "to" => [user.ap_id], + "type" => "Note" + } + }) + |> Object.update_and_set_cache() + + {:ok, delete} = ActivityPub.delete(object) + + assert user.ap_id in delete.data["to"] + end + + test "decreases reply count" do + user = insert(:user) + user2 = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{"status" => "1", "visibility" => "public"}) + reply_data = %{"status" => "1", 
"in_reply_to_status_id" => activity.id} + ap_id = activity.data["id"] + + {:ok, public_reply} = CommonAPI.post(user2, Map.put(reply_data, "visibility", "public")) + {:ok, unlisted_reply} = CommonAPI.post(user2, Map.put(reply_data, "visibility", "unlisted")) + {:ok, private_reply} = CommonAPI.post(user2, Map.put(reply_data, "visibility", "private")) + {:ok, direct_reply} = CommonAPI.post(user2, Map.put(reply_data, "visibility", "direct")) + + _ = CommonAPI.delete(direct_reply.id, user2) + assert %{data: data, object: object} = Activity.get_by_ap_id_with_object(ap_id) + assert data["object"]["repliesCount"] == 2 + assert object.data["repliesCount"] == 2 + + _ = CommonAPI.delete(private_reply.id, user2) + assert %{data: data, object: object} = Activity.get_by_ap_id_with_object(ap_id) + assert data["object"]["repliesCount"] == 2 + assert object.data["repliesCount"] == 2 + + _ = CommonAPI.delete(public_reply.id, user2) + assert %{data: data, object: object} = Activity.get_by_ap_id_with_object(ap_id) + assert data["object"]["repliesCount"] == 1 + assert object.data["repliesCount"] == 1 + + _ = CommonAPI.delete(unlisted_reply.id, user2) + assert %{data: data, object: object} = Activity.get_by_ap_id_with_object(ap_id) + assert data["object"]["repliesCount"] == 0 + assert object.data["repliesCount"] == 0 + end end describe "timeline post-processing" do @@ -729,6 +860,7 @@ test "it filters broken threads" do activities = ActivityPub.fetch_activities([user1.ap_id | user1.following]) + private_activity_1 = Activity.get_by_ap_id_with_object(private_activity_1.data["id"]) assert [public_activity, private_activity_1, private_activity_3] == activities assert length(activities) == 3 diff --git a/test/web/activity_pub/mrf/anti_followbot_policy_test.exs b/test/web/activity_pub/mrf/anti_followbot_policy_test.exs index 2ea4f9d3f..37a7bfcf7 100644 --- a/test/web/activity_pub/mrf/anti_followbot_policy_test.exs +++ b/test/web/activity_pub/mrf/anti_followbot_policy_test.exs @@ -54,4 +54,19 @@ test "it allows non-followbots" do {:ok, _} = AntiFollowbotPolicy.filter(message) end + + test "it gracefully handles nil display names" do + actor = insert(:user, %{name: nil}) + target = insert(:user) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "type" => "Follow", + "actor" => actor.ap_id, + "object" => target.ap_id, + "id" => "https://example.com/activities/1234" + } + + {:ok, _} = AntiFollowbotPolicy.filter(message) + end end diff --git a/test/web/activity_pub/transmogrifier_test.exs b/test/web/activity_pub/transmogrifier_test.exs index 86c66deff..47cffe257 100644 --- a/test/web/activity_pub/transmogrifier_test.exs +++ b/test/web/activity_pub/transmogrifier_test.exs @@ -4,13 +4,14 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do use Pleroma.DataCase + alias Pleroma.Activity + alias Pleroma.Object + alias Pleroma.Repo + alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Utils - alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.OStatus - alias Pleroma.Activity - alias Pleroma.User - alias Pleroma.Repo alias Pleroma.Web.Websub.WebsubClientSubscription import Pleroma.Factory @@ -334,6 +335,53 @@ test "it does not clobber the addressing on announce activities" do assert data["to"] == ["http://mastodon.example.org/users/admin/followers"] end + test "it ensures that as:Public activities make it to their followers collection" do + user = insert(:user) + + data = + 
File.read!("test/fixtures/mastodon-post-activity.json") + |> Poison.decode!() + |> Map.put("actor", user.ap_id) + |> Map.put("to", ["https://www.w3.org/ns/activitystreams#Public"]) + |> Map.put("cc", []) + + object = + data["object"] + |> Map.put("attributedTo", user.ap_id) + |> Map.put("to", ["https://www.w3.org/ns/activitystreams#Public"]) + |> Map.put("cc", []) + + data = Map.put(data, "object", object) + + {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data) + + assert data["cc"] == [User.ap_followers(user)] + end + + test "it ensures that address fields become lists" do + user = insert(:user) + + data = + File.read!("test/fixtures/mastodon-post-activity.json") + |> Poison.decode!() + |> Map.put("actor", user.ap_id) + |> Map.put("to", nil) + |> Map.put("cc", nil) + + object = + data["object"] + |> Map.put("attributedTo", user.ap_id) + |> Map.put("to", nil) + |> Map.put("cc", nil) + + data = Map.put(data, "object", object) + + {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data) + + assert !is_nil(data["to"]) + assert !is_nil(data["cc"]) + end + test "it works for incoming update activities" do data = File.read!("test/fixtures/mastodon-post-activity.json") |> Poison.decode!() @@ -413,7 +461,7 @@ test "it works for incoming deletes" do {:ok, %Activity{local: false}} = Transmogrifier.handle_incoming(data) - refute Repo.get(Activity, activity.id) + refute Activity.get_by_id(activity.id) end test "it fails for incoming deletes with spoofed origin" do @@ -433,7 +481,7 @@ test "it fails for incoming deletes with spoofed origin" do :error = Transmogrifier.handle_incoming(data) - assert Repo.get(Activity, activity.id) + assert Activity.get_by_id(activity.id) end test "it works for incoming unannounces with an existing notice" do @@ -591,7 +639,7 @@ test "it works for incoming accepts which were pre-accepted" do assert activity.data["object"] == follow_activity.data["id"] - follower = Repo.get(User, follower.id) + follower = User.get_by_id(follower.id) assert User.following?(follower, followed) == true end @@ -613,7 +661,7 @@ test "it works for incoming accepts which were orphaned" do {:ok, activity} = Transmogrifier.handle_incoming(accept_data) assert activity.data["object"] == follow_activity.data["id"] - follower = Repo.get(User, follower.id) + follower = User.get_by_id(follower.id) assert User.following?(follower, followed) == true end @@ -633,7 +681,7 @@ test "it works for incoming accepts which are referenced by IRI only" do {:ok, activity} = Transmogrifier.handle_incoming(accept_data) assert activity.data["object"] == follow_activity.data["id"] - follower = Repo.get(User, follower.id) + follower = User.get_by_id(follower.id) assert User.following?(follower, followed) == true end @@ -652,7 +700,7 @@ test "it fails for incoming accepts which cannot be correlated" do :error = Transmogrifier.handle_incoming(accept_data) - follower = Repo.get(User, follower.id) + follower = User.get_by_id(follower.id) refute User.following?(follower, followed) == true end @@ -671,7 +719,7 @@ test "it fails for incoming rejects which cannot be correlated" do :error = Transmogrifier.handle_incoming(accept_data) - follower = Repo.get(User, follower.id) + follower = User.get_by_id(follower.id) refute User.following?(follower, followed) == true end @@ -696,7 +744,7 @@ test "it works for incoming rejects which are orphaned" do {:ok, activity} = Transmogrifier.handle_incoming(reject_data) refute activity.local - follower = Repo.get(User, follower.id) + 
follower = User.get_by_id(follower.id) assert User.following?(follower, followed) == false end @@ -718,7 +766,7 @@ test "it works for incoming rejects which are referenced by IRI only" do {:ok, %Activity{data: _}} = Transmogrifier.handle_incoming(reject_data) - follower = Repo.get(User, follower.id) + follower = User.get_by_id(follower.id) assert User.following?(follower, followed) == false end @@ -764,6 +812,30 @@ test "it remaps video URLs as attachments if necessary" do assert object.data["attachment"] == [attachment] end + + test "it accepts Flag activities" do + user = insert(:user) + other_user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{"status" => "test post"}) + object = Object.normalize(activity.data["object"]) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "cc" => [user.ap_id], + "object" => [user.ap_id, object.data["id"]], + "type" => "Flag", + "content" => "blocked AND reported!!!", + "actor" => other_user.ap_id + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(message) + + assert activity.data["object"] == [user.ap_id, object.data["id"]] + assert activity.data["content"] == "blocked AND reported!!!" + assert activity.data["actor"] == other_user.ap_id + assert activity.data["cc"] == [user.ap_id] + end end describe "prepare outgoing" do @@ -948,7 +1020,7 @@ test "it upgrades a user to activitypub" do {:ok, unrelated_activity} = CommonAPI.post(user_two, %{"status" => "test"}) assert "http://localhost:4001/users/rye@niu.moe/followers" in activity.recipients - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert user.info.note_count == 1 {:ok, user} = Transmogrifier.upgrade_user_from_ap_id("https://niu.moe/users/rye") @@ -956,13 +1028,10 @@ test "it upgrades a user to activitypub" do assert user.info.note_count == 1 assert user.follower_address == "https://niu.moe/users/rye/followers" - # Wait for the background task - :timer.sleep(1000) - - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert user.info.note_count == 1 - activity = Repo.get(Activity, activity.id) + activity = Activity.get_by_id(activity.id) assert user.follower_address in activity.recipients assert %{ @@ -985,10 +1054,10 @@ test "it upgrades a user to activitypub" do refute "..." in activity.recipients - unrelated_activity = Repo.get(Activity, unrelated_activity.id) + unrelated_activity = Activity.get_by_id(unrelated_activity.id) refute user.follower_address in unrelated_activity.recipients - user_two = Repo.get(User, user_two.id) + user_two = User.get_by_id(user_two.id) assert user.follower_address in user_two.following refute "..." 
in user_two.following end diff --git a/test/web/activity_pub/utils_test.exs b/test/web/activity_pub/utils_test.exs index aeed0564c..758214e68 100644 --- a/test/web/activity_pub/utils_test.exs +++ b/test/web/activity_pub/utils_test.exs @@ -1,6 +1,33 @@ defmodule Pleroma.Web.ActivityPub.UtilsTest do use Pleroma.DataCase + alias Pleroma.Activity + alias Pleroma.Repo + alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Utils + alias Pleroma.Web.CommonAPI + + import Pleroma.Factory + + describe "fetch the latest Follow" do + test "fetches the latest Follow activity" do + %Activity{data: %{"type" => "Follow"}} = activity = insert(:follow_activity) + follower = Repo.get_by(User, ap_id: activity.data["actor"]) + followed = Repo.get_by(User, ap_id: activity.data["object"]) + + assert activity == Utils.fetch_latest_follow(follower, followed) + end + end + + describe "fetch the latest Block" do + test "fetches the latest Block activity" do + blocker = insert(:user) + blocked = insert(:user) + {:ok, activity} = ActivityPub.block(blocker, blocked) + + assert activity == Utils.fetch_latest_block(blocker, blocked) + end + end describe "determine_explicit_mentions()" do test "works with an object that has mentions" do @@ -54,4 +81,128 @@ test "works with an object that has only IR tags" do assert Utils.determine_explicit_mentions(object) == [] end end + + describe "make_like_data" do + setup do + user = insert(:user) + other_user = insert(:user) + third_user = insert(:user) + [user: user, other_user: other_user, third_user: third_user] + end + + test "addresses actor's follower address if the activity is public", %{ + user: user, + other_user: other_user, + third_user: third_user + } do + expected_to = Enum.sort([user.ap_id, other_user.follower_address]) + expected_cc = Enum.sort(["https://www.w3.org/ns/activitystreams#Public", third_user.ap_id]) + + {:ok, activity} = + CommonAPI.post(user, %{ + "status" => + "hey @#{other_user.nickname}, @#{third_user.nickname} how about beering together this weekend?" 
+ }) + + %{"to" => to, "cc" => cc} = Utils.make_like_data(other_user, activity, nil) + assert Enum.sort(to) == expected_to + assert Enum.sort(cc) == expected_cc + end + + test "does not address actor's follower address if the activity is not public", %{ + user: user, + other_user: other_user, + third_user: third_user + } do + expected_to = Enum.sort([user.ap_id]) + expected_cc = [third_user.ap_id] + + {:ok, activity} = + CommonAPI.post(user, %{ + "status" => "@#{other_user.nickname} @#{third_user.nickname} bought a new swimsuit!", + "visibility" => "private" + }) + + %{"to" => to, "cc" => cc} = Utils.make_like_data(other_user, activity, nil) + assert Enum.sort(to) == expected_to + assert Enum.sort(cc) == expected_cc + end + end + + describe "fetch_ordered_collection" do + import Tesla.Mock + + test "fetches the first OrderedCollectionPage when an OrderedCollection is encountered" do + mock(fn + %{method: :get, url: "http://mastodon.com/outbox"} -> + json(%{"type" => "OrderedCollection", "first" => "http://mastodon.com/outbox?page=true"}) + + %{method: :get, url: "http://mastodon.com/outbox?page=true"} -> + json(%{"type" => "OrderedCollectionPage", "orderedItems" => ["ok"]}) + end) + + assert Utils.fetch_ordered_collection("http://mastodon.com/outbox", 1) == ["ok"] + end + + test "fetches several pages in the right order one after another, but only the specified amount" do + mock(fn + %{method: :get, url: "http://example.com/outbox"} -> + json(%{ + "type" => "OrderedCollectionPage", + "orderedItems" => [0], + "next" => "http://example.com/outbox?page=1" + }) + + %{method: :get, url: "http://example.com/outbox?page=1"} -> + json(%{ + "type" => "OrderedCollectionPage", + "orderedItems" => [1], + "next" => "http://example.com/outbox?page=2" + }) + + %{method: :get, url: "http://example.com/outbox?page=2"} -> + json(%{"type" => "OrderedCollectionPage", "orderedItems" => [2]}) + end) + + assert Utils.fetch_ordered_collection("http://example.com/outbox", 0) == [0] + assert Utils.fetch_ordered_collection("http://example.com/outbox", 1) == [0, 1] + end + + test "returns an error if the url doesn't have an OrderedCollection/Page" do + mock(fn + %{method: :get, url: "http://example.com/not-an-outbox"} -> + json(%{"type" => "NotAnOutbox"}) + end) + + assert {:error, _} = Utils.fetch_ordered_collection("http://example.com/not-an-outbox", 1) + end + + test "returns what was collected if there are fewer pages than specified" do + mock(fn + %{method: :get, url: "http://example.com/outbox"} -> + json(%{ + "type" => "OrderedCollectionPage", + "orderedItems" => [0], + "next" => "http://example.com/outbox?page=1" + }) + + %{method: :get, url: "http://example.com/outbox?page=1"} -> + json(%{"type" => "OrderedCollectionPage", "orderedItems" => [1]}) + end) + + assert Utils.fetch_ordered_collection("http://example.com/outbox", 5) == [0, 1] + end + end + + test "make_json_ld_header/0" do + assert Utils.make_json_ld_header() == %{ + "@context" => [ + "https://www.w3.org/ns/activitystreams", + "http://localhost:4001/schemas/litepub-0.1.jsonld", + %{ + "@language" => "und" + } + ] + } + end end diff --git a/test/web/activity_pub/views/object_view_test.exs b/test/web/activity_pub/views/object_view_test.exs index d144a77fc..d939fc5a7 100644 --- a/test/web/activity_pub/views/object_view_test.exs +++ b/test/web/activity_pub/views/object_view_test.exs @@ -2,8 +2,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectViewTest do use Pleroma.DataCase import Pleroma.Factory - alias Pleroma.Web.CommonAPI alias 
Pleroma.Web.ActivityPub.ObjectView + alias Pleroma.Web.CommonAPI test "renders a note object" do note = insert(:note) diff --git a/test/web/activity_pub/views/user_view_test.exs b/test/web/activity_pub/views/user_view_test.exs index 0bc1d4728..9fb9455d2 100644 --- a/test/web/activity_pub/views/user_view_test.exs +++ b/test/web/activity_pub/views/user_view_test.exs @@ -16,6 +16,29 @@ test "Renders a user, including the public key" do assert String.contains?(result["publicKey"]["publicKeyPem"], "BEGIN PUBLIC KEY") end + test "Does not add an avatar image if the user hasn't set one" do + user = insert(:user) + {:ok, user} = Pleroma.Web.WebFinger.ensure_keys_present(user) + + result = UserView.render("user.json", %{user: user}) + refute result["icon"] + refute result["image"] + + user = + insert(:user, + avatar: %{"url" => [%{"href" => "https://someurl"}]}, + info: %{ + banner: %{"url" => [%{"href" => "https://somebanner"}]} + } + ) + + {:ok, user} = Pleroma.Web.WebFinger.ensure_keys_present(user) + + result = UserView.render("user.json", %{user: user}) + assert result["icon"]["url"] == "https://someurl" + assert result["image"]["url"] == "https://somebanner" + end + describe "endpoints" do test "local users have a usable endpoints structure" do user = insert(:user) diff --git a/test/web/activity_pub/visibilty_test.exs b/test/web/activity_pub/visibilty_test.exs index 1172b7455..24b96c4aa 100644 --- a/test/web/activity_pub/visibilty_test.exs +++ b/test/web/activity_pub/visibilty_test.exs @@ -1,8 +1,8 @@ defmodule Pleroma.Web.ActivityPub.VisibilityTest do use Pleroma.DataCase - alias Pleroma.Web.CommonAPI alias Pleroma.Web.ActivityPub.Visibility + alias Pleroma.Web.CommonAPI import Pleroma.Factory setup do diff --git a/test/web/admin_api/admin_api_controller_test.exs b/test/web/admin_api/admin_api_controller_test.exs index 42e0daf8e..d44392c9d 100644 --- a/test/web/admin_api/admin_api_controller_test.exs +++ b/test/web/admin_api/admin_api_controller_test.exs @@ -5,8 +5,8 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do use Pleroma.Web.ConnCase - alias Pleroma.Repo alias Pleroma.User + alias Pleroma.UserInviteToken import Pleroma.Factory describe "/api/pleroma/admin/user" do @@ -40,6 +40,85 @@ test "Create" do end end + describe "/api/pleroma/admin/users/:nickname" do + test "Show", %{conn: conn} do + admin = insert(:user, info: %{is_admin: true}) + user = insert(:user) + + conn = + conn + |> assign(:user, admin) + |> get("/api/pleroma/admin/users/#{user.nickname}") + + expected = %{ + "deactivated" => false, + "id" => to_string(user.id), + "local" => true, + "nickname" => user.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + "tags" => [] + } + + assert expected == json_response(conn, 200) + end + + test "when the user doesn't exist", %{conn: conn} do + admin = insert(:user, info: %{is_admin: true}) + user = build(:user) + + conn = + conn + |> assign(:user, admin) + |> get("/api/pleroma/admin/users/#{user.nickname}") + + assert "Not found" == json_response(conn, 404) + end + end + + describe "/api/pleroma/admin/user/follow" do + test "allows to force-follow another user" do + admin = insert(:user, info: %{is_admin: true}) + user = insert(:user) + follower = insert(:user) + + build_conn() + |> assign(:user, admin) + |> put_req_header("accept", "application/json") + |> post("/api/pleroma/admin/user/follow", %{ + "follower" => follower.nickname, + "followed" => user.nickname + }) + + user = User.get_by_id(user.id) + follower = User.get_by_id(follower.id) + + assert 
User.following?(follower, user) + end + end + + describe "/api/pleroma/admin/user/unfollow" do + test "allows to force-unfollow another user" do + admin = insert(:user, info: %{is_admin: true}) + user = insert(:user) + follower = insert(:user) + + User.follow(follower, user) + + build_conn() + |> assign(:user, admin) + |> put_req_header("accept", "application/json") + |> post("/api/pleroma/admin/user/unfollow", %{ + "follower" => follower.nickname, + "followed" => user.nickname + }) + + user = User.get_by_id(user.id) + follower = User.get_by_id(follower.id) + + refute User.following?(follower, user) + end + end + describe "PUT /api/pleroma/admin/users/tag" do setup do admin = insert(:user, info: %{is_admin: true}) @@ -66,13 +145,13 @@ test "it appends specified tags to users with specified nicknames", %{ user2: user2 } do assert json_response(conn, :no_content) - assert Repo.get(User, user1.id).tags == ["x", "foo", "bar"] - assert Repo.get(User, user2.id).tags == ["y", "foo", "bar"] + assert User.get_by_id(user1.id).tags == ["x", "foo", "bar"] + assert User.get_by_id(user2.id).tags == ["y", "foo", "bar"] end test "it does not modify tags of not specified users", %{conn: conn, user3: user3} do assert json_response(conn, :no_content) - assert Repo.get(User, user3.id).tags == ["unchanged"] + assert User.get_by_id(user3.id).tags == ["unchanged"] end end @@ -102,13 +181,13 @@ test "it removes specified tags from users with specified nicknames", %{ user2: user2 } do assert json_response(conn, :no_content) - assert Repo.get(User, user1.id).tags == [] - assert Repo.get(User, user2.id).tags == ["y"] + assert User.get_by_id(user1.id).tags == [] + assert User.get_by_id(user2.id).tags == ["y"] end test "it does not modify tags of not specified users", %{conn: conn, user3: user3} do assert json_response(conn, :no_content) - assert Repo.get(User, user3.id).tags == ["unchanged"] + assert User.get_by_id(user3.id).tags == ["unchanged"] end end @@ -178,7 +257,7 @@ test "deactivates the user", %{conn: conn} do conn |> put("/api/pleroma/admin/activation_status/#{user.nickname}", %{status: false}) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert user.info.deactivated == true assert json_response(conn, :no_content) end @@ -190,7 +269,7 @@ test "activates the user", %{conn: conn} do conn |> put("/api/pleroma/admin/activation_status/#{user.nickname}", %{status: true}) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) assert user.info.deactivated == false assert json_response(conn, :no_content) end @@ -334,7 +413,7 @@ test "/api/pleroma/admin/password_reset" do describe "GET /api/pleroma/admin/users" do test "renders users array for the first page" do admin = insert(:user, info: %{is_admin: true}) - user = insert(:user) + user = insert(:user, local: false, tags: ["foo", "bar"]) conn = build_conn() @@ -348,12 +427,18 @@ test "renders users array for the first page" do %{ "deactivated" => admin.info.deactivated, "id" => admin.id, - "nickname" => admin.nickname + "nickname" => admin.nickname, + "roles" => %{"admin" => true, "moderator" => false}, + "local" => true, + "tags" => [] }, %{ "deactivated" => user.info.deactivated, "id" => user.id, - "nickname" => user.nickname + "nickname" => user.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + "local" => false, + "tags" => ["foo", "bar"] } ] } @@ -374,6 +459,167 @@ test "renders empty array for the second page" do "users" => [] } end + + test "regular search" do + admin = insert(:user, info: %{is_admin: true}) + 
user = insert(:user, nickname: "bob") + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/users?query=bo") + + assert json_response(conn, 200) == %{ + "count" => 1, + "page_size" => 50, + "users" => [ + %{ + "deactivated" => user.info.deactivated, + "id" => user.id, + "nickname" => user.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + "local" => true, + "tags" => [] + } + ] + } + end + + test "regular search with page size" do + admin = insert(:user, info: %{is_admin: true}) + user = insert(:user, nickname: "aalice") + user2 = insert(:user, nickname: "alice") + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/users?query=a&page_size=1&page=1") + + assert json_response(conn, 200) == %{ + "count" => 2, + "page_size" => 1, + "users" => [ + %{ + "deactivated" => user.info.deactivated, + "id" => user.id, + "nickname" => user.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + "local" => true, + "tags" => [] + } + ] + } + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/users?query=a&page_size=1&page=2") + + assert json_response(conn, 200) == %{ + "count" => 2, + "page_size" => 1, + "users" => [ + %{ + "deactivated" => user2.info.deactivated, + "id" => user2.id, + "nickname" => user2.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + "local" => true, + "tags" => [] + } + ] + } + end + + test "only local users" do + admin = insert(:user, info: %{is_admin: true}, nickname: "john") + user = insert(:user, nickname: "bob") + + insert(:user, nickname: "bobb", local: false) + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/users?query=bo&filters=local") + + assert json_response(conn, 200) == %{ + "count" => 1, + "page_size" => 50, + "users" => [ + %{ + "deactivated" => user.info.deactivated, + "id" => user.id, + "nickname" => user.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + "local" => true, + "tags" => [] + } + ] + } + end + + test "only local users with no query" do + admin = insert(:user, info: %{is_admin: true}, nickname: "john") + user = insert(:user, nickname: "bob") + + insert(:user, nickname: "bobb", local: false) + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/users?filters=local") + + assert json_response(conn, 200) == %{ + "count" => 2, + "page_size" => 50, + "users" => [ + %{ + "deactivated" => user.info.deactivated, + "id" => user.id, + "nickname" => user.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + "local" => true, + "tags" => [] + }, + %{ + "deactivated" => admin.info.deactivated, + "id" => admin.id, + "nickname" => admin.nickname, + "roles" => %{"admin" => true, "moderator" => false}, + "local" => true, + "tags" => [] + } + ] + } + end + + test "it works with multiple filters" do + admin = insert(:user, nickname: "john", info: %{is_admin: true}) + user = insert(:user, nickname: "bob", local: false, info: %{deactivated: true}) + + insert(:user, nickname: "ken", local: true, info: %{deactivated: true}) + insert(:user, nickname: "bobb", local: false, info: %{deactivated: false}) + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/users?filters=deactivated,external") + + assert json_response(conn, 200) == %{ + "count" => 1, + "page_size" => 50, + "users" => [ + %{ + "deactivated" => user.info.deactivated, + "id" => user.id, + "nickname" => user.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + 
"local" => user.local, + "tags" => [] + } + ] + } + end end test "PATCH /api/pleroma/admin/users/:nickname/toggle_activation" do @@ -389,94 +635,141 @@ test "PATCH /api/pleroma/admin/users/:nickname/toggle_activation" do %{ "deactivated" => !user.info.deactivated, "id" => user.id, - "nickname" => user.nickname + "nickname" => user.nickname, + "roles" => %{"admin" => false, "moderator" => false}, + "local" => true, + "tags" => [] } end - describe "GET /api/pleroma/admin/users/search" do - test "regular search" do + describe "GET /api/pleroma/admin/invite_token" do + test "without options" do admin = insert(:user, info: %{is_admin: true}) - user = insert(:user, nickname: "bob") conn = build_conn() |> assign(:user, admin) - |> get("/api/pleroma/admin/users/search?query=bo") + |> get("/api/pleroma/admin/invite_token") + + token = json_response(conn, 200) + invite = UserInviteToken.find_by_token!(token) + refute invite.used + refute invite.expires_at + refute invite.max_use + assert invite.invite_type == "one_time" + end + + test "with expires_at" do + admin = insert(:user, info: %{is_admin: true}) + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/invite_token", %{ + "invite" => %{"expires_at" => Date.to_string(Date.utc_today())} + }) + + token = json_response(conn, 200) + invite = UserInviteToken.find_by_token!(token) + + refute invite.used + assert invite.expires_at == Date.utc_today() + refute invite.max_use + assert invite.invite_type == "date_limited" + end + + test "with max_use" do + admin = insert(:user, info: %{is_admin: true}) + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/invite_token", %{ + "invite" => %{"max_use" => 150} + }) + + token = json_response(conn, 200) + invite = UserInviteToken.find_by_token!(token) + refute invite.used + refute invite.expires_at + assert invite.max_use == 150 + assert invite.invite_type == "reusable" + end + + test "with max use and expires_at" do + admin = insert(:user, info: %{is_admin: true}) + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/invite_token", %{ + "invite" => %{"max_use" => 150, "expires_at" => Date.to_string(Date.utc_today())} + }) + + token = json_response(conn, 200) + invite = UserInviteToken.find_by_token!(token) + refute invite.used + assert invite.expires_at == Date.utc_today() + assert invite.max_use == 150 + assert invite.invite_type == "reusable_date_limited" + end + end + + describe "GET /api/pleroma/admin/invites" do + test "no invites" do + admin = insert(:user, info: %{is_admin: true}) + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/invites") + + assert json_response(conn, 200) == %{"invites" => []} + end + + test "with invite" do + admin = insert(:user, info: %{is_admin: true}) + {:ok, invite} = UserInviteToken.create_invite() + + conn = + build_conn() + |> assign(:user, admin) + |> get("/api/pleroma/admin/invites") assert json_response(conn, 200) == %{ - "count" => 1, - "page_size" => 50, - "users" => [ + "invites" => [ %{ - "deactivated" => user.info.deactivated, - "id" => user.id, - "nickname" => user.nickname + "expires_at" => nil, + "id" => invite.id, + "invite_type" => "one_time", + "max_use" => nil, + "token" => invite.token, + "used" => false, + "uses" => 0 } ] } end + end - test "regular search with page size" do + describe "POST /api/pleroma/admin/revoke_invite" do + test "with token" do admin = insert(:user, info: %{is_admin: true}) - user = insert(:user, nickname: "bob") - user2 = 
insert(:user, nickname: "bo") + {:ok, invite} = UserInviteToken.create_invite() conn = build_conn() |> assign(:user, admin) - |> get("/api/pleroma/admin/users/search?query=bo&page_size=1&page=1") + |> post("/api/pleroma/admin/revoke_invite", %{"token" => invite.token}) assert json_response(conn, 200) == %{ - "count" => 2, - "page_size" => 1, - "users" => [ - %{ - "deactivated" => user.info.deactivated, - "id" => user.id, - "nickname" => user.nickname - } - ] - } - - conn = - build_conn() - |> assign(:user, admin) - |> get("/api/pleroma/admin/users/search?query=bo&page_size=1&page=2") - - assert json_response(conn, 200) == %{ - "count" => 2, - "page_size" => 1, - "users" => [ - %{ - "deactivated" => user2.info.deactivated, - "id" => user2.id, - "nickname" => user2.nickname - } - ] - } - end - - test "only local users" do - admin = insert(:user, info: %{is_admin: true}, nickname: "john") - user = insert(:user, nickname: "bob") - - insert(:user, nickname: "bobb", local: false) - - conn = - build_conn() - |> assign(:user, admin) - |> get("/api/pleroma/admin/users/search?query=bo&local=true") - - assert json_response(conn, 200) == %{ - "count" => 1, - "page_size" => 50, - "users" => [ - %{ - "deactivated" => user.info.deactivated, - "id" => user.id, - "nickname" => user.nickname - } - ] + "expires_at" => nil, + "id" => invite.id, + "invite_type" => "one_time", + "max_use" => nil, + "token" => invite.token, + "used" => true, + "uses" => 0 } end end diff --git a/test/web/admin_api/search_test.exs b/test/web/admin_api/search_test.exs new file mode 100644 index 000000000..3950996ed --- /dev/null +++ b/test/web/admin_api/search_test.exs @@ -0,0 +1,88 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.AdminAPI.SearchTest do + use Pleroma.Web.ConnCase + + alias Pleroma.Web.AdminAPI.Search + + import Pleroma.Factory + + describe "search for admin" do + test "it ignores case" do + insert(:user, nickname: "papercoach") + insert(:user, nickname: "CanadaPaperCoach") + + {:ok, _results, count} = + Search.user(%{ + query: "paper", + local: false, + page: 1, + page_size: 50 + }) + + assert count == 2 + end + + test "it returns local/external users" do + insert(:user, local: true) + insert(:user, local: false) + insert(:user, local: false) + + {:ok, _results, local_count} = + Search.user(%{ + query: "", + local: true + }) + + {:ok, _results, external_count} = + Search.user(%{ + query: "", + external: true + }) + + assert local_count == 1 + assert external_count == 2 + end + + test "it returns active/deactivated users" do + insert(:user, info: %{deactivated: true}) + insert(:user, info: %{deactivated: true}) + insert(:user, info: %{deactivated: false}) + + {:ok, _results, active_count} = + Search.user(%{ + query: "", + active: true + }) + + {:ok, _results, deactivated_count} = + Search.user(%{ + query: "", + deactivated: true + }) + + assert active_count == 1 + assert deactivated_count == 2 + end + + test "it returns specific user" do + insert(:user) + insert(:user) + insert(:user, nickname: "bob", local: true, info: %{deactivated: false}) + + {:ok, _results, total_count} = Search.user(%{query: ""}) + + {:ok, _results, count} = + Search.user(%{ + query: "Bo", + active: true, + local: true + }) + + assert total_count == 3 + assert count == 1 + end + end +end diff --git a/test/web/common_api/common_api_test.exs b/test/web/common_api/common_api_test.exs index 9ba320f59..34aa5bf18 100644 --- 
a/test/web/common_api/common_api_test.exs +++ b/test/web/common_api/common_api_test.exs @@ -4,12 +4,30 @@ defmodule Pleroma.Web.CommonAPITest do use Pleroma.DataCase - alias Pleroma.Web.CommonAPI - alias Pleroma.User alias Pleroma.Activity + alias Pleroma.User + alias Pleroma.Web.CommonAPI import Pleroma.Factory + test "with the safe_dm_mention option set, it does not mention people beyond the initial tags" do + har = insert(:user) + jafnhar = insert(:user) + tridi = insert(:user) + option = Pleroma.Config.get([:instance, :safe_dm_mentions]) + Pleroma.Config.put([:instance, :safe_dm_mentions], true) + + {:ok, activity} = + CommonAPI.post(har, %{ + "status" => "@#{jafnhar.nickname} hey, i never want to see @#{tridi.nickname} again", + "visibility" => "direct" + }) + + refute tridi.ap_id in activity.recipients + assert jafnhar.ap_id in activity.recipients + Pleroma.Config.put([:instance, :safe_dm_mentions], option) + end + test "it de-duplicates tags" do user = insert(:user) {:ok, activity} = CommonAPI.post(user, %{"status" => "#2hu #2HU"}) @@ -221,4 +239,27 @@ test "creates a report" do } = flag_activity end end + + describe "reblog muting" do + setup do + muter = insert(:user) + + muted = insert(:user) + + [muter: muter, muted: muted] + end + + test "add a reblog mute", %{muter: muter, muted: muted} do + {:ok, muter} = CommonAPI.hide_reblogs(muter, muted) + + assert Pleroma.User.showing_reblogs?(muter, muted) == false + end + + test "remove a reblog mute", %{muter: muter, muted: muted} do + {:ok, muter} = CommonAPI.hide_reblogs(muter, muted) + {:ok, muter} = CommonAPI.show_reblogs(muter, muted) + + assert Pleroma.User.showing_reblogs?(muter, muted) == true + end + end end diff --git a/test/web/common_api/common_api_utils_test.exs b/test/web/common_api/common_api_utils_test.exs index 684f2a23f..f0c59d5c3 100644 --- a/test/web/common_api/common_api_utils_test.exs +++ b/test/web/common_api/common_api_utils_test.exs @@ -3,9 +3,10 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.CommonAPI.UtilsTest do + alias Pleroma.Builders.UserBuilder + alias Pleroma.Object alias Pleroma.Web.CommonAPI.Utils alias Pleroma.Web.Endpoint - alias Pleroma.Builders.UserBuilder use Pleroma.DataCase test "it adds attachment links to a given text and attachment set" do @@ -136,4 +137,56 @@ test "works for text/markdown with mentions" do assert output == expected end end + + describe "context_to_conversation_id" do + test "creates a mapping object" do + conversation_id = Utils.context_to_conversation_id("random context") + object = Object.get_by_ap_id("random context") + + assert conversation_id == object.id + end + + test "returns an existing mapping for an existing object" do + {:ok, object} = Object.context_mapping("random context") |> Repo.insert() + conversation_id = Utils.context_to_conversation_id("random context") + + assert conversation_id == object.id + end + end + + describe "formats date to asctime" do + test "when date is in ISO 8601 format" do + date = DateTime.utc_now() |> DateTime.to_iso8601() + + expected = + date + |> DateTime.from_iso8601() + |> elem(1) + |> Calendar.Strftime.strftime!("%a %b %d %H:%M:%S %z %Y") + + assert Utils.date_to_asctime(date) == expected + end + + test "when date is a binary in wrong format" do + date = DateTime.utc_now() + + expected = "" + + assert Utils.date_to_asctime(date) == expected + end + + test "when date is a Unix timestamp" do + date = DateTime.utc_now() |> DateTime.to_unix() + + expected = "" + + assert Utils.date_to_asctime(date) == expected + 
end + + test "when date is nil" do + expected = "" + + assert Utils.date_to_asctime(nil) == expected + end + end end diff --git a/test/web/federator_test.exs b/test/web/federator_test.exs index 08279f230..52729eb50 100644 --- a/test/web/federator_test.exs +++ b/test/web/federator_test.exs @@ -3,9 +3,9 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.FederatorTest do + alias Pleroma.Instances alias Pleroma.Web.CommonAPI alias Pleroma.Web.Federator - alias Pleroma.Instances use Pleroma.DataCase import Pleroma.Factory import Mock diff --git a/test/web/instances/instance_test.exs b/test/web/instances/instance_test.exs index a158c0a42..d28730994 100644 --- a/test/web/instances/instance_test.exs +++ b/test/web/instances/instance_test.exs @@ -3,8 +3,8 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Instances.InstanceTest do - alias Pleroma.Repo alias Pleroma.Instances.Instance + alias Pleroma.Repo use Pleroma.DataCase diff --git a/test/web/instances/instances_test.exs b/test/web/instances/instances_test.exs index 2530c09fe..f0d84edea 100644 --- a/test/web/instances/instances_test.exs +++ b/test/web/instances/instances_test.exs @@ -102,7 +102,8 @@ test "returns error status on non-binary input" do end end - # Note: implementation-specific (e.g. Instance) details of set_unreachable/1 should be tested in implementation-specific tests + # Note: implementation-specific (e.g. Instance) details of set_unreachable/1 + # should be tested in implementation-specific tests describe "set_unreachable/1" do test "returns error status on non-binary input" do assert {:error, _} = Instances.set_unreachable(nil) diff --git a/test/web/mastodon_api/account_view_test.exs b/test/web/mastodon_api/account_view_test.exs index 6be66ef63..d7487bed9 100644 --- a/test/web/mastodon_api/account_view_test.exs +++ b/test/web/mastodon_api/account_view_test.exs @@ -5,8 +5,8 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do use Pleroma.DataCase import Pleroma.Factory - alias Pleroma.Web.MastodonAPI.AccountView alias Pleroma.User + alias Pleroma.Web.MastodonAPI.AccountView test "Represent a user account" do source_data = %{ @@ -71,6 +71,20 @@ test "Represent a user account" do assert expected == AccountView.render("account.json", %{user: user}) end + test "Represent the user account for the account owner" do + user = insert(:user) + + notification_settings = %{ + "remote" => true, + "local" => true, + "followers" => true, + "follows" => true + } + + assert %{pleroma: %{notification_settings: ^notification_settings}} = + AccountView.render("account.json", %{user: user, for: user}) + end + test "Represent a Service(bot) account" do user = insert(:user, %{ @@ -142,9 +156,10 @@ test "represent a relationship" do blocking: true, muting: false, muting_notifications: false, + subscribing: false, requested: false, domain_blocking: false, - showing_reblogs: false, + showing_reblogs: true, endorsed: false } @@ -198,11 +213,12 @@ test "represent an embedded relationship" do following: false, followed_by: false, blocking: true, + subscribing: false, muting: false, muting_notifications: false, requested: false, domain_blocking: false, - showing_reblogs: false, + showing_reblogs: true, endorsed: false } } diff --git a/test/web/mastodon_api/mastodon_api_controller_test.exs b/test/web/mastodon_api/mastodon_api_controller_test.exs index ec6869db9..3ac5c37a6 100644 --- a/test/web/mastodon_api/mastodon_api_controller_test.exs +++ b/test/web/mastodon_api/mastodon_api_controller_test.exs @@ -5,17 +5,20 @@ defmodule 
Pleroma.Web.MastodonAPI.MastodonAPIControllerTest do use Pleroma.Web.ConnCase - alias Pleroma.Web.TwitterAPI.TwitterAPI - alias Pleroma.Repo - alias Pleroma.User - alias Pleroma.Object + alias Ecto.Changeset alias Pleroma.Activity alias Pleroma.Notification - alias Pleroma.Web.OStatus - alias Pleroma.Web.CommonAPI + alias Pleroma.Object + alias Pleroma.Repo + alias Pleroma.ScheduledActivity + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.CommonAPI alias Pleroma.Web.MastodonAPI.FilterView - alias Ecto.Changeset + alias Pleroma.Web.OAuth.App + alias Pleroma.Web.OStatus + alias Pleroma.Web.Push + alias Pleroma.Web.TwitterAPI.TwitterAPI import Pleroma.Factory import ExUnit.CaptureLog import Tesla.Mock @@ -99,7 +102,7 @@ test "posting a status", %{conn: conn} do assert %{"content" => "cofe", "id" => id, "spoiler_text" => "2hu", "sensitive" => false} = json_response(conn_one, 200) - assert Repo.get(Activity, id) + assert Activity.get_by_id(id) conn_two = conn @@ -138,7 +141,56 @@ test "posting a sensitive status", %{conn: conn} do |> post("/api/v1/statuses", %{"status" => "cofe", "sensitive" => true}) assert %{"content" => "cofe", "id" => id, "sensitive" => true} = json_response(conn, 200) - assert Repo.get(Activity, id) + assert Activity.get_by_id(id) + end + + test "posting a fake status", %{conn: conn} do + user = insert(:user) + + real_conn = + conn + |> assign(:user, user) + |> post("/api/v1/statuses", %{ + "status" => + "\"Tenshi Eating a Corndog\" is a much discussed concept on /jp/. The significance of it is disputed, so I will focus on one core concept: the symbolism behind it" + }) + + real_status = json_response(real_conn, 200) + + assert real_status + assert Object.get_by_ap_id(real_status["uri"]) + + real_status = + real_status + |> Map.put("id", nil) + |> Map.put("url", nil) + |> Map.put("uri", nil) + |> Map.put("created_at", nil) + |> Kernel.put_in(["pleroma", "conversation_id"], nil) + + fake_conn = + conn + |> assign(:user, user) + |> post("/api/v1/statuses", %{ + "status" => + "\"Tenshi Eating a Corndog\" is a much discussed concept on /jp/. 
The significance of it is disputed, so I will focus on one core concept: the symbolism behind it", + "preview" => true + }) + + fake_status = json_response(fake_conn, 200) + + assert fake_status + refute Object.get_by_ap_id(fake_status["uri"]) + + fake_status = + fake_status + |> Map.put("id", nil) + |> Map.put("url", nil) + |> Map.put("uri", nil) + |> Map.put("created_at", nil) + |> Kernel.put_in(["pleroma", "conversation_id"], nil) + + assert real_status == fake_status end test "posting a status with OGP link preview", %{conn: conn} do @@ -153,7 +205,7 @@ test "posting a status with OGP link preview", %{conn: conn} do }) assert %{"id" => id, "card" => %{"title" => "The Rock"}} = json_response(conn, 200) - assert Repo.get(Activity, id) + assert Activity.get_by_id(id) Pleroma.Config.put([:rich_media, :enabled], false) end @@ -168,7 +220,7 @@ test "posting a direct status", %{conn: conn} do |> post("api/v1/statuses", %{"status" => content, "visibility" => "direct"}) assert %{"id" => id, "visibility" => "direct"} = json_response(conn, 200) - assert activity = Repo.get(Activity, id) + assert activity = Activity.get_by_id(id) assert activity.recipients == [user2.ap_id, user1.ap_id] assert activity.data["to"] == [user2.ap_id] assert activity.data["cc"] == [] @@ -287,7 +339,7 @@ test "replying to a status", %{conn: conn} do assert %{"content" => "xD", "id" => id} = json_response(conn, 200) - activity = Repo.get(Activity, id) + activity = Activity.get_by_id(id) assert activity.data["context"] == replied_to.data["context"] assert activity.data["object"]["inReplyToStatusId"] == replied_to.id @@ -303,7 +355,7 @@ test "posting a status with an invalid in_reply_to_id", %{conn: conn} do assert %{"content" => "xD", "id" => id} = json_response(conn, 200) - activity = Repo.get(Activity, id) + activity = Activity.get_by_id(id) assert activity end @@ -332,6 +384,53 @@ test "verify_credentials default scope unlisted", %{conn: conn} do assert id == to_string(user.id) end + test "apps/verify_credentials", %{conn: conn} do + token = insert(:oauth_token) + + conn = + conn + |> assign(:user, token.user) + |> assign(:token, token) + |> get("/api/v1/apps/verify_credentials") + + app = Repo.preload(token, :app).app + + expected = %{ + "name" => app.client_name, + "website" => app.website, + "vapid_key" => Push.vapid_config() |> Keyword.get(:public_key) + } + + assert expected == json_response(conn, 200) + end + + test "creates an oauth app", %{conn: conn} do + user = insert(:user) + app_attrs = build(:oauth_app) + + conn = + conn + |> assign(:user, user) + |> post("/api/v1/apps", %{ + client_name: app_attrs.client_name, + redirect_uris: app_attrs.redirect_uris + }) + + [app] = Repo.all(App) + + expected = %{ + "name" => app.client_name, + "website" => app.website, + "client_id" => app.client_id, + "client_secret" => app.client_secret, + "id" => app.id |> to_string(), + "redirect_uri" => app.redirect_uris, + "vapid_key" => Push.vapid_config() |> Keyword.get(:public_key) + } + + assert expected == json_response(conn, 200) + end + test "get a status", %{conn: conn} do activity = insert(:note_activity) @@ -355,7 +454,7 @@ test "when you created it", %{conn: conn} do assert %{} = json_response(conn, 200) - refute Repo.get(Activity, activity.id) + refute Activity.get_by_id(activity.id) end test "when you didn't create it", %{conn: conn} do @@ -369,7 +468,31 @@ test "when you didn't create it", %{conn: conn} do assert %{"error" => _} = json_response(conn, 403) - assert Repo.get(Activity, activity.id) == activity + assert 
Activity.get_by_id(activity.id) == activity + end + + test "when you're an admin or moderator", %{conn: conn} do + activity1 = insert(:note_activity) + activity2 = insert(:note_activity) + admin = insert(:user, info: %{is_admin: true}) + moderator = insert(:user, info: %{is_moderator: true}) + + res_conn = + conn + |> assign(:user, admin) + |> delete("/api/v1/statuses/#{activity1.id}") + + assert %{} = json_response(res_conn, 200) + + res_conn = + conn + |> assign(:user, moderator) + |> delete("/api/v1/statuses/#{activity2.id}") + + assert %{} = json_response(res_conn, 200) + + refute Activity.get_by_id(activity1.id) + refute Activity.get_by_id(activity2.id) end end @@ -731,6 +854,96 @@ test "clearing all notifications", %{conn: conn} do assert all = json_response(conn, 200) assert all == [] end + + test "paginates notifications using min_id, since_id, max_id, and limit", %{conn: conn} do + user = insert(:user) + other_user = insert(:user) + + {:ok, activity1} = CommonAPI.post(other_user, %{"status" => "hi @#{user.nickname}"}) + {:ok, activity2} = CommonAPI.post(other_user, %{"status" => "hi @#{user.nickname}"}) + {:ok, activity3} = CommonAPI.post(other_user, %{"status" => "hi @#{user.nickname}"}) + {:ok, activity4} = CommonAPI.post(other_user, %{"status" => "hi @#{user.nickname}"}) + + notification1_id = Repo.get_by(Notification, activity_id: activity1.id).id |> to_string() + notification2_id = Repo.get_by(Notification, activity_id: activity2.id).id |> to_string() + notification3_id = Repo.get_by(Notification, activity_id: activity3.id).id |> to_string() + notification4_id = Repo.get_by(Notification, activity_id: activity4.id).id |> to_string() + + conn = + conn + |> assign(:user, user) + + # min_id + conn_res = + conn + |> get("/api/v1/notifications?limit=2&min_id=#{notification1_id}") + + result = json_response(conn_res, 200) + assert [%{"id" => ^notification3_id}, %{"id" => ^notification2_id}] = result + + # since_id + conn_res = + conn + |> get("/api/v1/notifications?limit=2&since_id=#{notification1_id}") + + result = json_response(conn_res, 200) + assert [%{"id" => ^notification4_id}, %{"id" => ^notification3_id}] = result + + # max_id + conn_res = + conn + |> get("/api/v1/notifications?limit=2&max_id=#{notification4_id}") + + result = json_response(conn_res, 200) + assert [%{"id" => ^notification3_id}, %{"id" => ^notification2_id}] = result + end + + test "filters notifications using exclude_types", %{conn: conn} do + user = insert(:user) + other_user = insert(:user) + + {:ok, mention_activity} = CommonAPI.post(other_user, %{"status" => "hey @#{user.nickname}"}) + {:ok, create_activity} = CommonAPI.post(user, %{"status" => "hey"}) + {:ok, favorite_activity, _} = CommonAPI.favorite(create_activity.id, other_user) + {:ok, reblog_activity, _} = CommonAPI.repeat(create_activity.id, other_user) + {:ok, _, _, follow_activity} = CommonAPI.follow(other_user, user) + + mention_notification_id = + Repo.get_by(Notification, activity_id: mention_activity.id).id |> to_string() + + favorite_notification_id = + Repo.get_by(Notification, activity_id: favorite_activity.id).id |> to_string() + + reblog_notification_id = + Repo.get_by(Notification, activity_id: reblog_activity.id).id |> to_string() + + follow_notification_id = + Repo.get_by(Notification, activity_id: follow_activity.id).id |> to_string() + + conn = + conn + |> assign(:user, user) + + conn_res = + get(conn, "/api/v1/notifications", %{exclude_types: ["mention", "favourite", "reblog"]}) + + assert [%{"id" => ^follow_notification_id}] = 
json_response(conn_res, 200) + + conn_res = + get(conn, "/api/v1/notifications", %{exclude_types: ["favourite", "reblog", "follow"]}) + + assert [%{"id" => ^mention_notification_id}] = json_response(conn_res, 200) + + conn_res = + get(conn, "/api/v1/notifications", %{exclude_types: ["reblog", "follow", "mention"]}) + + assert [%{"id" => ^favorite_notification_id}] = json_response(conn_res, 200) + + conn_res = + get(conn, "/api/v1/notifications", %{exclude_types: ["follow", "mention", "favourite"]}) + + assert [%{"id" => ^reblog_notification_id}] = json_response(conn_res, 200) + end end describe "reblogging" do @@ -949,8 +1162,8 @@ test "/api/v1/follow_requests works" do {:ok, _activity} = ActivityPub.follow(other_user, user) - user = Repo.get(User, user.id) - other_user = Repo.get(User, other_user.id) + user = User.get_by_id(user.id) + other_user = User.get_by_id(other_user.id) assert User.following?(other_user, user) == false @@ -969,8 +1182,8 @@ test "/api/v1/follow_requests/:id/authorize works" do {:ok, _activity} = ActivityPub.follow(other_user, user) - user = Repo.get(User, user.id) - other_user = Repo.get(User, other_user.id) + user = User.get_by_id(user.id) + other_user = User.get_by_id(other_user.id) assert User.following?(other_user, user) == false @@ -982,8 +1195,8 @@ test "/api/v1/follow_requests/:id/authorize works" do assert relationship = json_response(conn, 200) assert to_string(other_user.id) == relationship["id"] - user = Repo.get(User, user.id) - other_user = Repo.get(User, other_user.id) + user = User.get_by_id(user.id) + other_user = User.get_by_id(other_user.id) assert User.following?(other_user, user) == true end @@ -1006,7 +1219,7 @@ test "/api/v1/follow_requests/:id/reject works" do {:ok, _activity} = ActivityPub.follow(other_user, user) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) conn = build_conn() @@ -1016,8 +1229,8 @@ test "/api/v1/follow_requests/:id/reject works" do assert relationship = json_response(conn, 200) assert to_string(other_user.id) == relationship["id"] - user = Repo.get(User, user.id) - other_user = Repo.get(User, other_user.id) + user = User.get_by_id(user.id) + other_user = User.get_by_id(other_user.id) assert User.following?(other_user, user) == false end @@ -1040,6 +1253,17 @@ test "account fetching", %{conn: conn} do assert %{"error" => "Can't find user"} = json_response(conn, 404) end + test "account fetching also works with a nickname", %{conn: conn} do + user = insert(:user) + + conn = + conn + |> get("/api/v1/accounts/#{user.nickname}") + + assert %{"id" => id} = json_response(conn, 200) + assert id == user.id + end + test "media upload", %{conn: conn} do file = %Plug.Upload{ content_type: "image/jpg", path: Path.absname("test/fixtures/image.jpg"), filename: "an_image.jpg" } @@ -1160,6 +1384,47 @@ test "getting followers, hide_followers, same user requesting", %{conn: conn} do refute [] == json_response(conn, 200) end + test "getting followers, pagination", %{conn: conn} do + user = insert(:user) + follower1 = insert(:user) + follower2 = insert(:user) + follower3 = insert(:user) + {:ok, _} = User.follow(follower1, user) + {:ok, _} = User.follow(follower2, user) + {:ok, _} = User.follow(follower3, user) + + conn = + conn + |> assign(:user, user) + + res_conn = + conn + |> get("/api/v1/accounts/#{user.id}/followers?since_id=#{follower1.id}") + + assert [%{"id" => id3}, %{"id" => id2}] = json_response(res_conn, 200) + assert id3 == follower3.id + assert id2 == follower2.id + + res_conn = + conn + |> get("/api/v1/accounts/#{user.id}/followers?max_id=#{follower3.id}") + + assert [%{"id" => id2}, 
%{"id" => id1}] = json_response(res_conn, 200) + assert id2 == follower2.id + assert id1 == follower1.id + + res_conn = + conn + |> get("/api/v1/accounts/#{user.id}/followers?limit=1&max_id=#{follower3.id}") + + assert [%{"id" => id2}] = json_response(res_conn, 200) + assert id2 == follower2.id + + assert [link_header] = get_resp_header(res_conn, "link") + assert link_header =~ ~r/since_id=#{follower2.id}/ + assert link_header =~ ~r/max_id=#{follower2.id}/ + end + test "getting following", %{conn: conn} do user = insert(:user) other_user = insert(:user) @@ -1198,6 +1463,47 @@ test "getting following, hide_follows, same user requesting", %{conn: conn} do refute [] == json_response(conn, 200) end + test "getting following, pagination", %{conn: conn} do + user = insert(:user) + following1 = insert(:user) + following2 = insert(:user) + following3 = insert(:user) + {:ok, _} = User.follow(user, following1) + {:ok, _} = User.follow(user, following2) + {:ok, _} = User.follow(user, following3) + + conn = + conn + |> assign(:user, user) + + res_conn = + conn + |> get("/api/v1/accounts/#{user.id}/following?since_id=#{following1.id}") + + assert [%{"id" => id3}, %{"id" => id2}] = json_response(res_conn, 200) + assert id3 == following3.id + assert id2 == following2.id + + res_conn = + conn + |> get("/api/v1/accounts/#{user.id}/following?max_id=#{following3.id}") + + assert [%{"id" => id2}, %{"id" => id1}] = json_response(res_conn, 200) + assert id2 == following2.id + assert id1 == following1.id + + res_conn = + conn + |> get("/api/v1/accounts/#{user.id}/following?limit=1&max_id=#{following3.id}") + + assert [%{"id" => id2}] = json_response(res_conn, 200) + assert id2 == following2.id + + assert [link_header] = get_resp_header(res_conn, "link") + assert link_header =~ ~r/since_id=#{following2.id}/ + assert link_header =~ ~r/max_id=#{following2.id}/ + end + test "following / unfollowing a user", %{conn: conn} do user = insert(:user) other_user = insert(:user) @@ -1209,7 +1515,7 @@ test "following / unfollowing a user", %{conn: conn} do assert %{"id" => _id, "following" => true} = json_response(conn, 200) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) conn = build_conn() @@ -1218,7 +1524,7 @@ test "following / unfollowing a user", %{conn: conn} do assert %{"id" => _id, "following" => false} = json_response(conn, 200) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) conn = build_conn() @@ -1240,7 +1546,7 @@ test "muting / unmuting a user", %{conn: conn} do assert %{"id" => _id, "muting" => true} = json_response(conn, 200) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) conn = build_conn() @@ -1250,6 +1556,25 @@ test "muting / unmuting a user", %{conn: conn} do assert %{"id" => _id, "muting" => false} = json_response(conn, 200) end + test "subscribing / unsubscribing to a user", %{conn: conn} do + user = insert(:user) + subscription_target = insert(:user) + + conn = + conn + |> assign(:user, user) + |> post("/api/v1/pleroma/accounts/#{subscription_target.id}/subscribe") + + assert %{"id" => _id, "subscribing" => true} = json_response(conn, 200) + + conn = + build_conn() + |> assign(:user, user) + |> post("/api/v1/pleroma/accounts/#{subscription_target.id}/unsubscribe") + + assert %{"id" => _id, "subscribing" => false} = json_response(conn, 200) + end + test "getting a list of mutes", %{conn: conn} do user = insert(:user) other_user = insert(:user) @@ -1276,7 +1601,7 @@ test "blocking / unblocking a user", %{conn: conn} do assert %{"id" => _id, "blocking" 
=> true} = json_response(conn, 200) - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) conn = build_conn() @@ -1515,9 +1840,10 @@ test "updates the user's bio", %{conn: conn} do assert user = json_response(conn, 200) assert user["note"] == - "I drink #cofe with @#{user2.nickname}" + ~s(I drink with @) <> user2.nickname <> ~s() end test "updates the user's locking status", %{conn: conn} do @@ -1600,6 +1926,27 @@ test "requires 'write' permission", %{conn: conn} do end test "get instance information", %{conn: conn} do + conn = get(conn, "/api/v1/instance") + assert result = json_response(conn, 200) + + # Note: not checking for "max_toot_chars" since it's optional + assert %{ + "uri" => _, + "title" => _, + "description" => _, + "version" => _, + "email" => _, + "urls" => %{ + "streaming_api" => _ + }, + "stats" => _, + "thumbnail" => _, + "languages" => _, + "registrations" => _ + } = result + end + + test "get instance stats", %{conn: conn} do user = insert(:user, %{local: true}) user2 = insert(:user, %{local: true}) @@ -1611,7 +1958,7 @@ test "get instance information", %{conn: conn} do {:ok, _} = TwitterAPI.create_status(user, %{"status" => "cofe"}) # Stats should count users with missing or nil `info.deactivated` value - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) info_change = Changeset.change(user.info, %{deactivated: nil}) {:ok, _user} = @@ -1926,7 +2273,7 @@ test "submit a report with statuses and comment", %{ |> json_response(200) end - test "accound_id is required", %{ + test "account_id is required", %{ conn: conn, reporter: reporter, activity: activity @@ -1955,4 +2302,355 @@ test "comment must be up to the size specified in the config", %{ |> json_response(400) end end + + describe "link headers" do + test "preserves parameters in link headers", %{conn: conn} do + user = insert(:user) + other_user = insert(:user) + + {:ok, activity1} = + CommonAPI.post(other_user, %{ + "status" => "hi @#{user.nickname}", + "visibility" => "public" + }) + + {:ok, activity2} = + CommonAPI.post(other_user, %{ + "status" => "hi @#{user.nickname}", + "visibility" => "public" + }) + + notification1 = Repo.get_by(Notification, activity_id: activity1.id) + notification2 = Repo.get_by(Notification, activity_id: activity2.id) + + conn = + conn + |> assign(:user, user) + |> get("/api/v1/notifications", %{media_only: true}) + + assert [link_header] = get_resp_header(conn, "link") + assert link_header =~ ~r/media_only=true/ + assert link_header =~ ~r/since_id=#{notification2.id}/ + assert link_header =~ ~r/max_id=#{notification1.id}/ + end + end + + test "accounts fetches correct account for nicknames beginning with numbers", %{conn: conn} do + # Need to set an old-style integer ID to reproduce the problem + # (these are no longer assigned to new accounts but were preserved + # for existing accounts during the migration to flakeIDs) + user_one = insert(:user, %{id: 1212}) + user_two = insert(:user, %{nickname: "#{user_one.id}garbage"}) + + resp_one = + conn + |> get("/api/v1/accounts/#{user_one.id}") + + resp_two = + conn + |> get("/api/v1/accounts/#{user_two.nickname}") + + resp_three = + conn + |> get("/api/v1/accounts/#{user_two.id}") + + acc_one = json_response(resp_one, 200) + acc_two = json_response(resp_two, 200) + acc_three = json_response(resp_three, 200) + refute acc_one == acc_two + assert acc_two == acc_three + end + + describe "custom emoji" do + test "with tags", %{conn: conn} do + [emoji | _body] = + conn + |> get("/api/v1/custom_emojis") + |> 
json_response(200) + + assert Map.has_key?(emoji, "shortcode") + assert Map.has_key?(emoji, "static_url") + assert Map.has_key?(emoji, "tags") + assert is_list(emoji["tags"]) + assert Map.has_key?(emoji, "url") + assert Map.has_key?(emoji, "visible_in_picker") + end + end + + describe "index/2 redirections" do + setup %{conn: conn} do + session_opts = [ + store: :cookie, + key: "_test", + signing_salt: "cooldude" + ] + + conn = + conn + |> Plug.Session.call(Plug.Session.init(session_opts)) + |> fetch_session() + + test_path = "/web/statuses/test" + %{conn: conn, path: test_path} + end + + test "redirects not logged-in users to the login page", %{conn: conn, path: path} do + conn = get(conn, path) + + assert conn.status == 302 + assert redirected_to(conn) == "/web/login" + end + + test "does not redirect logged in users to the login page", %{conn: conn, path: path} do + token = insert(:oauth_token) + + conn = + conn + |> assign(:user, token.user) + |> put_session(:oauth_token, token.token) + |> get(path) + + assert conn.status == 200 + end + + test "saves referer path to session", %{conn: conn, path: path} do + conn = get(conn, path) + return_to = Plug.Conn.get_session(conn, :return_to) + + assert return_to == path + end + + test "redirects to the saved path after log in", %{conn: conn, path: path} do + app = insert(:oauth_app, client_name: "Mastodon-Local", redirect_uris: ".") + auth = insert(:oauth_authorization, app: app) + + conn = + conn + |> put_session(:return_to, path) + |> get("/web/login", %{code: auth.token}) + + assert conn.status == 302 + assert redirected_to(conn) == path + end + + test "redirects to the getting-started page when referer is not present", %{conn: conn} do + app = insert(:oauth_app, client_name: "Mastodon-Local", redirect_uris: ".") + auth = insert(:oauth_authorization, app: app) + + conn = get(conn, "/web/login", %{code: auth.token}) + + assert conn.status == 302 + assert redirected_to(conn) == "/web/getting-started" + end + end + + describe "scheduled activities" do + test "creates a scheduled activity", %{conn: conn} do + user = insert(:user) + scheduled_at = NaiveDateTime.add(NaiveDateTime.utc_now(), :timer.minutes(120), :millisecond) + + conn = + conn + |> assign(:user, user) + |> post("/api/v1/statuses", %{ + "status" => "scheduled", + "scheduled_at" => scheduled_at + }) + + assert %{"scheduled_at" => expected_scheduled_at} = json_response(conn, 200) + assert expected_scheduled_at == Pleroma.Web.CommonAPI.Utils.to_masto_date(scheduled_at) + assert [] == Repo.all(Activity) + end + + test "creates a scheduled activity with a media attachment", %{conn: conn} do + user = insert(:user) + scheduled_at = NaiveDateTime.add(NaiveDateTime.utc_now(), :timer.minutes(120), :millisecond) + + file = %Plug.Upload{ + content_type: "image/jpg", + path: Path.absname("test/fixtures/image.jpg"), + filename: "an_image.jpg" + } + + {:ok, upload} = ActivityPub.upload(file, actor: user.ap_id) + + conn = + conn + |> assign(:user, user) + |> post("/api/v1/statuses", %{ + "media_ids" => [to_string(upload.id)], + "status" => "scheduled", + "scheduled_at" => scheduled_at + }) + + assert %{"media_attachments" => [media_attachment]} = json_response(conn, 200) + assert %{"type" => "image"} = media_attachment + end + + test "skips the scheduling and creates the activity if scheduled_at is earlier than 5 minutes from now", + %{conn: conn} do + user = insert(:user) + + scheduled_at = + NaiveDateTime.add(NaiveDateTime.utc_now(), :timer.minutes(5) - 1, :millisecond) + + conn = + conn + |> 
assign(:user, user) + |> post("/api/v1/statuses", %{ + "status" => "not scheduled", + "scheduled_at" => scheduled_at + }) + + assert %{"content" => "not scheduled"} = json_response(conn, 200) + assert [] == Repo.all(ScheduledActivity) + end + + test "returns error when daily user limit is exceeded", %{conn: conn} do + user = insert(:user) + + today = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(:timer.minutes(6), :millisecond) + |> NaiveDateTime.to_iso8601() + + attrs = %{params: %{}, scheduled_at: today} + {:ok, _} = ScheduledActivity.create(user, attrs) + {:ok, _} = ScheduledActivity.create(user, attrs) + + conn = + conn + |> assign(:user, user) + |> post("/api/v1/statuses", %{"status" => "scheduled", "scheduled_at" => today}) + + assert %{"error" => "daily limit exceeded"} == json_response(conn, 422) + end + + test "returns error when total user limit is exceeded", %{conn: conn} do + user = insert(:user) + + today = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(:timer.minutes(6), :millisecond) + |> NaiveDateTime.to_iso8601() + + tomorrow = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(:timer.hours(36), :millisecond) + |> NaiveDateTime.to_iso8601() + + attrs = %{params: %{}, scheduled_at: today} + {:ok, _} = ScheduledActivity.create(user, attrs) + {:ok, _} = ScheduledActivity.create(user, attrs) + {:ok, _} = ScheduledActivity.create(user, %{params: %{}, scheduled_at: tomorrow}) + + conn = + conn + |> assign(:user, user) + |> post("/api/v1/statuses", %{"status" => "scheduled", "scheduled_at" => tomorrow}) + + assert %{"error" => "total limit exceeded"} == json_response(conn, 422) + end + + test "shows scheduled activities", %{conn: conn} do + user = insert(:user) + scheduled_activity_id1 = insert(:scheduled_activity, user: user).id |> to_string() + scheduled_activity_id2 = insert(:scheduled_activity, user: user).id |> to_string() + scheduled_activity_id3 = insert(:scheduled_activity, user: user).id |> to_string() + scheduled_activity_id4 = insert(:scheduled_activity, user: user).id |> to_string() + + conn = + conn + |> assign(:user, user) + + # min_id + conn_res = + conn + |> get("/api/v1/scheduled_statuses?limit=2&min_id=#{scheduled_activity_id1}") + + result = json_response(conn_res, 200) + assert [%{"id" => ^scheduled_activity_id3}, %{"id" => ^scheduled_activity_id2}] = result + + # since_id + conn_res = + conn + |> get("/api/v1/scheduled_statuses?limit=2&since_id=#{scheduled_activity_id1}") + + result = json_response(conn_res, 200) + assert [%{"id" => ^scheduled_activity_id4}, %{"id" => ^scheduled_activity_id3}] = result + + # max_id + conn_res = + conn + |> get("/api/v1/scheduled_statuses?limit=2&max_id=#{scheduled_activity_id4}") + + result = json_response(conn_res, 200) + assert [%{"id" => ^scheduled_activity_id3}, %{"id" => ^scheduled_activity_id2}] = result + end + + test "shows a scheduled activity", %{conn: conn} do + user = insert(:user) + scheduled_activity = insert(:scheduled_activity, user: user) + + res_conn = + conn + |> assign(:user, user) + |> get("/api/v1/scheduled_statuses/#{scheduled_activity.id}") + + assert %{"id" => scheduled_activity_id} = json_response(res_conn, 200) + assert scheduled_activity_id == scheduled_activity.id |> to_string() + + res_conn = + conn + |> assign(:user, user) + |> get("/api/v1/scheduled_statuses/404") + + assert %{"error" => "Record not found"} = json_response(res_conn, 404) + end + + test "updates a scheduled activity", %{conn: conn} do + user = insert(:user) + scheduled_activity = insert(:scheduled_activity, user: user) + + 
new_scheduled_at = + NaiveDateTime.add(NaiveDateTime.utc_now(), :timer.minutes(120), :millisecond) + + res_conn = + conn + |> assign(:user, user) + |> put("/api/v1/scheduled_statuses/#{scheduled_activity.id}", %{ + scheduled_at: new_scheduled_at + }) + + assert %{"scheduled_at" => expected_scheduled_at} = json_response(res_conn, 200) + assert expected_scheduled_at == Pleroma.Web.CommonAPI.Utils.to_masto_date(new_scheduled_at) + + res_conn = + conn + |> assign(:user, user) + |> put("/api/v1/scheduled_statuses/404", %{scheduled_at: new_scheduled_at}) + + assert %{"error" => "Record not found"} = json_response(res_conn, 404) + end + + test "deletes a scheduled activity", %{conn: conn} do + user = insert(:user) + scheduled_activity = insert(:scheduled_activity, user: user) + + res_conn = + conn + |> assign(:user, user) + |> delete("/api/v1/scheduled_statuses/#{scheduled_activity.id}") + + assert %{} = json_response(res_conn, 200) + assert nil == Repo.get(ScheduledActivity, scheduled_activity.id) + + res_conn = + conn + |> assign(:user, user) + |> delete("/api/v1/scheduled_statuses/#{scheduled_activity.id}") + + assert %{"error" => "Record not found"} = json_response(res_conn, 404) + end + end end diff --git a/test/web/mastodon_api/notification_view_test.exs b/test/web/mastodon_api/notification_view_test.exs new file mode 100644 index 000000000..f2c1eb76c --- /dev/null +++ b/test/web/mastodon_api/notification_view_test.exs @@ -0,0 +1,104 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.MastodonAPI.NotificationViewTest do + use Pleroma.DataCase + + alias Pleroma.Activity + alias Pleroma.Notification + alias Pleroma.Repo + alias Pleroma.User + alias Pleroma.Web.CommonAPI + alias Pleroma.Web.CommonAPI.Utils + alias Pleroma.Web.MastodonAPI.AccountView + alias Pleroma.Web.MastodonAPI.NotificationView + alias Pleroma.Web.MastodonAPI.StatusView + import Pleroma.Factory + + test "Mention notification" do + user = insert(:user) + mentioned_user = insert(:user) + {:ok, activity} = CommonAPI.post(user, %{"status" => "hey @#{mentioned_user.nickname}"}) + {:ok, [notification]} = Notification.create_notifications(activity) + user = User.get_by_id(user.id) + + expected = %{ + id: to_string(notification.id), + pleroma: %{is_seen: false}, + type: "mention", + account: AccountView.render("account.json", %{user: user, for: mentioned_user}), + status: StatusView.render("status.json", %{activity: activity, for: mentioned_user}), + created_at: Utils.to_masto_date(notification.inserted_at) + } + + result = + NotificationView.render("index.json", %{notifications: [notification], for: mentioned_user}) + + assert [expected] == result + end + + test "Favourite notification" do + user = insert(:user) + another_user = insert(:user) + {:ok, create_activity} = CommonAPI.post(user, %{"status" => "hey"}) + {:ok, favorite_activity, _object} = CommonAPI.favorite(create_activity.id, another_user) + {:ok, [notification]} = Notification.create_notifications(favorite_activity) + create_activity = Activity.get_by_id(create_activity.id) + + expected = %{ + id: to_string(notification.id), + pleroma: %{is_seen: false}, + type: "favourite", + account: AccountView.render("account.json", %{user: another_user, for: user}), + status: StatusView.render("status.json", %{activity: create_activity, for: user}), + created_at: Utils.to_masto_date(notification.inserted_at) + } + + result = NotificationView.render("index.json", 
%{notifications: [notification], for: user}) + + assert [expected] == result + end + + test "Reblog notification" do + user = insert(:user) + another_user = insert(:user) + {:ok, create_activity} = CommonAPI.post(user, %{"status" => "hey"}) + {:ok, reblog_activity, _object} = CommonAPI.repeat(create_activity.id, another_user) + {:ok, [notification]} = Notification.create_notifications(reblog_activity) + reblog_activity = Activity.get_by_id(create_activity.id) + + expected = %{ + id: to_string(notification.id), + pleroma: %{is_seen: false}, + type: "reblog", + account: AccountView.render("account.json", %{user: another_user, for: user}), + status: StatusView.render("status.json", %{activity: reblog_activity, for: user}), + created_at: Utils.to_masto_date(notification.inserted_at) + } + + result = NotificationView.render("index.json", %{notifications: [notification], for: user}) + + assert [expected] == result + end + + test "Follow notification" do + follower = insert(:user) + followed = insert(:user) + {:ok, follower, followed, _activity} = CommonAPI.follow(follower, followed) + notification = Notification |> Repo.one() |> Repo.preload(:activity) + + expected = %{ + id: to_string(notification.id), + pleroma: %{is_seen: false}, + type: "follow", + account: AccountView.render("account.json", %{user: follower, for: followed}), + created_at: Utils.to_masto_date(notification.inserted_at) + } + + result = + NotificationView.render("index.json", %{notifications: [notification], for: followed}) + + assert [expected] == result + end +end diff --git a/test/web/mastodon_api/push_subscription_view_test.exs b/test/web/mastodon_api/push_subscription_view_test.exs new file mode 100644 index 000000000..dc935fc82 --- /dev/null +++ b/test/web/mastodon_api/push_subscription_view_test.exs @@ -0,0 +1,23 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.MastodonAPI.PushSubscriptionViewTest do + use Pleroma.DataCase + import Pleroma.Factory + alias Pleroma.Web.MastodonAPI.PushSubscriptionView, as: View + alias Pleroma.Web.Push + + test "Represent a subscription" do + subscription = insert(:push_subscription, data: %{"alerts" => %{"mention" => true}}) + + expected = %{ + alerts: %{"mention" => true}, + endpoint: subscription.endpoint, + id: to_string(subscription.id), + server_key: Keyword.get(Push.vapid_config(), :public_key) + } + + assert expected == View.render("push_subscription.json", %{subscription: subscription}) + end +end diff --git a/test/web/mastodon_api/scheduled_activity_view_test.exs b/test/web/mastodon_api/scheduled_activity_view_test.exs new file mode 100644 index 000000000..ecbb855d4 --- /dev/null +++ b/test/web/mastodon_api/scheduled_activity_view_test.exs @@ -0,0 +1,68 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.MastodonAPI.ScheduledActivityViewTest do + use Pleroma.DataCase + alias Pleroma.ScheduledActivity + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.CommonAPI + alias Pleroma.Web.CommonAPI.Utils + alias Pleroma.Web.MastodonAPI.ScheduledActivityView + alias Pleroma.Web.MastodonAPI.StatusView + import Pleroma.Factory + + test "A scheduled activity with a media attachment" do + user = insert(:user) + {:ok, activity} = CommonAPI.post(user, %{"status" => "hi"}) + + scheduled_at = + NaiveDateTime.utc_now() + |> NaiveDateTime.add(:timer.minutes(10), 
:millisecond) + |> NaiveDateTime.to_iso8601() + + file = %Plug.Upload{ + content_type: "image/jpg", + path: Path.absname("test/fixtures/image.jpg"), + filename: "an_image.jpg" + } + + {:ok, upload} = ActivityPub.upload(file, actor: user.ap_id) + + attrs = %{ + params: %{ + "media_ids" => [upload.id], + "status" => "hi", + "sensitive" => true, + "spoiler_text" => "spoiler", + "visibility" => "unlisted", + "in_reply_to_id" => to_string(activity.id) + }, + scheduled_at: scheduled_at + } + + {:ok, scheduled_activity} = ScheduledActivity.create(user, attrs) + result = ScheduledActivityView.render("show.json", %{scheduled_activity: scheduled_activity}) + + expected = %{ + id: to_string(scheduled_activity.id), + media_attachments: + %{"media_ids" => [upload.id]} + |> Utils.attachments_from_ids() + |> Enum.map(&StatusView.render("attachment.json", %{attachment: &1})), + params: %{ + in_reply_to_id: to_string(activity.id), + media_ids: [upload.id], + poll: nil, + scheduled_at: nil, + sensitive: true, + spoiler_text: "spoiler", + text: "hi", + visibility: "unlisted" + }, + scheduled_at: Utils.to_masto_date(scheduled_activity.scheduled_at) + } + + assert expected == result + end +end diff --git a/test/web/mastodon_api/status_view_test.exs b/test/web/mastodon_api/status_view_test.exs index 351dbf673..db2fdc2f6 100644 --- a/test/web/mastodon_api/status_view_test.exs +++ b/test/web/mastodon_api/status_view_test.exs @@ -5,13 +5,14 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do use Pleroma.DataCase + alias Pleroma.Activity + alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.CommonAPI + alias Pleroma.Web.CommonAPI.Utils alias Pleroma.Web.MastodonAPI.AccountView alias Pleroma.Web.MastodonAPI.StatusView - alias Pleroma.User alias Pleroma.Web.OStatus - alias Pleroma.Web.CommonAPI - alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Activity import Pleroma.Factory import Tesla.Mock @@ -72,6 +73,8 @@ test "a note activity" do note = insert(:note_activity) user = User.get_cached_by_ap_id(note.data["actor"]) + convo_id = Utils.context_to_conversation_id(note.data["object"]["context"]) + status = StatusView.render("status.json", %{activity: note}) created_at = @@ -98,7 +101,7 @@ test "a note activity" do muted: false, pinned: false, sensitive: false, - spoiler_text: note.data["object"]["summary"], + spoiler_text: HtmlSanitizeEx.basic_html(note.data["object"]["summary"]), visibility: "public", media_attachments: [], mentions: [], @@ -120,7 +123,13 @@ test "a note activity" do static_url: "corndog.png", visible_in_picker: false } - ] + ], + pleroma: %{ + local: true, + conversation_id: convo_id, + content: %{"text/plain" => HtmlSanitizeEx.strip_tags(note.data["object"]["content"])}, + spoiler_text: %{"text/plain" => HtmlSanitizeEx.strip_tags(note.data["object"]["summary"])} + } } assert status == expected @@ -168,7 +177,7 @@ test "contains mentions" do status = StatusView.render("status.json", %{activity: activity}) - actor = Repo.get_by(User, ap_id: activity.actor) + actor = User.get_by_ap_id(activity.actor) assert status.mentions == Enum.map([user, actor], fn u -> AccountView.render("mention.json", %{user: u}) end) @@ -193,7 +202,8 @@ test "attachments" do remote_url: "someurl", preview_url: "someurl", text_url: "someurl", - description: nil + description: nil, + pleroma: %{mime_type: "image/png"} } assert expected == StatusView.render("attachment.json", %{attachment: object}) diff --git a/test/web/mastodon_api/subscription_controller_test.exs 
b/test/web/mastodon_api/subscription_controller_test.exs new file mode 100644 index 000000000..7dfb02f63 --- /dev/null +++ b/test/web/mastodon_api/subscription_controller_test.exs @@ -0,0 +1,192 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.MastodonAPI.SubscriptionControllerTest do + use Pleroma.Web.ConnCase + + import Pleroma.Factory + alias Pleroma.Web.Push + alias Pleroma.Web.Push.Subscription + + @sub %{ + "endpoint" => "https://example.com/example/1234", + "keys" => %{ + "auth" => "8eDyX_uCN0XRhSbY5hs7Hg==", + "p256dh" => + "BCIWgsnyXDv1VkhqL2P7YRBvdeuDnlwAPT2guNhdIoW3IP7GmHh1SMKPLxRf7x8vJy6ZFK3ol2ohgn_-0yP7QQA=" + } + } + @server_key Keyword.get(Push.vapid_config(), :public_key) + + setup do + user = insert(:user) + token = insert(:oauth_token, user: user, scopes: ["push"]) + + conn = + build_conn() + |> assign(:user, user) + |> assign(:token, token) + + %{conn: conn, user: user, token: token} + end + + defmacro assert_error_when_disable_push(do: yield) do + quote do + vapid_details = Application.get_env(:web_push_encryption, :vapid_details, []) + Application.put_env(:web_push_encryption, :vapid_details, []) + assert "Something went wrong" == unquote(yield) + Application.put_env(:web_push_encryption, :vapid_details, vapid_details) + end + end + + describe "creates push subscription" do + test "returns error when push is disabled", %{conn: conn} do + assert_error_when_disable_push do + conn + |> post("/api/v1/push/subscription", %{}) + |> json_response(500) + end + end + + test "successful creation", %{conn: conn} do + result = + conn + |> post("/api/v1/push/subscription", %{ + "data" => %{"alerts" => %{"mention" => true, "test" => true}}, + "subscription" => @sub + }) + |> json_response(200) + + [subscription] = Pleroma.Repo.all(Subscription) + + assert %{ + "alerts" => %{"mention" => true}, + "endpoint" => subscription.endpoint, + "id" => to_string(subscription.id), + "server_key" => @server_key + } == result + end + end + + describe "gets a user subscription" do + test "returns error when push is disabled", %{conn: conn} do + assert_error_when_disable_push do + conn + |> get("/api/v1/push/subscription", %{}) + |> json_response(500) + end + end + + test "returns error when user has no subscription", %{conn: conn} do + res = + conn + |> get("/api/v1/push/subscription", %{}) + |> json_response(404) + + assert "Not found" == res + end + + test "returns a user subscription", %{conn: conn, user: user, token: token} do + subscription = + insert(:push_subscription, + user: user, + token: token, + data: %{"alerts" => %{"mention" => true}} + ) + + res = + conn + |> get("/api/v1/push/subscription", %{}) + |> json_response(200) + + expect = %{ + "alerts" => %{"mention" => true}, + "endpoint" => "https://example.com/example/1234", + "id" => to_string(subscription.id), + "server_key" => @server_key + } + + assert expect == res + end + end + + describe "updates a user subscription" do + setup %{conn: conn, user: user, token: token} do + subscription = + insert(:push_subscription, + user: user, + token: token, + data: %{"alerts" => %{"mention" => true}} + ) + + %{conn: conn, user: user, token: token, subscription: subscription} + end + + test "returns error when push is disabled", %{conn: conn} do + assert_error_when_disable_push do + conn + |> put("/api/v1/push/subscription", %{data: %{"alerts" => %{"mention" => false}}}) + |> json_response(500) + end + end + + test "returns updated 
subscription", %{conn: conn, subscription: subscription} do + res = + conn + |> put("/api/v1/push/subscription", %{ + data: %{"alerts" => %{"mention" => false, "follow" => true}} + }) + |> json_response(200) + + expect = %{ + "alerts" => %{"follow" => true, "mention" => false}, + "endpoint" => "https://example.com/example/1234", + "id" => to_string(subscription.id), + "server_key" => @server_key + } + + assert expect == res + end + end + + describe "deletes the user subscription" do + test "returns error when push is disabled", %{conn: conn} do + assert_error_when_disable_push do + conn + |> delete("/api/v1/push/subscription", %{}) + |> json_response(500) + end + end + + test "returns error when user has no subscription", %{conn: conn} do + res = + conn + |> delete("/api/v1/push/subscription", %{}) + |> json_response(404) + + assert "Not found" == res + end + + test "returns empty result and deletes the user subscription", %{ + conn: conn, + user: user, + token: token + } do + subscription = + insert(:push_subscription, + user: user, + token: token, + data: %{"alerts" => %{"mention" => true}} + ) + + res = + conn + |> delete("/api/v1/push/subscription", %{}) + |> json_response(200) + + assert %{} == res + refute Pleroma.Repo.get(Subscription, subscription.id) + end + end +end diff --git a/test/web/node_info_test.exs b/test/web/node_info_test.exs index 763549bd1..2fc42b7cc 100644 --- a/test/web/node_info_test.exs +++ b/test/web/node_info_test.exs @@ -8,7 +8,8 @@ defmodule Pleroma.Web.NodeInfoTest do import Pleroma.Factory test "nodeinfo shows staff accounts", %{conn: conn} do - user = insert(:user, %{local: true, info: %{is_moderator: true}}) + moderator = insert(:user, %{local: true, info: %{is_moderator: true}}) + admin = insert(:user, %{local: true, info: %{is_admin: true}}) conn = conn @@ -16,7 +17,8 @@ test "nodeinfo shows staff accounts", %{conn: conn} do assert result = json_response(conn, 200) - assert user.ap_id in result["metadata"]["staffAccounts"] + assert moderator.ap_id in result["metadata"]["staffAccounts"] + assert admin.ap_id in result["metadata"]["staffAccounts"] end test "nodeinfo shows restricted nicknames", %{conn: conn} do @@ -106,4 +108,27 @@ test "returns software.repository field in nodeinfo 2.1", %{conn: conn} do assert result = json_response(conn, 200) assert Pleroma.Application.repository() == result["software"]["repository"] end + + test "it returns the safe_dm_mentions feature if enabled", %{conn: conn} do + option = Pleroma.Config.get([:instance, :safe_dm_mentions]) + Pleroma.Config.put([:instance, :safe_dm_mentions], true) + + response = + conn + |> get("/nodeinfo/2.1.json") + |> json_response(:ok) + + assert "safe_dm_mentions" in response["metadata"]["features"] + + Pleroma.Config.put([:instance, :safe_dm_mentions], false) + + response = + conn + |> get("/nodeinfo/2.1.json") + |> json_response(:ok) + + refute "safe_dm_mentions" in response["metadata"]["features"] + + Pleroma.Config.put([:instance, :safe_dm_mentions], option) + end end diff --git a/test/web/oauth/authorization_test.exs b/test/web/oauth/authorization_test.exs index 306db2e62..d8b008437 100644 --- a/test/web/oauth/authorization_test.exs +++ b/test/web/oauth/authorization_test.exs @@ -4,8 +4,8 @@ defmodule Pleroma.Web.OAuth.AuthorizationTest do use Pleroma.DataCase - alias Pleroma.Web.OAuth.Authorization alias Pleroma.Web.OAuth.App + alias Pleroma.Web.OAuth.Authorization import Pleroma.Factory setup do diff --git a/test/web/oauth/ldap_authorization_test.exs b/test/web/oauth/ldap_authorization_test.exs 
new file mode 100644 index 000000000..0eb191c76 --- /dev/null +++ b/test/web/oauth/ldap_authorization_test.exs @@ -0,0 +1,195 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2019 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.OAuth.LDAPAuthorizationTest do + use Pleroma.Web.ConnCase + alias Pleroma.Repo + alias Pleroma.Web.OAuth.Token + import Pleroma.Factory + import ExUnit.CaptureLog + import Mock + + @skip if !Code.ensure_loaded?(:eldap), do: :skip + + setup_all do + ldap_authenticator = + Pleroma.Config.get(Pleroma.Web.Auth.Authenticator, Pleroma.Web.Auth.PleromaAuthenticator) + + ldap_enabled = Pleroma.Config.get([:ldap, :enabled]) + + on_exit(fn -> + Pleroma.Config.put(Pleroma.Web.Auth.Authenticator, ldap_authenticator) + Pleroma.Config.put([:ldap, :enabled], ldap_enabled) + end) + + Pleroma.Config.put(Pleroma.Web.Auth.Authenticator, Pleroma.Web.Auth.LDAPAuthenticator) + Pleroma.Config.put([:ldap, :enabled], true) + + :ok + end + + @tag @skip + test "authorizes the existing user using LDAP credentials" do + password = "testpassword" + user = insert(:user, password_hash: Comeonin.Pbkdf2.hashpwsalt(password)) + app = insert(:oauth_app, scopes: ["read", "write"]) + + host = Pleroma.Config.get([:ldap, :host]) |> to_charlist + port = Pleroma.Config.get([:ldap, :port]) + + with_mocks [ + {:eldap, [], + [ + open: fn [^host], [{:port, ^port}, {:ssl, false} | _] -> {:ok, self()} end, + simple_bind: fn _connection, _dn, ^password -> :ok end, + close: fn _connection -> + send(self(), :close_connection) + :ok + end + ]} + ] do + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "password", + "username" => user.nickname, + "password" => password, + "client_id" => app.client_id, + "client_secret" => app.client_secret + }) + + assert %{"access_token" => token} = json_response(conn, 200) + + token = Repo.get_by(Token, token: token) + + assert token.user_id == user.id + assert_received :close_connection + end + end + + @tag @skip + test "creates a new user after successful LDAP authorization" do + password = "testpassword" + user = build(:user) + app = insert(:oauth_app, scopes: ["read", "write"]) + + host = Pleroma.Config.get([:ldap, :host]) |> to_charlist + port = Pleroma.Config.get([:ldap, :port]) + + with_mocks [ + {:eldap, [], + [ + open: fn [^host], [{:port, ^port}, {:ssl, false} | _] -> {:ok, self()} end, + simple_bind: fn _connection, _dn, ^password -> :ok end, + equalityMatch: fn _type, _value -> :ok end, + wholeSubtree: fn -> :ok end, + search: fn _connection, _options -> + {:ok, + {:eldap_search_result, [{:eldap_entry, '', [{'mail', [to_charlist(user.email)]}]}], + []}} + end, + close: fn _connection -> + send(self(), :close_connection) + :ok + end + ]} + ] do + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "password", + "username" => user.nickname, + "password" => password, + "client_id" => app.client_id, + "client_secret" => app.client_secret + }) + + assert %{"access_token" => token} = json_response(conn, 200) + + token = Repo.get_by(Token, token: token) |> Repo.preload(:user) + + assert token.user.nickname == user.nickname + assert_received :close_connection + end + end + + @tag @skip + test "falls back to the default authorization when LDAP is unavailable" do + password = "testpassword" + user = insert(:user, password_hash: Comeonin.Pbkdf2.hashpwsalt(password)) + app = insert(:oauth_app, scopes: ["read", "write"]) + + host = Pleroma.Config.get([:ldap, :host]) |> to_charlist + port = 
Pleroma.Config.get([:ldap, :port]) + + with_mocks [ + {:eldap, [], + [ + open: fn [^host], [{:port, ^port}, {:ssl, false} | _] -> {:error, 'connect failed'} end, + simple_bind: fn _connection, _dn, ^password -> :ok end, + close: fn _connection -> + send(self(), :close_connection) + :ok + end + ]} + ] do + log = + capture_log(fn -> + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "password", + "username" => user.nickname, + "password" => password, + "client_id" => app.client_id, + "client_secret" => app.client_secret + }) + + assert %{"access_token" => token} = json_response(conn, 200) + + token = Repo.get_by(Token, token: token) + + assert token.user_id == user.id + end) + + assert log =~ "Could not open LDAP connection: 'connect failed'" + refute_received :close_connection + end + end + + @tag @skip + test "disallow authorization for wrong LDAP credentials" do + password = "testpassword" + user = insert(:user, password_hash: Comeonin.Pbkdf2.hashpwsalt(password)) + app = insert(:oauth_app, scopes: ["read", "write"]) + + host = Pleroma.Config.get([:ldap, :host]) |> to_charlist + port = Pleroma.Config.get([:ldap, :port]) + + with_mocks [ + {:eldap, [], + [ + open: fn [^host], [{:port, ^port}, {:ssl, false} | _] -> {:ok, self()} end, + simple_bind: fn _connection, _dn, ^password -> {:error, :invalidCredentials} end, + close: fn _connection -> + send(self(), :close_connection) + :ok + end + ]} + ] do + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "password", + "username" => user.nickname, + "password" => password, + "client_id" => app.client_id, + "client_secret" => app.client_secret + }) + + assert %{"error" => "Invalid credentials"} = json_response(conn, 400) + assert_received :close_connection + end + end +end diff --git a/test/web/oauth/oauth_controller_test.exs b/test/web/oauth/oauth_controller_test.exs index ed94416ff..ac7843f9b 100644 --- a/test/web/oauth/oauth_controller_test.exs +++ b/test/web/oauth/oauth_controller_test.exs @@ -5,265 +5,676 @@ defmodule Pleroma.Web.OAuth.OAuthControllerTest do use Pleroma.Web.ConnCase import Pleroma.Factory + import Mock + alias Pleroma.Registration alias Pleroma.Repo alias Pleroma.Web.OAuth.Authorization alias Pleroma.Web.OAuth.Token - test "redirects with oauth authorization" do - user = insert(:user) - app = insert(:oauth_app, scopes: ["read", "write", "follow"]) + @session_opts [ + store: :cookie, + key: "_test", + signing_salt: "cooldude" + ] - conn = - build_conn() - |> post("/oauth/authorize", %{ - "authorization" => %{ - "name" => user.nickname, - "password" => "test", - "client_id" => app.client_id, - "redirect_uri" => app.redirect_uris, - "scope" => "read write", - "state" => "statepassed" - } - }) + describe "in OAuth consumer mode, " do + setup do + oauth_consumer_strategies_path = [:auth, :oauth_consumer_strategies] + oauth_consumer_strategies = Pleroma.Config.get(oauth_consumer_strategies_path) + Pleroma.Config.put(oauth_consumer_strategies_path, ~w(twitter facebook)) - target = redirected_to(conn) - assert target =~ app.redirect_uris + on_exit(fn -> + Pleroma.Config.put(oauth_consumer_strategies_path, oauth_consumer_strategies) + end) - query = URI.parse(target).query |> URI.query_decoder() |> Map.new() - - assert %{"state" => "statepassed", "code" => code} = query - auth = Repo.get_by(Authorization, token: code) - assert auth - assert auth.scopes == ["read", "write"] - end - - test "returns 401 for wrong credentials", %{conn: conn} do - user = insert(:user) - app = insert(:oauth_app) - - 
result = - conn - |> post("/oauth/authorize", %{ - "authorization" => %{ - "name" => user.nickname, - "password" => "wrong", - "client_id" => app.client_id, - "redirect_uri" => app.redirect_uris, - "state" => "statepassed", - "scope" => Enum.join(app.scopes, " ") - } - }) - |> html_response(:unauthorized) - - # Keep the details - assert result =~ app.client_id - assert result =~ app.redirect_uris - - # Error message - assert result =~ "Invalid Username/Password" - end - - test "returns 401 for missing scopes", %{conn: conn} do - user = insert(:user) - app = insert(:oauth_app) - - result = - conn - |> post("/oauth/authorize", %{ - "authorization" => %{ - "name" => user.nickname, - "password" => "test", - "client_id" => app.client_id, - "redirect_uri" => app.redirect_uris, - "state" => "statepassed", - "scope" => "" - } - }) - |> html_response(:unauthorized) - - # Keep the details - assert result =~ app.client_id - assert result =~ app.redirect_uris - - # Error message - assert result =~ "Permissions not specified" - end - - test "returns 401 for scopes beyond app scopes", %{conn: conn} do - user = insert(:user) - app = insert(:oauth_app, scopes: ["read", "write"]) - - result = - conn - |> post("/oauth/authorize", %{ - "authorization" => %{ - "name" => user.nickname, - "password" => "test", - "client_id" => app.client_id, - "redirect_uri" => app.redirect_uris, - "state" => "statepassed", - "scope" => "read write follow" - } - }) - |> html_response(:unauthorized) - - # Keep the details - assert result =~ app.client_id - assert result =~ app.redirect_uris - - # Error message - assert result =~ "Permissions not specified" - end - - test "issues a token for an all-body request" do - user = insert(:user) - app = insert(:oauth_app, scopes: ["read", "write"]) - - {:ok, auth} = Authorization.create_authorization(app, user, ["write"]) - - conn = - build_conn() - |> post("/oauth/token", %{ - "grant_type" => "authorization_code", - "code" => auth.token, - "redirect_uri" => app.redirect_uris, - "client_id" => app.client_id, - "client_secret" => app.client_secret - }) - - assert %{"access_token" => token} = json_response(conn, 200) - - token = Repo.get_by(Token, token: token) - assert token - assert token.scopes == auth.scopes - end - - test "issues a token for `password` grant_type with valid credentials, with full permissions by default" do - password = "testpassword" - user = insert(:user, password_hash: Comeonin.Pbkdf2.hashpwsalt(password)) - - app = insert(:oauth_app, scopes: ["read", "write"]) - - # Note: "scope" param is intentionally omitted - conn = - build_conn() - |> post("/oauth/token", %{ - "grant_type" => "password", - "username" => user.nickname, - "password" => password, - "client_id" => app.client_id, - "client_secret" => app.client_secret - }) - - assert %{"access_token" => token} = json_response(conn, 200) - - token = Repo.get_by(Token, token: token) - assert token - assert token.scopes == app.scopes - end - - test "issues a token for request with HTTP basic auth client credentials" do - user = insert(:user) - app = insert(:oauth_app, scopes: ["scope1", "scope2", "scope3"]) - - {:ok, auth} = Authorization.create_authorization(app, user, ["scope1", "scope2"]) - assert auth.scopes == ["scope1", "scope2"] - - app_encoded = - (URI.encode_www_form(app.client_id) <> ":" <> URI.encode_www_form(app.client_secret)) - |> Base.encode64() - - conn = - build_conn() - |> put_req_header("authorization", "Basic " <> app_encoded) - |> post("/oauth/token", %{ - "grant_type" => "authorization_code", - 
"code" => auth.token, - "redirect_uri" => app.redirect_uris - }) - - assert %{"access_token" => token, "scope" => scope} = json_response(conn, 200) - - assert scope == "scope1 scope2" - - token = Repo.get_by(Token, token: token) - assert token - assert token.scopes == ["scope1", "scope2"] - end - - test "rejects token exchange with invalid client credentials" do - user = insert(:user) - app = insert(:oauth_app) - - {:ok, auth} = Authorization.create_authorization(app, user) - - conn = - build_conn() - |> put_req_header("authorization", "Basic JTIxOiVGMCU5RiVBNCVCNwo=") - |> post("/oauth/token", %{ - "grant_type" => "authorization_code", - "code" => auth.token, - "redirect_uri" => app.redirect_uris - }) - - assert resp = json_response(conn, 400) - assert %{"error" => _} = resp - refute Map.has_key?(resp, "access_token") - end - - test "rejects token exchange for valid credentials belonging to unconfirmed user and confirmation is required" do - setting = Pleroma.Config.get([:instance, :account_activation_required]) - - unless setting do - Pleroma.Config.put([:instance, :account_activation_required], true) - on_exit(fn -> Pleroma.Config.put([:instance, :account_activation_required], setting) end) + [ + app: insert(:oauth_app), + conn: + build_conn() + |> Plug.Session.call(Plug.Session.init(@session_opts)) + |> fetch_session() + ] end - password = "testpassword" - user = insert(:user, password_hash: Comeonin.Pbkdf2.hashpwsalt(password)) - info_change = Pleroma.User.Info.confirmation_changeset(user.info, :unconfirmed) + test "GET /oauth/authorize renders auth forms, including OAuth consumer form", %{ + app: app, + conn: conn + } do + conn = + get( + conn, + "/oauth/authorize", + %{ + "response_type" => "code", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "scope" => "read" + } + ) - {:ok, user} = - user - |> Ecto.Changeset.change() - |> Ecto.Changeset.put_embed(:info, info_change) - |> Repo.update() + assert response = html_response(conn, 200) + assert response =~ "Sign in with Twitter" + assert response =~ o_auth_path(conn, :prepare_request) + end - refute Pleroma.User.auth_active?(user) + test "GET /oauth/prepare_request encodes parameters as `state` and redirects", %{ + app: app, + conn: conn + } do + conn = + get( + conn, + "/oauth/prepare_request", + %{ + "provider" => "twitter", + "scope" => "read follow", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "a_state" + } + ) - app = insert(:oauth_app) + assert response = html_response(conn, 302) - conn = - build_conn() - |> post("/oauth/token", %{ - "grant_type" => "password", - "username" => user.nickname, - "password" => password, + redirect_query = URI.parse(redirected_to(conn)).query + assert %{"state" => state_param} = URI.decode_query(redirect_query) + assert {:ok, state_components} = Poison.decode(state_param) + + expected_client_id = app.client_id + expected_redirect_uri = app.redirect_uris + + assert %{ + "scope" => "read follow", + "client_id" => ^expected_client_id, + "redirect_uri" => ^expected_redirect_uri, + "state" => "a_state" + } = state_components + end + + test "with user-bound registration, GET /oauth//callback redirects to `redirect_uri` with `code`", + %{app: app, conn: conn} do + registration = insert(:registration) + + state_params = %{ + "scope" => Enum.join(app.scopes, " "), "client_id" => app.client_id, - "client_secret" => app.client_secret - }) + "redirect_uri" => app.redirect_uris, + "state" => "" + } - assert resp = json_response(conn, 403) - assert 
%{"error" => _} = resp - refute Map.has_key?(resp, "access_token") + with_mock Pleroma.Web.Auth.Authenticator, + get_registration: fn _, _ -> {:ok, registration} end do + conn = + get( + conn, + "/oauth/twitter/callback", + %{ + "oauth_token" => "G-5a3AAAAAAAwMH9AAABaektfSM", + "oauth_verifier" => "QZl8vUqNvXMTKpdmUnGejJxuHG75WWWs", + "provider" => "twitter", + "state" => Poison.encode!(state_params) + } + ) + + assert response = html_response(conn, 302) + assert redirected_to(conn) =~ ~r/#{app.redirect_uris}\?code=.+/ + end + end + + test "with user-unbound registration, GET /oauth//callback renders registration_details page", + %{app: app, conn: conn} do + registration = insert(:registration, user: nil) + + state_params = %{ + "scope" => "read write", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "a_state" + } + + with_mock Pleroma.Web.Auth.Authenticator, + get_registration: fn _, _ -> {:ok, registration} end do + conn = + get( + conn, + "/oauth/twitter/callback", + %{ + "oauth_token" => "G-5a3AAAAAAAwMH9AAABaektfSM", + "oauth_verifier" => "QZl8vUqNvXMTKpdmUnGejJxuHG75WWWs", + "provider" => "twitter", + "state" => Poison.encode!(state_params) + } + ) + + assert response = html_response(conn, 200) + assert response =~ ~r/name="op" type="submit" value="register"/ + assert response =~ ~r/name="op" type="submit" value="connect"/ + assert response =~ Registration.email(registration) + assert response =~ Registration.nickname(registration) + end + end + + test "on authentication error, GET /oauth//callback redirects to `redirect_uri`", %{ + app: app, + conn: conn + } do + state_params = %{ + "scope" => Enum.join(app.scopes, " "), + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "" + } + + conn = + conn + |> assign(:ueberauth_failure, %{errors: [%{message: "(error description)"}]}) + |> get( + "/oauth/twitter/callback", + %{ + "oauth_token" => "G-5a3AAAAAAAwMH9AAABaektfSM", + "oauth_verifier" => "QZl8vUqNvXMTKpdmUnGejJxuHG75WWWs", + "provider" => "twitter", + "state" => Poison.encode!(state_params) + } + ) + + assert response = html_response(conn, 302) + assert redirected_to(conn) == app.redirect_uris + assert get_flash(conn, :error) == "Failed to authenticate: (error description)." 
+ end + + test "GET /oauth/registration_details renders registration details form", %{ + app: app, + conn: conn + } do + conn = + get( + conn, + "/oauth/registration_details", + %{ + "scopes" => app.scopes, + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "a_state", + "nickname" => nil, + "email" => "john@doe.com" + } + ) + + assert response = html_response(conn, 200) + assert response =~ ~r/name="op" type="submit" value="register"/ + assert response =~ ~r/name="op" type="submit" value="connect"/ + end + + test "with valid params, POST /oauth/register?op=register redirects to `redirect_uri` with `code`", + %{ + app: app, + conn: conn + } do + registration = insert(:registration, user: nil, info: %{"nickname" => nil, "email" => nil}) + + conn = + conn + |> put_session(:registration_id, registration.id) + |> post( + "/oauth/register", + %{ + "op" => "register", + "scopes" => app.scopes, + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "a_state", + "nickname" => "availablenick", + "email" => "available@email.com" + } + ) + + assert response = html_response(conn, 302) + assert redirected_to(conn) =~ ~r/#{app.redirect_uris}\?code=.+/ + end + + test "with invalid params, POST /oauth/register?op=register renders registration_details page", + %{ + app: app, + conn: conn + } do + another_user = insert(:user) + registration = insert(:registration, user: nil, info: %{"nickname" => nil, "email" => nil}) + + params = %{ + "op" => "register", + "scopes" => app.scopes, + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "a_state", + "nickname" => "availablenickname", + "email" => "available@email.com" + } + + for {bad_param, bad_param_value} <- + [{"nickname", another_user.nickname}, {"email", another_user.email}] do + bad_params = Map.put(params, bad_param, bad_param_value) + + conn = + conn + |> put_session(:registration_id, registration.id) + |> post("/oauth/register", bad_params) + + assert html_response(conn, 403) =~ ~r/name="op" type="submit" value="register"/ + assert get_flash(conn, :error) == "Error: #{bad_param} has already been taken." 
+ end + end + + test "with valid params, POST /oauth/register?op=connect redirects to `redirect_uri` with `code`", + %{ + app: app, + conn: conn + } do + user = insert(:user, password_hash: Comeonin.Pbkdf2.hashpwsalt("testpassword")) + registration = insert(:registration, user: nil) + + conn = + conn + |> put_session(:registration_id, registration.id) + |> post( + "/oauth/register", + %{ + "op" => "connect", + "scopes" => app.scopes, + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "a_state", + "auth_name" => user.nickname, + "password" => "testpassword" + } + ) + + assert response = html_response(conn, 302) + assert redirected_to(conn) =~ ~r/#{app.redirect_uris}\?code=.+/ + end + + test "with invalid params, POST /oauth/register?op=connect renders registration_details page", + %{ + app: app, + conn: conn + } do + user = insert(:user) + registration = insert(:registration, user: nil) + + params = %{ + "op" => "connect", + "scopes" => app.scopes, + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "a_state", + "auth_name" => user.nickname, + "password" => "wrong password" + } + + conn = + conn + |> put_session(:registration_id, registration.id) + |> post("/oauth/register", params) + + assert html_response(conn, 401) =~ ~r/name="op" type="submit" value="connect"/ + assert get_flash(conn, :error) == "Invalid Username/Password" + end end - test "rejects an invalid authorization code" do - app = insert(:oauth_app) + describe "GET /oauth/authorize" do + setup do + [ + app: insert(:oauth_app, redirect_uris: "https://redirect.url"), + conn: + build_conn() + |> Plug.Session.call(Plug.Session.init(@session_opts)) + |> fetch_session() + ] + end - conn = - build_conn() - |> post("/oauth/token", %{ - "grant_type" => "authorization_code", - "code" => "Imobviouslyinvalid", - "redirect_uri" => app.redirect_uris, - "client_id" => app.client_id, - "client_secret" => app.client_secret - }) + test "renders authentication page", %{app: app, conn: conn} do + conn = + get( + conn, + "/oauth/authorize", + %{ + "response_type" => "code", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "scope" => "read" + } + ) - assert resp = json_response(conn, 400) - assert %{"error" => _} = json_response(conn, 400) - refute Map.has_key?(resp, "access_token") + assert html_response(conn, 200) =~ ~s(type="submit") + end + + test "renders authentication page if user is already authenticated but `force_login` is tru-ish", + %{app: app, conn: conn} do + token = insert(:oauth_token, app_id: app.id) + + conn = + conn + |> put_session(:oauth_token, token.token) + |> get( + "/oauth/authorize", + %{ + "response_type" => "code", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "scope" => "read", + "force_login" => "true" + } + ) + + assert html_response(conn, 200) =~ ~s(type="submit") + end + + test "redirects to app if user is already authenticated", %{app: app, conn: conn} do + token = insert(:oauth_token, app_id: app.id) + + conn = + conn + |> put_session(:oauth_token, token.token) + |> get( + "/oauth/authorize", + %{ + "response_type" => "code", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "scope" => "read" + } + ) + + assert redirected_to(conn) == "https://redirect.url" + end + end + + describe "POST /oauth/authorize" do + test "redirects with oauth authorization" do + user = insert(:user) + app = insert(:oauth_app, scopes: ["read", "write", "follow"]) + + conn = + build_conn() + |> 
post("/oauth/authorize", %{ + "authorization" => %{ + "name" => user.nickname, + "password" => "test", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "scope" => "read write", + "state" => "statepassed" + } + }) + + target = redirected_to(conn) + assert target =~ app.redirect_uris + + query = URI.parse(target).query |> URI.query_decoder() |> Map.new() + + assert %{"state" => "statepassed", "code" => code} = query + auth = Repo.get_by(Authorization, token: code) + assert auth + assert auth.scopes == ["read", "write"] + end + + test "returns 401 for wrong credentials", %{conn: conn} do + user = insert(:user) + app = insert(:oauth_app) + + result = + conn + |> post("/oauth/authorize", %{ + "authorization" => %{ + "name" => user.nickname, + "password" => "wrong", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "statepassed", + "scope" => Enum.join(app.scopes, " ") + } + }) + |> html_response(:unauthorized) + + # Keep the details + assert result =~ app.client_id + assert result =~ app.redirect_uris + + # Error message + assert result =~ "Invalid Username/Password" + end + + test "returns 401 for missing scopes", %{conn: conn} do + user = insert(:user) + app = insert(:oauth_app) + + result = + conn + |> post("/oauth/authorize", %{ + "authorization" => %{ + "name" => user.nickname, + "password" => "test", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "statepassed", + "scope" => "" + } + }) + |> html_response(:unauthorized) + + # Keep the details + assert result =~ app.client_id + assert result =~ app.redirect_uris + + # Error message + assert result =~ "This action is outside the authorized scopes" + end + + test "returns 401 for scopes beyond app scopes", %{conn: conn} do + user = insert(:user) + app = insert(:oauth_app, scopes: ["read", "write"]) + + result = + conn + |> post("/oauth/authorize", %{ + "authorization" => %{ + "name" => user.nickname, + "password" => "test", + "client_id" => app.client_id, + "redirect_uri" => app.redirect_uris, + "state" => "statepassed", + "scope" => "read write follow" + } + }) + |> html_response(:unauthorized) + + # Keep the details + assert result =~ app.client_id + assert result =~ app.redirect_uris + + # Error message + assert result =~ "This action is outside the authorized scopes" + end + end + + describe "POST /oauth/token" do + test "issues a token for an all-body request" do + user = insert(:user) + app = insert(:oauth_app, scopes: ["read", "write"]) + + {:ok, auth} = Authorization.create_authorization(app, user, ["write"]) + + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "authorization_code", + "code" => auth.token, + "redirect_uri" => app.redirect_uris, + "client_id" => app.client_id, + "client_secret" => app.client_secret + }) + + assert %{"access_token" => token, "me" => ap_id} = json_response(conn, 200) + + token = Repo.get_by(Token, token: token) + assert token + assert token.scopes == auth.scopes + assert user.ap_id == ap_id + end + + test "issues a token for `password` grant_type with valid credentials, with full permissions by default" do + password = "testpassword" + user = insert(:user, password_hash: Comeonin.Pbkdf2.hashpwsalt(password)) + + app = insert(:oauth_app, scopes: ["read", "write"]) + + # Note: "scope" param is intentionally omitted + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "password", + "username" => user.nickname, + "password" => password, + "client_id" => app.client_id, + 
"client_secret" => app.client_secret + }) + + assert %{"access_token" => token} = json_response(conn, 200) + + token = Repo.get_by(Token, token: token) + assert token + assert token.scopes == app.scopes + end + + test "issues a token for request with HTTP basic auth client credentials" do + user = insert(:user) + app = insert(:oauth_app, scopes: ["scope1", "scope2", "scope3"]) + + {:ok, auth} = Authorization.create_authorization(app, user, ["scope1", "scope2"]) + assert auth.scopes == ["scope1", "scope2"] + + app_encoded = + (URI.encode_www_form(app.client_id) <> ":" <> URI.encode_www_form(app.client_secret)) + |> Base.encode64() + + conn = + build_conn() + |> put_req_header("authorization", "Basic " <> app_encoded) + |> post("/oauth/token", %{ + "grant_type" => "authorization_code", + "code" => auth.token, + "redirect_uri" => app.redirect_uris + }) + + assert %{"access_token" => token, "scope" => scope} = json_response(conn, 200) + + assert scope == "scope1 scope2" + + token = Repo.get_by(Token, token: token) + assert token + assert token.scopes == ["scope1", "scope2"] + end + + test "rejects token exchange with invalid client credentials" do + user = insert(:user) + app = insert(:oauth_app) + + {:ok, auth} = Authorization.create_authorization(app, user) + + conn = + build_conn() + |> put_req_header("authorization", "Basic JTIxOiVGMCU5RiVBNCVCNwo=") + |> post("/oauth/token", %{ + "grant_type" => "authorization_code", + "code" => auth.token, + "redirect_uri" => app.redirect_uris + }) + + assert resp = json_response(conn, 400) + assert %{"error" => _} = resp + refute Map.has_key?(resp, "access_token") + end + + test "rejects token exchange for valid credentials belonging to unconfirmed user and confirmation is required" do + setting = Pleroma.Config.get([:instance, :account_activation_required]) + + unless setting do + Pleroma.Config.put([:instance, :account_activation_required], true) + on_exit(fn -> Pleroma.Config.put([:instance, :account_activation_required], setting) end) + end + + password = "testpassword" + user = insert(:user, password_hash: Comeonin.Pbkdf2.hashpwsalt(password)) + info_change = Pleroma.User.Info.confirmation_changeset(user.info, :unconfirmed) + + {:ok, user} = + user + |> Ecto.Changeset.change() + |> Ecto.Changeset.put_embed(:info, info_change) + |> Repo.update() + + refute Pleroma.User.auth_active?(user) + + app = insert(:oauth_app) + + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "password", + "username" => user.nickname, + "password" => password, + "client_id" => app.client_id, + "client_secret" => app.client_secret + }) + + assert resp = json_response(conn, 403) + assert %{"error" => _} = resp + refute Map.has_key?(resp, "access_token") + end + + test "rejects token exchange for valid credentials belonging to deactivated user" do + password = "testpassword" + + user = + insert(:user, + password_hash: Comeonin.Pbkdf2.hashpwsalt(password), + info: %{deactivated: true} + ) + + app = insert(:oauth_app) + + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "password", + "username" => user.nickname, + "password" => password, + "client_id" => app.client_id, + "client_secret" => app.client_secret + }) + + assert resp = json_response(conn, 403) + assert %{"error" => _} = resp + refute Map.has_key?(resp, "access_token") + end + + test "rejects an invalid authorization code" do + app = insert(:oauth_app) + + conn = + build_conn() + |> post("/oauth/token", %{ + "grant_type" => "authorization_code", + "code" => "Imobviouslyinvalid", 
+ "redirect_uri" => app.redirect_uris, + "client_id" => app.client_id, + "client_secret" => app.client_secret + }) + + assert resp = json_response(conn, 400) + assert %{"error" => _} = json_response(conn, 400) + refute Map.has_key?(resp, "access_token") + end end end diff --git a/test/web/oauth/token_test.exs b/test/web/oauth/token_test.exs index 62444a0fa..ad2a49f09 100644 --- a/test/web/oauth/token_test.exs +++ b/test/web/oauth/token_test.exs @@ -4,10 +4,10 @@ defmodule Pleroma.Web.OAuth.TokenTest do use Pleroma.DataCase + alias Pleroma.Repo alias Pleroma.Web.OAuth.App alias Pleroma.Web.OAuth.Authorization alias Pleroma.Web.OAuth.Token - alias Pleroma.Repo import Pleroma.Factory diff --git a/test/web/ostatus/activity_representer_test.exs b/test/web/ostatus/activity_representer_test.exs index eebc5c040..a4bb68c4d 100644 --- a/test/web/ostatus/activity_representer_test.exs +++ b/test/web/ostatus/activity_representer_test.exs @@ -5,12 +5,12 @@ defmodule Pleroma.Web.OStatus.ActivityRepresenterTest do use Pleroma.DataCase - alias Pleroma.Web.OStatus.ActivityRepresenter alias Pleroma.Activity - alias Pleroma.User alias Pleroma.Object + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.OStatus + alias Pleroma.Web.OStatus.ActivityRepresenter import Pleroma.Factory import Tesla.Mock @@ -116,10 +116,10 @@ test "an announce activity" do {:ok, announce, _object} = ActivityPub.announce(user, object) - announce = Repo.get(Activity, announce.id) + announce = Activity.get_by_id(announce.id) note_user = User.get_cached_by_ap_id(note.data["actor"]) - note = Repo.get(Activity, note.id) + note = Activity.get_by_id(note.id) note_xml = ActivityRepresenter.to_simple_form(note, note_user, true) diff --git a/test/web/ostatus/feed_representer_test.exs b/test/web/ostatus/feed_representer_test.exs index efd4e7217..3c7b126e7 100644 --- a/test/web/ostatus/feed_representer_test.exs +++ b/test/web/ostatus/feed_representer_test.exs @@ -6,10 +6,10 @@ defmodule Pleroma.Web.OStatus.FeedRepresenterTest do use Pleroma.DataCase import Pleroma.Factory alias Pleroma.User + alias Pleroma.Web.OStatus alias Pleroma.Web.OStatus.ActivityRepresenter alias Pleroma.Web.OStatus.FeedRepresenter alias Pleroma.Web.OStatus.UserRepresenter - alias Pleroma.Web.OStatus test "returns a feed of the last 20 items of the user" do note_activity = insert(:note_activity) diff --git a/test/web/ostatus/incoming_documents/delete_handling_test.exs b/test/web/ostatus/incoming_documents/delete_handling_test.exs index d295cc539..ca6e61339 100644 --- a/test/web/ostatus/incoming_documents/delete_handling_test.exs +++ b/test/web/ostatus/incoming_documents/delete_handling_test.exs @@ -4,7 +4,6 @@ defmodule Pleroma.Web.OStatus.DeleteHandlingTest do import Pleroma.Factory import Tesla.Mock - alias Pleroma.Repo alias Pleroma.Activity alias Pleroma.Object alias Pleroma.Web.OStatus @@ -32,10 +31,10 @@ test "it removes the mentioned activity" do {:ok, [delete]} = OStatus.handle_incoming(incoming) - refute Repo.get(Activity, note.id) - refute Repo.get(Activity, like.id) + refute Activity.get_by_id(note.id) + refute Activity.get_by_id(like.id) assert Object.get_by_ap_id(note.data["object"]["id"]).data["type"] == "Tombstone" - assert Repo.get(Activity, second_note.id) + assert Activity.get_by_id(second_note.id) assert Object.get_by_ap_id(second_note.data["object"]["id"]) assert delete.data["type"] == "Delete" diff --git a/test/web/ostatus/ostatus_controller_test.exs b/test/web/ostatus/ostatus_controller_test.exs index da9c72be8..2950f11c0 
100644 --- a/test/web/ostatus/ostatus_controller_test.exs +++ b/test/web/ostatus/ostatus_controller_test.exs @@ -5,9 +5,9 @@ defmodule Pleroma.Web.OStatus.OStatusControllerTest do use Pleroma.Web.ConnCase import Pleroma.Factory - alias Pleroma.User - alias Pleroma.Repo alias Pleroma.Object + alias Pleroma.Repo + alias Pleroma.User alias Pleroma.Web.CommonAPI alias Pleroma.Web.OStatus.ActivityRepresenter diff --git a/test/web/ostatus/ostatus_test.exs b/test/web/ostatus/ostatus_test.exs index b4b19ab05..9fd100f63 100644 --- a/test/web/ostatus/ostatus_test.exs +++ b/test/web/ostatus/ostatus_test.exs @@ -4,13 +4,13 @@ defmodule Pleroma.Web.OStatusTest do use Pleroma.DataCase - alias Pleroma.Web.OStatus - alias Pleroma.Web.XML + alias Pleroma.Activity + alias Pleroma.Instances alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User - alias Pleroma.Activity - alias Pleroma.Instances + alias Pleroma.Web.OStatus + alias Pleroma.Web.XML import Pleroma.Factory import ExUnit.CaptureLog @@ -154,7 +154,7 @@ test "handle incoming retweets - GS, subscription" do assert "https://pleroma.soykaf.com/users/lain" in activity.data["to"] refute activity.local - retweeted_activity = Repo.get(Activity, retweeted_activity.id) + retweeted_activity = Activity.get_by_id(retweeted_activity.id) assert retweeted_activity.data["type"] == "Create" assert retweeted_activity.data["actor"] == "https://pleroma.soykaf.com/users/lain" refute retweeted_activity.local @@ -181,7 +181,7 @@ test "handle incoming retweets - GS, subscription - local message" do assert user.ap_id in activity.data["to"] refute activity.local - retweeted_activity = Repo.get(Activity, retweeted_activity.id) + retweeted_activity = Activity.get_by_id(retweeted_activity.id) assert note_activity.id == retweeted_activity.id assert retweeted_activity.data["type"] == "Create" assert retweeted_activity.data["actor"] == user.ap_id @@ -344,7 +344,7 @@ test "tries to use the information in poco fields" do {:ok, user} = OStatus.find_or_make_user(uri) - user = Repo.get(Pleroma.User, user.id) + user = Pleroma.User.get_by_id(user.id) assert user.name == "Constance Variable" assert user.nickname == "lambadalambda@social.heldscal.la" assert user.local == false diff --git a/test/web/push/impl_test.exs b/test/web/push/impl_test.exs new file mode 100644 index 000000000..6bac2c9f6 --- /dev/null +++ b/test/web/push/impl_test.exs @@ -0,0 +1,147 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2018 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.Push.ImplTest do + use Pleroma.DataCase + + alias Pleroma.Web.Push.Impl + alias Pleroma.Web.Push.Subscription + + import Pleroma.Factory + + setup_all do + Tesla.Mock.mock_global(fn + %{method: :post, url: "https://example.com/example/1234"} -> + %Tesla.Env{status: 200} + + %{method: :post, url: "https://example.com/example/not_found"} -> + %Tesla.Env{status: 400} + + %{method: :post, url: "https://example.com/example/bad"} -> + %Tesla.Env{status: 100} + end) + + :ok + end + + @sub %{ + endpoint: "https://example.com/example/1234", + keys: %{ + auth: "8eDyX_uCN0XRhSbY5hs7Hg==", + p256dh: + "BCIWgsnyXDv1VkhqL2P7YRBvdeuDnlwAPT2guNhdIoW3IP7GmHh1SMKPLxRf7x8vJy6ZFK3ol2ohgn_-0yP7QQA=" + } + } + @api_key "BASgACIHpN1GYgzSRp" + @message "@Bob: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce sagittis fini..." 
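+  # @sub targets the push endpoint mocked in setup_all; @message matches the truncated body asserted for create activities below.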
+ + test "performs sending notifications" do + user = insert(:user) + user2 = insert(:user) + insert(:push_subscription, user: user, data: %{alerts: %{"mention" => true}}) + insert(:push_subscription, user: user2, data: %{alerts: %{"mention" => true}}) + + insert(:push_subscription, + user: user, + data: %{alerts: %{"follow" => true, "mention" => true}} + ) + + insert(:push_subscription, + user: user, + data: %{alerts: %{"follow" => true, "mention" => false}} + ) + + notif = + insert(:notification, + user: user, + activity: %Pleroma.Activity{ + data: %{ + "type" => "Create", + "actor" => user.ap_id, + "object" => %{"content" => " "Create", + "object" => %{ + "content" => + "Lorem ipsum dolor sit amet, consectetur :bear: adipiscing elit. Fusce sagittis finibus turpis." + } + } + } + }, + %{nickname: "Bob"} + ) == + "@Bob: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce sagittis fini..." + end + + test "renders body for follow activity" do + assert Impl.format_body(%{activity: %{data: %{"type" => "Follow"}}}, %{nickname: "Bob"}) == + "@Bob has followed you" + end + + test "renders body for announce activity" do + user = insert(:user) + + note = + insert(:note, %{ + data: %{ + "content" => + "Lorem ipsum dolor sit amet, consectetur :bear: adipiscing elit. Fusce sagittis finibus turpis." + } + }) + + note_activity = insert(:note_activity, %{note: note}) + announce_activity = insert(:announce_activity, %{user: user, note_activity: note_activity}) + + assert Impl.format_body(%{activity: announce_activity}, user) == + "@#{user.nickname} repeated: Lorem ipsum dolor sit amet, consectetur adipiscing elit. Fusce sagittis fini..." + end + + test "renders body for like activity" do + assert Impl.format_body(%{activity: %{data: %{"type" => "Like"}}}, %{nickname: "Bob"}) == + "@Bob has favorited your post" + end +end diff --git a/test/web/rel_me_test.exs b/test/web/rel_me_test.exs index ba8038e69..5188f4de1 100644 --- a/test/web/rel_me_test.exs +++ b/test/web/rel_me_test.exs @@ -9,6 +9,12 @@ defmodule Pleroma.Web.RelMeTest do } -> %Tesla.Env{status: 200, body: File.read!("test/fixtures/rel_me_anchor.html")} + %{ + method: :get, + url: "http://example.com/rel_me/anchor_nofollow" + } -> + %Tesla.Env{status: 200, body: File.read!("test/fixtures/rel_me_anchor_nofollow.html")} + %{ method: :get, url: "http://example.com/rel_me/link" @@ -33,6 +39,7 @@ test "parse/1" do assert Pleroma.Web.RelMe.parse("http://example.com/rel_me/link") == {:ok, hrefs} assert Pleroma.Web.RelMe.parse("http://example.com/rel_me/anchor") == {:ok, hrefs} + assert Pleroma.Web.RelMe.parse("http://example.com/rel_me/anchor_nofollow") == {:ok, hrefs} end test "maybe_put_rel_me/2" do @@ -49,6 +56,11 @@ test "maybe_put_rel_me/2" do assert Pleroma.Web.RelMe.maybe_put_rel_me("http://example.com/rel_me/anchor", profile_urls) == attr + assert Pleroma.Web.RelMe.maybe_put_rel_me( + "http://example.com/rel_me/anchor_nofollow", + profile_urls + ) == attr + assert Pleroma.Web.RelMe.maybe_put_rel_me("http://example.com/rel_me/link", profile_urls) == attr end diff --git a/test/web/rich_media/helpers_test.exs b/test/web/rich_media/helpers_test.exs new file mode 100644 index 000000000..60d93768f --- /dev/null +++ b/test/web/rich_media/helpers_test.exs @@ -0,0 +1,62 @@ +defmodule Pleroma.Web.RichMedia.HelpersTest do + use Pleroma.DataCase + + alias Pleroma.Web.CommonAPI + + import Pleroma.Factory + import Tesla.Mock + + setup do + mock(fn env -> apply(HttpRequestMock, :request, [env]) end) + :ok + end + + test "refuses to crawl incomplete 
URLs" do + user = insert(:user) + + {:ok, activity} = + CommonAPI.post(user, %{ + "status" => "[test](example.com/ogp)", + "content_type" => "text/markdown" + }) + + Pleroma.Config.put([:rich_media, :enabled], true) + + assert %{} == Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) + + Pleroma.Config.put([:rich_media, :enabled], false) + end + + test "refuses to crawl malformed URLs" do + user = insert(:user) + + {:ok, activity} = + CommonAPI.post(user, %{ + "status" => "[test](example.com[]/ogp)", + "content_type" => "text/markdown" + }) + + Pleroma.Config.put([:rich_media, :enabled], true) + + assert %{} == Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) + + Pleroma.Config.put([:rich_media, :enabled], false) + end + + test "crawls valid, complete URLs" do + user = insert(:user) + + {:ok, activity} = + CommonAPI.post(user, %{ + "status" => "[test](http://example.com/ogp)", + "content_type" => "text/markdown" + }) + + Pleroma.Config.put([:rich_media, :enabled], true) + + assert %{page_url: "http://example.com/ogp", rich_media: _} = + Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) + + Pleroma.Config.put([:rich_media, :enabled], false) + end +end diff --git a/test/web/salmon/salmon_test.exs b/test/web/salmon/salmon_test.exs index 9e583ba40..35503259b 100644 --- a/test/web/salmon/salmon_test.exs +++ b/test/web/salmon/salmon_test.exs @@ -4,10 +4,10 @@ defmodule Pleroma.Web.Salmon.SalmonTest do use Pleroma.DataCase - alias Pleroma.Web.Salmon alias Pleroma.Activity alias Pleroma.Repo alias Pleroma.User + alias Pleroma.Web.Salmon import Pleroma.Factory @magickey "RSA.pu0s-halox4tu7wmES1FVSx6u-4wc0YrUFXcqWXZG4-27UmbCOpMQftRCldNRfyA-qLbz-eqiwQhh-1EwUvjsD4cYbAHNGHwTvDOyx5AKthQUP44ykPv7kjKGh3DWKySJvcs9tlUG87hlo7AvnMo9pwRS_Zz2CacQ-MKaXyDepk=.AQAB" @@ -99,7 +99,7 @@ test "it pushes an activity to remote accounts it's addressed to" do } {:ok, activity} = Repo.insert(%Activity{data: activity_data, recipients: activity_data["to"]}) - user = Repo.get_by(User, ap_id: activity.data["actor"]) + user = User.get_by_ap_id(activity.data["actor"]) {:ok, user} = Pleroma.Web.WebFinger.ensure_keys_present(user) poster = fn url, _data, _headers -> diff --git a/test/web/streamer_test.exs b/test/web/streamer_test.exs index 16d7b9c24..bfe18cb7f 100644 --- a/test/web/streamer_test.exs +++ b/test/web/streamer_test.exs @@ -5,10 +5,10 @@ defmodule Pleroma.Web.StreamerTest do use Pleroma.DataCase - alias Pleroma.Web.Streamer alias Pleroma.List alias Pleroma.User alias Pleroma.Web.CommonAPI + alias Pleroma.Web.Streamer import Pleroma.Factory test "it sends to public" do @@ -39,7 +39,15 @@ test "it sends to public" do task = Task.async(fn -> - assert_receive {:text, _}, 4_000 + expected_event = + %{ + "event" => "delete", + "payload" => activity.id + } + |> Jason.encode!() + + assert_receive {:text, received_event}, 4_000 + assert received_event == expected_event end) fake_socket = %{ @@ -194,4 +202,34 @@ test "it send wanted private posts to list" do Task.await(task) end + + test "it doesn't send muted reblogs" do + user1 = insert(:user) + user2 = insert(:user) + user3 = insert(:user) + CommonAPI.hide_reblogs(user1, user2) + + task = + Task.async(fn -> + refute_receive {:text, _}, 1_000 + end) + + fake_socket = %{ + transport_pid: task.pid, + assigns: %{ + user: user1 + } + } + + {:ok, create_activity} = CommonAPI.post(user3, %{"status" => "I'm kawen"}) + {:ok, announce_activity, _} = CommonAPI.repeat(create_activity.id, user2) + + topics = %{ + "public" => [fake_socket] + } + + 
Streamer.push_to_socket(topics, "public", announce_activity) + + Task.await(task) + end end diff --git a/test/web/twitter_api/representers/activity_representer_test.exs b/test/web/twitter_api/representers/activity_representer_test.exs deleted file mode 100644 index 0e554623c..000000000 --- a/test/web/twitter_api/representers/activity_representer_test.exs +++ /dev/null @@ -1,166 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2019 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only - -defmodule Pleroma.Web.TwitterAPI.Representers.ActivityRepresenterTest do - use Pleroma.DataCase - alias Pleroma.User - alias Pleroma.Activity - alias Pleroma.Object - alias Pleroma.Web.TwitterAPI.Representers.ActivityRepresenter - alias Pleroma.Web.TwitterAPI.Representers.ObjectRepresenter - alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.TwitterAPI.UserView - import Pleroma.Factory - - test "a like activity" do - user = insert(:user) - note_activity = insert(:note_activity) - object = Object.get_by_ap_id(note_activity.data["object"]["id"]) - - {:ok, like_activity, _object} = ActivityPub.like(user, object) - - status = - ActivityRepresenter.to_map(like_activity, %{user: user, liked_activity: note_activity}) - - assert status["id"] == like_activity.id - assert status["in_reply_to_status_id"] == note_activity.id - - note_activity = Activity.get_by_ap_id(note_activity.data["id"]) - activity_actor = Repo.get_by(User, ap_id: note_activity.data["actor"]) - liked_status = ActivityRepresenter.to_map(note_activity, %{user: activity_actor, for: user}) - assert liked_status["favorited"] == true - assert status["activity_type"] == "like" - end - - test "an activity" do - user = insert(:user) - # {:ok, mentioned_user } = UserBuilder.insert(%{nickname: "shp", ap_id: "shp"}) - mentioned_user = insert(:user, %{nickname: "shp"}) - - # {:ok, follower} = UserBuilder.insert(%{following: [User.ap_followers(user)]}) - follower = insert(:user, %{following: [User.ap_followers(user)]}) - - object = %Object{ - data: %{ - "type" => "Image", - "url" => [ - %{ - "type" => "Link", - "mediaType" => "image/jpg", - "href" => "http://example.org/image.jpg" - } - ], - "uuid" => 1 - } - } - - content_html = - "Some :2hu: content mentioning @shp" - - content = HtmlSanitizeEx.strip_tags(content_html) - date = DateTime.from_naive!(~N[2016-05-24 13:26:08.003], "Etc/UTC") |> DateTime.to_iso8601() - - {:ok, convo_object} = Object.context_mapping("2hu") |> Repo.insert() - - to = [ - User.ap_followers(user), - "https://www.w3.org/ns/activitystreams#Public", - mentioned_user.ap_id - ] - - activity = %Activity{ - id: 1, - data: %{ - "type" => "Create", - "id" => "id", - "to" => to, - "actor" => User.ap_id(user), - "object" => %{ - "published" => date, - "type" => "Note", - "content" => content_html, - "summary" => "2hu :2hu:", - "inReplyToStatusId" => 213_123, - "attachment" => [ - object - ], - "external_url" => "some url", - "like_count" => 5, - "announcement_count" => 3, - "context" => "2hu", - "tag" => ["content", "mentioning", "nsfw"], - "emoji" => %{ - "2hu" => "corndog.png" - } - }, - "published" => date, - "context" => "2hu" - }, - local: false, - recipients: to - } - - expected_html = - "
    2hu \"2hu\"
    alert('YAY')Some \"2hu\" content mentioning
    @shp" - - expected_status = %{ - "id" => activity.id, - "user" => UserView.render("show.json", %{user: user, for: follower}), - "is_local" => false, - "statusnet_html" => expected_html, - "text" => "2hu :2hu:" <> content, - "is_post_verb" => true, - "created_at" => "Tue May 24 13:26:08 +0000 2016", - "in_reply_to_status_id" => 213_123, - "in_reply_to_screen_name" => nil, - "in_reply_to_user_id" => nil, - "in_reply_to_profileurl" => nil, - "in_reply_to_ostatus_uri" => nil, - "statusnet_conversation_id" => convo_object.id, - "attachments" => [ - ObjectRepresenter.to_map(object) - ], - "attentions" => [ - UserView.render("show.json", %{user: mentioned_user, for: follower}) - ], - "fave_num" => 5, - "repeat_num" => 3, - "favorited" => false, - "repeated" => false, - "pinned" => false, - "external_url" => "some url", - "tags" => ["nsfw", "content", "mentioning"], - "activity_type" => "post", - "possibly_sensitive" => true, - "uri" => activity.data["object"]["id"], - "visibility" => "direct", - "card" => nil, - "muted" => false, - "summary" => "2hu :2hu:", - "summary_html" => - "2hu \"2hu\"" - } - - assert ActivityRepresenter.to_map(activity, %{ - user: user, - for: follower, - mentioned: [mentioned_user] - }) == expected_status - end - - test "a delete activity" do - object = insert(:note) - user = User.get_by_ap_id(object.data["actor"]) - - {:ok, delete} = ActivityPub.delete(object) - - map = ActivityRepresenter.to_map(delete, %{user: user}) - - assert map["is_post_verb"] == false - assert map["activity_type"] == "delete" - assert map["uri"] == object.data["id"] - end -end diff --git a/test/web/twitter_api/twitter_api_controller_test.exs b/test/web/twitter_api/twitter_api_controller_test.exs index ce0812308..72b7ea85e 100644 --- a/test/web/twitter_api/twitter_api_controller_test.exs +++ b/test/web/twitter_api/twitter_api_controller_test.exs @@ -4,23 +4,23 @@ defmodule Pleroma.Web.TwitterAPI.ControllerTest do use Pleroma.Web.ConnCase - alias Pleroma.Web.TwitterAPI.Representers.ActivityRepresenter - alias Pleroma.Builders.ActivityBuilder - alias Pleroma.Builders.UserBuilder - alias Pleroma.Repo - alias Pleroma.Activity - alias Pleroma.User - alias Pleroma.Object - alias Pleroma.Notification - alias Pleroma.Web.ActivityPub.ActivityPub - alias Pleroma.Web.OAuth.Token - alias Pleroma.Web.TwitterAPI.Controller - alias Pleroma.Web.TwitterAPI.UserView - alias Pleroma.Web.TwitterAPI.NotificationView - alias Pleroma.Web.CommonAPI - alias Pleroma.Web.TwitterAPI.TwitterAPI alias Comeonin.Pbkdf2 alias Ecto.Changeset + alias Pleroma.Activity + alias Pleroma.Builders.ActivityBuilder + alias Pleroma.Builders.UserBuilder + alias Pleroma.Notification + alias Pleroma.Object + alias Pleroma.Repo + alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.CommonAPI + alias Pleroma.Web.OAuth.Token + alias Pleroma.Web.TwitterAPI.ActivityView + alias Pleroma.Web.TwitterAPI.Controller + alias Pleroma.Web.TwitterAPI.NotificationView + alias Pleroma.Web.TwitterAPI.TwitterAPI + alias Pleroma.Web.TwitterAPI.UserView import Pleroma.Factory import Mock @@ -116,7 +116,11 @@ test "with credentials", %{conn: conn, user: user} do |> post(request_path, %{status: "Nice meme.", visibility: "private"}) assert json_response(conn, 200) == - ActivityRepresenter.to_map(Repo.one(Activity), %{user: user, for: user}) + ActivityView.render("activity.json", %{ + activity: Repo.one(Activity), + user: user, + for: user + }) end end @@ -273,7 +277,7 @@ test "returns one status", %{conn: conn} do response = 
json_response(conn, 200) - assert response == ActivityRepresenter.to_map(activity, %{user: actor}) + assert response == ActivityView.render("activity.json", %{activity: activity, user: actor}) end end @@ -372,7 +376,8 @@ test "with credentials", %{conn: conn, user: current_user} do assert response == Enum.map(returned_activities, fn activity -> - ActivityRepresenter.to_map(activity, %{ + ActivityView.render("activity.json", %{ + activity: activity, user: User.get_cached_by_ap_id(activity.data["actor"]), for: current_user }) @@ -469,10 +474,10 @@ test "with credentials", %{conn: conn, user: current_user} do assert length(response) == 1 assert Enum.at(response, 0) == - ActivityRepresenter.to_map(activity, %{ + ActivityView.render("activity.json", %{ user: current_user, for: current_user, - mentioned: [current_user] + activity: activity }) end @@ -490,7 +495,7 @@ test "does not show DMs in mentions timeline", %{conn: conn, user: current_user} response = json_response(conn, 200) - assert length(response) == 0 + assert Enum.empty?(response) end end @@ -594,7 +599,9 @@ test "with user_id", %{conn: conn} do conn = get(conn, "/api/statuses/user_timeline.json", %{"user_id" => user.id}) response = json_response(conn, 200) assert length(response) == 1 - assert Enum.at(response, 0) == ActivityRepresenter.to_map(activity, %{user: user}) + + assert Enum.at(response, 0) == + ActivityView.render("activity.json", %{user: user, activity: activity}) end test "with screen_name", %{conn: conn} do @@ -604,7 +611,9 @@ test "with screen_name", %{conn: conn} do conn = get(conn, "/api/statuses/user_timeline.json", %{"screen_name" => user.nickname}) response = json_response(conn, 200) assert length(response) == 1 - assert Enum.at(response, 0) == ActivityRepresenter.to_map(activity, %{user: user}) + + assert Enum.at(response, 0) == + ActivityView.render("activity.json", %{user: user, activity: activity}) end test "with credentials", %{conn: conn, user: current_user} do @@ -620,7 +629,11 @@ test "with credentials", %{conn: conn, user: current_user} do assert length(response) == 1 assert Enum.at(response, 0) == - ActivityRepresenter.to_map(activity, %{user: current_user, for: current_user}) + ActivityView.render("activity.json", %{ + user: current_user, + for: current_user, + activity: activity + }) end test "with credentials with user_id", %{conn: conn, user: current_user} do @@ -635,7 +648,9 @@ test "with credentials with user_id", %{conn: conn, user: current_user} do response = json_response(conn, 200) assert length(response) == 1 - assert Enum.at(response, 0) == ActivityRepresenter.to_map(activity, %{user: user}) + + assert Enum.at(response, 0) == + ActivityView.render("activity.json", %{user: user, activity: activity}) end test "with credentials screen_name", %{conn: conn, user: current_user} do @@ -650,7 +665,9 @@ test "with credentials screen_name", %{conn: conn, user: current_user} do response = json_response(conn, 200) assert length(response) == 1 - assert Enum.at(response, 0) == ActivityRepresenter.to_map(activity, %{user: user}) + + assert Enum.at(response, 0) == + ActivityView.render("activity.json", %{user: user, activity: activity}) end test "with credentials with user_id, excluding RTs", %{conn: conn, user: current_user} do @@ -669,7 +686,9 @@ test "with credentials with user_id, excluding RTs", %{conn: conn, user: current response = json_response(conn, 200) assert length(response) == 1 - assert Enum.at(response, 0) == ActivityRepresenter.to_map(activity, %{user: user}) + + assert Enum.at(response, 0) == 
+ ActivityView.render("activity.json", %{user: user, activity: activity}) conn = conn @@ -678,7 +697,9 @@ test "with credentials with user_id, excluding RTs", %{conn: conn, user: current response = json_response(conn, 200) assert length(response) == 1 - assert Enum.at(response, 0) == ActivityRepresenter.to_map(activity, %{user: user}) + + assert Enum.at(response, 0) == + ActivityView.render("activity.json", %{user: user, activity: activity}) end end @@ -698,7 +719,7 @@ test "with credentials", %{conn: conn, user: current_user} do |> with_credentials(current_user.nickname, "test") |> post("/api/friendships/create.json", %{user_id: followed.id}) - current_user = Repo.get(User, current_user.id) + current_user = User.get_by_id(current_user.id) assert User.ap_followers(followed) in current_user.following assert json_response(conn, 200) == @@ -713,8 +734,8 @@ test "for restricted account", %{conn: conn, user: current_user} do |> with_credentials(current_user.nickname, "test") |> post("/api/friendships/create.json", %{user_id: followed.id}) - current_user = Repo.get(User, current_user.id) - followed = Repo.get(User, followed.id) + current_user = User.get_by_id(current_user.id) + followed = User.get_by_id(followed.id) refute User.ap_followers(followed) in current_user.following @@ -743,7 +764,7 @@ test "with credentials", %{conn: conn, user: current_user} do |> with_credentials(current_user.nickname, "test") |> post("/api/friendships/destroy.json", %{user_id: followed.id}) - current_user = Repo.get(User, current_user.id) + current_user = User.get_by_id(current_user.id) assert current_user.following == [current_user.ap_id] assert json_response(conn, 200) == @@ -767,7 +788,7 @@ test "with credentials", %{conn: conn, user: current_user} do |> with_credentials(current_user.nickname, "test") |> post("/api/blocks/create.json", %{user_id: blocked.id}) - current_user = Repo.get(User, current_user.id) + current_user = User.get_by_id(current_user.id) assert User.blocks?(current_user, blocked) assert json_response(conn, 200) == @@ -794,7 +815,7 @@ test "with credentials", %{conn: conn, user: current_user} do |> with_credentials(current_user.nickname, "test") |> post("/api/blocks/destroy.json", %{user_id: blocked.id}) - current_user = Repo.get(User, current_user.id) + current_user = User.get_by_id(current_user.id) assert current_user.info.blocks == [] assert json_response(conn, 200) == @@ -825,7 +846,7 @@ test "with credentials", %{conn: conn, user: current_user} do |> with_credentials(current_user.nickname, "test") |> post("/api/qvitter/update_avatar.json", %{img: avatar_image}) - current_user = Repo.get(User, current_user.id) + current_user = User.get_by_id(current_user.id) assert is_map(current_user.avatar) assert json_response(conn, 200) == @@ -933,11 +954,15 @@ test "with credentials", %{conn: conn, user: current_user} do |> with_credentials(current_user.nickname, "test") |> post(request_path) - activity = Repo.get(Activity, note_activity.id) - activity_user = Repo.get_by(User, ap_id: note_activity.data["actor"]) + activity = Activity.get_by_id(note_activity.id) + activity_user = User.get_by_ap_id(note_activity.data["actor"]) assert json_response(response, 200) == - ActivityRepresenter.to_map(activity, %{user: activity_user, for: current_user}) + ActivityView.render("activity.json", %{ + user: activity_user, + for: current_user, + activity: activity + }) end end @@ -967,11 +992,15 @@ test "with credentials", %{conn: conn, user: current_user} do |> with_credentials(current_user.nickname, "test") |> 
post(request_path) - activity = Repo.get(Activity, note_activity.id) - activity_user = Repo.get_by(User, ap_id: note_activity.data["actor"]) + activity = Activity.get_by_id(note_activity.id) + activity_user = User.get_by_ap_id(note_activity.data["actor"]) assert json_response(response, 200) == - ActivityRepresenter.to_map(activity, %{user: activity_user, for: current_user}) + ActivityView.render("activity.json", %{ + user: activity_user, + for: current_user, + activity: activity + }) end end @@ -992,7 +1021,7 @@ test "it creates a new user", %{conn: conn} do user = json_response(conn, 200) - fetched_user = Repo.get_by(User, nickname: "lain") + fetched_user = User.get_by_nickname("lain") assert user == UserView.render("show.json", %{user: fetched_user}) end @@ -1080,7 +1109,7 @@ test "it redirects to root url", %{conn: conn, user: user} do test "it confirms the user account", %{conn: conn, user: user} do get(conn, "/api/account/confirm_email/#{user.id}/#{user.info.confirmation_token}") - user = Repo.get(User, user.id) + user = User.get_by_id(user.id) refute user.info.confirmation_pending refute user.info.confirmation_token @@ -1698,7 +1727,7 @@ test "with credentials, valid password and matching new password and confirmatio }) assert json_response(conn, 200) == %{"status" => "success"} - fetched_user = Repo.get(User, current_user.id) + fetched_user = User.get_by_id(current_user.id) assert Pbkdf2.checkpw("newpass", fetched_user.password_hash) == true end end @@ -1739,8 +1768,8 @@ test "it lists friend requests" do {:ok, _activity} = ActivityPub.follow(other_user, user) - user = Repo.get(User, user.id) - other_user = Repo.get(User, other_user.id) + user = User.get_by_id(user.id) + other_user = User.get_by_id(other_user.id) assert User.following?(other_user, user) == false @@ -1779,8 +1808,8 @@ test "it approves a friend request" do {:ok, _activity} = ActivityPub.follow(other_user, user) - user = Repo.get(User, user.id) - other_user = Repo.get(User, other_user.id) + user = User.get_by_id(user.id) + other_user = User.get_by_id(other_user.id) assert User.following?(other_user, user) == false @@ -1802,8 +1831,8 @@ test "it denies a friend request" do {:ok, _activity} = ActivityPub.follow(other_user, user) - user = Repo.get(User, user.id) - other_user = Repo.get(User, other_user.id) + user = User.get_by_id(user.id) + other_user = User.get_by_id(other_user.id) assert User.following?(other_user, user) == false @@ -1955,7 +1984,7 @@ test "with credentials", %{conn: conn, user: user} do user = refresh_record(user) assert json_response(response, 200) == - ActivityRepresenter.to_map(activity, %{user: user, for: user}) + ActivityView.render("activity.json", %{user: user, for: user, activity: activity}) end end @@ -1985,7 +2014,7 @@ test "with credentials", %{conn: conn, user: user} do user = refresh_record(user) assert json_response(response, 200) == - ActivityRepresenter.to_map(activity, %{user: user, for: user}) + ActivityView.render("activity.json", %{user: user, for: user, activity: activity}) end end diff --git a/test/web/twitter_api/twitter_api_test.exs b/test/web/twitter_api/twitter_api_test.exs index aa2a4d650..a4540e651 100644 --- a/test/web/twitter_api/twitter_api_test.exs +++ b/test/web/twitter_api/twitter_api_test.exs @@ -4,18 +4,23 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPITest do use Pleroma.DataCase - alias Pleroma.Web.TwitterAPI.TwitterAPI - alias Pleroma.Web.TwitterAPI.UserView alias Pleroma.Activity - alias Pleroma.User alias Pleroma.Object alias Pleroma.Repo + alias Pleroma.User 
alias Pleroma.UserInviteToken alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.TwitterAPI.ActivityView + alias Pleroma.Web.TwitterAPI.TwitterAPI + alias Pleroma.Web.TwitterAPI.UserView import Pleroma.Factory + setup_all do + Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end) + :ok + end + test "create a status" do user = insert(:user) mentioned_user = insert(:user, %{nickname: "shp", ap_id: "shp"}) @@ -275,7 +280,7 @@ test "it registers a new user and returns the user." do {:ok, user} = TwitterAPI.register_user(data) - fetched_user = Repo.get_by(User, nickname: "lain") + fetched_user = User.get_by_nickname("lain") assert UserView.render("show.json", %{user: user}) == UserView.render("show.json", %{user: fetched_user}) @@ -293,13 +298,12 @@ test "it registers a new user with empty string in bio and returns the user." do {:ok, user} = TwitterAPI.register_user(data) - fetched_user = Repo.get_by(User, nickname: "lain") + fetched_user = User.get_by_nickname("lain") assert UserView.render("show.json", %{user: user}) == UserView.render("show.json", %{user: fetched_user}) end - @moduletag skip: "needs 'account_activation_required: true' in config" test "it sends confirmation email if :account_activation_required is specified in instance config" do setting = Pleroma.Config.get([:instance, :account_activation_required]) @@ -353,68 +357,313 @@ test "it registers a new user and parses mentions in the bio" do assert user2.bio == expected_text end - @moduletag skip: "needs 'registrations_open: false' in config" - test "it registers a new user via invite token and returns the user." do - {:ok, token} = UserInviteToken.create_token() + describe "register with one time token" do + setup do + setting = Pleroma.Config.get([:instance, :registrations_open]) - data = %{ - "nickname" => "vinny", - "email" => "pasta@pizza.vs", - "fullname" => "Vinny Vinesauce", - "bio" => "streamer", - "password" => "hiptofbees", - "confirm" => "hiptofbees", - "token" => token.token - } + if setting do + Pleroma.Config.put([:instance, :registrations_open], false) + on_exit(fn -> Pleroma.Config.put([:instance, :registrations_open], setting) end) + end - {:ok, user} = TwitterAPI.register_user(data) + :ok + end - fetched_user = Repo.get_by(User, nickname: "vinny") - token = Repo.get_by(UserInviteToken, token: token.token) + test "returns user on success" do + {:ok, invite} = UserInviteToken.create_invite() - assert token.used == true + data = %{ + "nickname" => "vinny", + "email" => "pasta@pizza.vs", + "fullname" => "Vinny Vinesauce", + "bio" => "streamer", + "password" => "hiptofbees", + "confirm" => "hiptofbees", + "token" => invite.token + } - assert UserView.render("show.json", %{user: user}) == - UserView.render("show.json", %{user: fetched_user}) + {:ok, user} = TwitterAPI.register_user(data) + + fetched_user = User.get_by_nickname("vinny") + invite = Repo.get_by(UserInviteToken, token: invite.token) + + assert invite.used == true + + assert UserView.render("show.json", %{user: user}) == + UserView.render("show.json", %{user: fetched_user}) + end + + test "returns error on invalid token" do + data = %{ + "nickname" => "GrimReaper", + "email" => "death@reapers.afterlife", + "fullname" => "Reaper Grim", + "bio" => "Your time has come", + "password" => "scythe", + "confirm" => "scythe", + "token" => "DudeLetMeInImAFairy" + } + + {:error, msg} = TwitterAPI.register_user(data) + + assert msg == "Invalid token" + refute User.get_by_nickname("GrimReaper") + end + + test "returns error on expired 
token" do + {:ok, invite} = UserInviteToken.create_invite() + UserInviteToken.update_invite!(invite, used: true) + + data = %{ + "nickname" => "GrimReaper", + "email" => "death@reapers.afterlife", + "fullname" => "Reaper Grim", + "bio" => "Your time has come", + "password" => "scythe", + "confirm" => "scythe", + "token" => invite.token + } + + {:error, msg} = TwitterAPI.register_user(data) + + assert msg == "Expired token" + refute User.get_by_nickname("GrimReaper") + end end - @moduletag skip: "needs 'registrations_open: false' in config" - test "it returns an error if invalid token submitted" do - data = %{ - "nickname" => "GrimReaper", - "email" => "death@reapers.afterlife", - "fullname" => "Reaper Grim", - "bio" => "Your time has come", - "password" => "scythe", - "confirm" => "scythe", - "token" => "DudeLetMeInImAFairy" - } + describe "registers with date limited token" do + setup do + setting = Pleroma.Config.get([:instance, :registrations_open]) - {:error, msg} = TwitterAPI.register_user(data) + if setting do + Pleroma.Config.put([:instance, :registrations_open], false) + on_exit(fn -> Pleroma.Config.put([:instance, :registrations_open], setting) end) + end - assert msg == "Invalid token" - refute Repo.get_by(User, nickname: "GrimReaper") + data = %{ + "nickname" => "vinny", + "email" => "pasta@pizza.vs", + "fullname" => "Vinny Vinesauce", + "bio" => "streamer", + "password" => "hiptofbees", + "confirm" => "hiptofbees" + } + + check_fn = fn invite -> + data = Map.put(data, "token", invite.token) + {:ok, user} = TwitterAPI.register_user(data) + fetched_user = User.get_by_nickname("vinny") + + assert UserView.render("show.json", %{user: user}) == + UserView.render("show.json", %{user: fetched_user}) + end + + {:ok, data: data, check_fn: check_fn} + end + + test "returns user on success", %{check_fn: check_fn} do + {:ok, invite} = UserInviteToken.create_invite(%{expires_at: Date.utc_today()}) + + check_fn.(invite) + + invite = Repo.get_by(UserInviteToken, token: invite.token) + + refute invite.used + end + + test "returns user on token which expired tomorrow", %{check_fn: check_fn} do + {:ok, invite} = UserInviteToken.create_invite(%{expires_at: Date.add(Date.utc_today(), 1)}) + + check_fn.(invite) + + invite = Repo.get_by(UserInviteToken, token: invite.token) + + refute invite.used + end + + test "returns an error on overdue date", %{data: data} do + {:ok, invite} = UserInviteToken.create_invite(%{expires_at: Date.add(Date.utc_today(), -1)}) + + data = Map.put(data, "token", invite.token) + + {:error, msg} = TwitterAPI.register_user(data) + + assert msg == "Expired token" + refute User.get_by_nickname("vinny") + invite = Repo.get_by(UserInviteToken, token: invite.token) + + refute invite.used + end end - @moduletag skip: "needs 'registrations_open: false' in config" - test "it returns an error if expired token submitted" do - {:ok, token} = UserInviteToken.create_token() - UserInviteToken.mark_as_used(token.token) + describe "registers with reusable token" do + setup do + setting = Pleroma.Config.get([:instance, :registrations_open]) - data = %{ - "nickname" => "GrimReaper", - "email" => "death@reapers.afterlife", - "fullname" => "Reaper Grim", - "bio" => "Your time has come", - "password" => "scythe", - "confirm" => "scythe", - "token" => token.token - } + if setting do + Pleroma.Config.put([:instance, :registrations_open], false) + on_exit(fn -> Pleroma.Config.put([:instance, :registrations_open], setting) end) + end - {:error, msg} = TwitterAPI.register_user(data) + :ok + end - 
assert msg == "Expired token" - refute Repo.get_by(User, nickname: "GrimReaper") + test "returns user on success, after him registration fails" do + {:ok, invite} = UserInviteToken.create_invite(%{max_use: 100}) + + UserInviteToken.update_invite!(invite, uses: 99) + + data = %{ + "nickname" => "vinny", + "email" => "pasta@pizza.vs", + "fullname" => "Vinny Vinesauce", + "bio" => "streamer", + "password" => "hiptofbees", + "confirm" => "hiptofbees", + "token" => invite.token + } + + {:ok, user} = TwitterAPI.register_user(data) + fetched_user = User.get_by_nickname("vinny") + invite = Repo.get_by(UserInviteToken, token: invite.token) + + assert invite.used == true + + assert UserView.render("show.json", %{user: user}) == + UserView.render("show.json", %{user: fetched_user}) + + data = %{ + "nickname" => "GrimReaper", + "email" => "death@reapers.afterlife", + "fullname" => "Reaper Grim", + "bio" => "Your time has come", + "password" => "scythe", + "confirm" => "scythe", + "token" => invite.token + } + + {:error, msg} = TwitterAPI.register_user(data) + + assert msg == "Expired token" + refute User.get_by_nickname("GrimReaper") + end + end + + describe "registers with reusable date limited token" do + setup do + setting = Pleroma.Config.get([:instance, :registrations_open]) + + if setting do + Pleroma.Config.put([:instance, :registrations_open], false) + on_exit(fn -> Pleroma.Config.put([:instance, :registrations_open], setting) end) + end + + :ok + end + + test "returns user on success" do + {:ok, invite} = UserInviteToken.create_invite(%{expires_at: Date.utc_today(), max_use: 100}) + + data = %{ + "nickname" => "vinny", + "email" => "pasta@pizza.vs", + "fullname" => "Vinny Vinesauce", + "bio" => "streamer", + "password" => "hiptofbees", + "confirm" => "hiptofbees", + "token" => invite.token + } + + {:ok, user} = TwitterAPI.register_user(data) + fetched_user = User.get_by_nickname("vinny") + invite = Repo.get_by(UserInviteToken, token: invite.token) + + refute invite.used + + assert UserView.render("show.json", %{user: user}) == + UserView.render("show.json", %{user: fetched_user}) + end + + test "error after max uses" do + {:ok, invite} = UserInviteToken.create_invite(%{expires_at: Date.utc_today(), max_use: 100}) + + UserInviteToken.update_invite!(invite, uses: 99) + + data = %{ + "nickname" => "vinny", + "email" => "pasta@pizza.vs", + "fullname" => "Vinny Vinesauce", + "bio" => "streamer", + "password" => "hiptofbees", + "confirm" => "hiptofbees", + "token" => invite.token + } + + {:ok, user} = TwitterAPI.register_user(data) + fetched_user = User.get_by_nickname("vinny") + invite = Repo.get_by(UserInviteToken, token: invite.token) + assert invite.used == true + + assert UserView.render("show.json", %{user: user}) == + UserView.render("show.json", %{user: fetched_user}) + + data = %{ + "nickname" => "GrimReaper", + "email" => "death@reapers.afterlife", + "fullname" => "Reaper Grim", + "bio" => "Your time has come", + "password" => "scythe", + "confirm" => "scythe", + "token" => invite.token + } + + {:error, msg} = TwitterAPI.register_user(data) + + assert msg == "Expired token" + refute User.get_by_nickname("GrimReaper") + end + + test "returns error on overdue date" do + {:ok, invite} = + UserInviteToken.create_invite(%{expires_at: Date.add(Date.utc_today(), -1), max_use: 100}) + + data = %{ + "nickname" => "GrimReaper", + "email" => "death@reapers.afterlife", + "fullname" => "Reaper Grim", + "bio" => "Your time has come", + "password" => "scythe", + "confirm" => "scythe", + "token" => 
invite.token + } + + {:error, msg} = TwitterAPI.register_user(data) + + assert msg == "Expired token" + refute User.get_by_nickname("GrimReaper") + end + + test "returns error on with overdue date and after max" do + {:ok, invite} = + UserInviteToken.create_invite(%{expires_at: Date.add(Date.utc_today(), -1), max_use: 100}) + + UserInviteToken.update_invite!(invite, uses: 100) + + data = %{ + "nickname" => "GrimReaper", + "email" => "death@reapers.afterlife", + "fullname" => "Reaper Grim", + "bio" => "Your time has come", + "password" => "scythe", + "confirm" => "scythe", + "token" => invite.token + } + + {:error, msg} = TwitterAPI.register_user(data) + + assert msg == "Expired token" + refute User.get_by_nickname("GrimReaper") + end end test "it returns the error on registration problems" do @@ -429,7 +678,7 @@ test "it returns the error on registration problems" do {:error, error_object} = TwitterAPI.register_user(data) assert is_binary(error_object[:error]) - refute Repo.get_by(User, nickname: "lain") + refute User.get_by_nickname("lain") end test "it assigns an integer conversation_id" do @@ -445,22 +694,6 @@ test "it assigns an integer conversation_id" do :ok end - describe "context_to_conversation_id" do - test "creates a mapping object" do - conversation_id = TwitterAPI.context_to_conversation_id("random context") - object = Object.get_by_ap_id("random context") - - assert conversation_id == object.id - end - - test "returns an existing mapping for an existing object" do - {:ok, object} = Object.context_mapping("random context") |> Repo.insert() - conversation_id = TwitterAPI.context_to_conversation_id("random context") - - assert conversation_id == object.id - end - end - describe "fetching a user by uri" do test "fetches a user by uri" do id = "https://mastodon.social/users/lambadalambda" @@ -472,6 +705,7 @@ test "fetches a user by uri" do # Also fetches the feed. 
# assert Activity.get_create_by_object_ap_id("tag:mastodon.social,2017-04-05:objectId=1641750:objectType=Status") + # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength end end end diff --git a/test/web/twitter_api/util_controller_test.exs b/test/web/twitter_api/util_controller_test.exs index fc762ab18..a4b3d651a 100644 --- a/test/web/twitter_api/util_controller_test.exs +++ b/test/web/twitter_api/util_controller_test.exs @@ -1,8 +1,17 @@ defmodule Pleroma.Web.TwitterAPI.UtilControllerTest do use Pleroma.Web.ConnCase + alias Pleroma.Notification + alias Pleroma.Repo + alias Pleroma.User + alias Pleroma.Web.CommonAPI import Pleroma.Factory + setup do + Tesla.Mock.mock(fn env -> apply(HttpRequestMock, :request, [env]) end) + :ok + end + describe "POST /api/pleroma/follow_import" do test "it returns HTTP 200", %{conn: conn} do user1 = insert(:user) @@ -52,7 +61,69 @@ test "it returns HTTP 200", %{conn: conn} do end end + describe "POST /api/pleroma/notifications/read" do + test "it marks a single notification as read", %{conn: conn} do + user1 = insert(:user) + user2 = insert(:user) + {:ok, activity1} = CommonAPI.post(user2, %{"status" => "hi @#{user1.nickname}"}) + {:ok, activity2} = CommonAPI.post(user2, %{"status" => "hi @#{user1.nickname}"}) + {:ok, [notification1]} = Notification.create_notifications(activity1) + {:ok, [notification2]} = Notification.create_notifications(activity2) + + conn + |> assign(:user, user1) + |> post("/api/pleroma/notifications/read", %{"id" => "#{notification1.id}"}) + |> json_response(:ok) + + assert Repo.get(Notification, notification1.id).seen + refute Repo.get(Notification, notification2.id).seen + end + end + + describe "PUT /api/pleroma/notification_settings" do + test "it updates notification settings", %{conn: conn} do + user = insert(:user) + + conn + |> assign(:user, user) + |> put("/api/pleroma/notification_settings", %{ + "remote" => false, + "followers" => false, + "bar" => 1 + }) + |> json_response(:ok) + + user = Repo.get(User, user.id) + + assert %{"remote" => false, "local" => true, "followers" => false, "follows" => true} == + user.info.notification_settings + end + end + describe "GET /api/statusnet/config.json" do + test "returns the state of safe_dm_mentions flag", %{conn: conn} do + option = Pleroma.Config.get([:instance, :safe_dm_mentions]) + Pleroma.Config.put([:instance, :safe_dm_mentions], true) + + response = + conn + |> get("/api/statusnet/config.json") + |> json_response(:ok) + + assert response["site"]["safeDMMentionsEnabled"] == "1" + + Pleroma.Config.put([:instance, :safe_dm_mentions], false) + + response = + conn + |> get("/api/statusnet/config.json") + |> json_response(:ok) + + assert response["site"]["safeDMMentionsEnabled"] == "0" + + Pleroma.Config.put([:instance, :safe_dm_mentions], option) + end + test "it returns the managed config", %{conn: conn} do Pleroma.Config.put([:instance, :managed_config], false) Pleroma.Config.put([:fe], theme: "rei-ayanami-towel") @@ -119,4 +190,44 @@ test "returns everything in :pleroma, :frontend_configurations", %{conn: conn} d assert response == Jason.encode!(config |> Enum.into(%{})) |> Jason.decode!() end end + + describe "/api/pleroma/emoji" do + test "returns json with custom emoji with tags", %{conn: conn} do + emoji = + conn + |> get("/api/pleroma/emoji") + |> json_response(200) + + assert Enum.all?(emoji, fn + {_key, + %{ + "image_url" => url, + "tags" => tags + }} -> + is_binary(url) and is_list(tags) + end) + end + end + + describe "GET 
/ostatus_subscribe?acct=...." do + test "adds status to pleroma instance if the `acct` is a status", %{conn: conn} do + conn = + get( + conn, + "/ostatus_subscribe?acct=https://mastodon.social/users/emelie/statuses/101849165031453009" + ) + + assert redirected_to(conn) =~ "/notice/" + end + + test "show follow account page if the `acct` is a account link", %{conn: conn} do + response = + get( + conn, + "/ostatus_subscribe?acct=https://mastodon.social/users/emelie" + ) + + assert html_response(response, 200) =~ "Log in to follow" + end + end end diff --git a/test/web/twitter_api/views/activity_view_test.exs b/test/web/twitter_api/views/activity_view_test.exs index 0a5384f34..ee9a0c834 100644 --- a/test/web/twitter_api/views/activity_view_test.exs +++ b/test/web/twitter_api/views/activity_view_test.exs @@ -5,15 +5,14 @@ defmodule Pleroma.Web.TwitterAPI.ActivityViewTest do use Pleroma.DataCase + alias Pleroma.Activity + alias Pleroma.Repo + alias Pleroma.User + alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI.Utils alias Pleroma.Web.TwitterAPI.ActivityView alias Pleroma.Web.TwitterAPI.UserView - alias Pleroma.Web.TwitterAPI.TwitterAPI - alias Pleroma.Repo - alias Pleroma.Activity - alias Pleroma.User - alias Pleroma.Web.ActivityPub.ActivityPub import Pleroma.Factory import Tesla.Mock @@ -82,7 +81,7 @@ test "a create activity with a html status" do result = ActivityView.render("activity.json", activity: activity) assert result["statusnet_html"] == - "#Bike log - Commute Tuesday
    https://pla.bike/posts/20181211/
    #cycling #CHScycling #commute
    MVIMG_20181211_054020.jpg" + "#Bike log - Commute Tuesday
    https://pla.bike/posts/20181211/
    #cycling #CHScycling #commute
    MVIMG_20181211_054020.jpg" assert result["text"] == "#Bike log - Commute Tuesday\nhttps://pla.bike/posts/20181211/\n#cycling #CHScycling #commute\nMVIMG_20181211_054020.jpg" @@ -129,7 +128,7 @@ test "a create activity with a note" do result = ActivityView.render("activity.json", activity: activity) - convo_id = TwitterAPI.context_to_conversation_id(activity.data["object"]["context"]) + convo_id = Utils.context_to_conversation_id(activity.data["object"]["context"]) expected = %{ "activity_type" => "post", @@ -177,12 +176,12 @@ test "a list of activities" do other_user = insert(:user, %{nickname: "shp"}) {:ok, activity} = CommonAPI.post(user, %{"status" => "Hey @shp!"}) - convo_id = TwitterAPI.context_to_conversation_id(activity.data["object"]["context"]) + convo_id = Utils.context_to_conversation_id(activity.data["object"]["context"]) mocks = [ { - TwitterAPI, - [], + Utils, + [:passthrough], [context_to_conversation_id: fn _ -> false end] }, { @@ -197,7 +196,7 @@ test "a list of activities" do assert result["statusnet_conversation_id"] == convo_id assert result["user"] - refute called(TwitterAPI.context_to_conversation_id(:_)) + refute called(Utils.context_to_conversation_id(:_)) refute called(User.get_cached_by_ap_id(user.ap_id)) refute called(User.get_cached_by_ap_id(other_user.ap_id)) end @@ -280,9 +279,9 @@ test "an announce activity" do {:ok, activity} = CommonAPI.post(user, %{"status" => "Hey @shp!"}) {:ok, announce, _object} = CommonAPI.repeat(activity.id, other_user) - convo_id = TwitterAPI.context_to_conversation_id(activity.data["object"]["context"]) + convo_id = Utils.context_to_conversation_id(activity.data["object"]["context"]) - activity = Repo.get(Activity, activity.id) + activity = Activity.get_by_id(activity.id) result = ActivityView.render("activity.json", activity: announce) diff --git a/test/web/twitter_api/views/notification_view_test.exs b/test/web/twitter_api/views/notification_view_test.exs index 3a67f7292..6baeeaf63 100644 --- a/test/web/twitter_api/views/notification_view_test.exs +++ b/test/web/twitter_api/views/notification_view_test.exs @@ -5,14 +5,14 @@ defmodule Pleroma.Web.TwitterAPI.NotificationViewTest do use Pleroma.DataCase - alias Pleroma.User alias Pleroma.Notification - alias Pleroma.Web.TwitterAPI.TwitterAPI - alias Pleroma.Web.TwitterAPI.NotificationView - alias Pleroma.Web.TwitterAPI.UserView - alias Pleroma.Web.TwitterAPI.ActivityView - alias Pleroma.Web.CommonAPI.Utils + alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.CommonAPI.Utils + alias Pleroma.Web.TwitterAPI.ActivityView + alias Pleroma.Web.TwitterAPI.NotificationView + alias Pleroma.Web.TwitterAPI.TwitterAPI + alias Pleroma.Web.TwitterAPI.UserView import Pleroma.Factory diff --git a/test/web/twitter_api/views/user_view_test.exs b/test/web/twitter_api/views/user_view_test.exs index 114f24a1c..0feaf4b64 100644 --- a/test/web/twitter_api/views/user_view_test.exs +++ b/test/web/twitter_api/views/user_view_test.exs @@ -6,8 +6,8 @@ defmodule Pleroma.Web.TwitterAPI.UserViewTest do use Pleroma.DataCase alias Pleroma.User - alias Pleroma.Web.TwitterAPI.UserView alias Pleroma.Web.CommonAPI.Utils + alias Pleroma.Web.TwitterAPI.UserView import Pleroma.Factory @@ -292,7 +292,7 @@ test "A blocked user for the blocker" do } } - blocker = Repo.get(User, blocker.id) + blocker = User.get_by_id(blocker.id) assert represented == UserView.render("show.json", %{user: user, for: blocker}) end diff --git a/test/web/websub/websub_controller_test.exs 
b/test/web/websub/websub_controller_test.exs index 87b01d89b..1e69ed01a 100644 --- a/test/web/websub/websub_controller_test.exs +++ b/test/web/websub/websub_controller_test.exs @@ -5,10 +5,10 @@ defmodule Pleroma.Web.Websub.WebsubControllerTest do use Pleroma.Web.ConnCase import Pleroma.Factory - alias Pleroma.Web.Websub.WebsubClientSubscription alias Pleroma.Activity alias Pleroma.Repo alias Pleroma.Web.Websub + alias Pleroma.Web.Websub.WebsubClientSubscription test "websub subscription request", %{conn: conn} do user = insert(:user) diff --git a/test/web/websub/websub_test.exs b/test/web/websub/websub_test.exs index 9a9b9df02..74386d7db 100644 --- a/test/web/websub/websub_test.exs +++ b/test/web/websub/websub_test.exs @@ -4,11 +4,13 @@ defmodule Pleroma.Web.WebsubTest do use Pleroma.DataCase - alias Pleroma.Web.Websub - alias Pleroma.Web.Websub.WebsubServerSubscription - alias Pleroma.Web.Websub.WebsubClientSubscription - import Pleroma.Factory + alias Pleroma.Web.Router.Helpers + alias Pleroma.Web.Websub + alias Pleroma.Web.Websub.WebsubClientSubscription + alias Pleroma.Web.Websub.WebsubServerSubscription + + import Pleroma.Factory import Tesla.Mock setup do