190 files changed, 3741 insertions, 692 deletions
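The CI, Dockerfile, and changelog hunks that follow raise the minimum supported toolchain to Elixir 1.14 on Erlang/OTP 23, with OTP 25/26-based images used for CI and releases. As a minimal sketch (not part of this diff), the versions running on a host can be checked from Elixir itself before upgrading an instance:

```elixir
# Minimal sketch, not part of this diff: print the host's toolchain versions
# to compare against the new minimums (Elixir >= 1.14, Erlang/OTP >= 23).
IO.puts("Elixir #{System.version()} on Erlang/OTP #{System.otp_release()}")
```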
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 76d1a4210..675d0e067 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,8 +1,8 @@ -image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-25 +image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.14.5-otp-25 variables: &global_variables # Only used for the release - ELIXIR_VER: 1.13.4 + ELIXIR_VER: 1.17.3 POSTGRES_DB: pleroma_test POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres @@ -71,7 +71,7 @@ check-changelog: tags: - amd64 -build-1.13.4-otp-25: +build-1.14.5-otp-25: extends: - .build_changes_policy - .using-ci-base @@ -119,7 +119,7 @@ benchmark: - mix ecto.migrate - mix pleroma.load_testing -unit-testing-1.13.4-otp-25: +unit-testing-1.14.5-otp-25: extends: - .build_changes_policy - .using-ci-base @@ -272,7 +272,8 @@ stop_review_app: amd64: stage: release - image: elixir:$ELIXIR_VER + image: + name: hexpm/elixir-amd64:1.17.3-erlang-26.2.5.6-ubuntu-focal-20241011 only: &release-only - stable@pleroma/pleroma - develop@pleroma/pleroma @@ -297,8 +298,9 @@ amd64: variables: &release-variables MIX_ENV: prod VIX_COMPILATION_MODE: PLATFORM_PROVIDED_LIBVIPS + DEBIAN_FRONTEND: noninteractive before_script: &before-release - - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev + - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev git - echo "import Config" > config/prod.secret.exs - mix local.hex --force - mix local.rebar --force @@ -313,7 +315,8 @@ amd64-musl: stage: release artifacts: *release-artifacts only: *release-only - image: elixir:$ELIXIR_VER-alpine + image: + name: hexpm/elixir-amd64:1.17.3-erlang-26.2.5.6-alpine-3.17.9 tags: - amd64 cache: *release-cache @@ -327,6 +330,7 @@ amd64-musl: arm: stage: release + allow_failure: true artifacts: *release-artifacts only: *release-only tags: @@ -355,7 +359,8 @@ arm64: only: *release-only tags: - arm - image: arm64v8/elixir:$ELIXIR_VER + image: + name: hexpm/elixir-arm64:1.17.3-erlang-26.2.5.6-ubuntu-focal-20241011 cache: *release-cache variables: *release-variables before_script: *before-release @@ -367,7 +372,8 @@ arm64-musl: only: *release-only tags: - arm - image: arm64v8/elixir:$ELIXIR_VER-alpine + image: + name: hexpm/elixir-arm64:1.17.3-erlang-26.2.5.6-alpine-3.17.9 cache: *release-cache variables: *release-variables before_script: *before-release-musl diff --git a/CHANGELOG.md b/CHANGELOG.md index 61bb2ab54..71178c89a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,77 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). +## 2.8.0 + +### Changed +- Metadata: Do not include .atom feed links for remote accounts +- Bumped `fast_html` to v2.3.0, which notably allows to use system-installed lexbor with passing `WITH_SYSTEM_LEXBOR=1` environment variable at build-time +- Dedupe upload filter now uses a three-level sharding directory structure +- Deprecate `/api/v1/pleroma/accounts/:id/subscribe`/`unsubscribe` +- Restrict incoming activities from unknown actors to a subset that does not imply a previous relationship and early rejection of unrecognized activity types. +- Elixir 1.14 and Erlang/OTP 23 is now the minimum supported release +- Support `id` param in `GET /api/v1/statuses` +- LDAP authentication has been refactored to operate as a GenServer process which will maintain an active connection to the LDAP server. 
+- Fix 'Setting a marker should mark notifications as read' +- Adjust more Oban workers to enforce unique job constraints. +- Oban updated to 2.18.3 +- Publisher behavior improvement when snoozing Oban jobs due to Gun connection pool contention. +- Poll results refreshing is handled asynchronously and will not attempt to keep fetching updates to a closed poll. +- Tuning for release builds to lower CPU usage. +- Rich Media preview fetching will skip making an HTTP HEAD request to check a URL for allowed content type and length if the Tesla adapter is Gun or Finch +- Fix: nonexistent users will not generate metadata for search engine opt-out +- Update Oban to 2.18 +- Worker configuration is no longer available. This only affects custom max_retries values for a couple Oban queues. + +### Added +- Add metadata provider for ActivityPub alternate links +- Added support for argon2 passwords and their conversion for migration from Akkoma fork to upstream. +- Respect :restrict_unauthenticated for hashtag rss/atom feeds +- LDAP configuration now permits overriding the CA root certificate file for TLS validation. +- LDAP now supports users changing their passwords +- Include list id in StatusView +- Added MRF.FODirectReply which changes replies to followers-only posts to be direct. +- Add `id_filter` to MRF to filter URLs and their domain prior to fetching +- Added MRF.QuietReply which prevents replies to public posts from being published to the timelines +- Add `group_key` to notifications +- Allow providing avatar/header descriptions +- Added RemoteReportPolicy from Rebased for handling bogus federated reports +- scrubbers/default: Allow "mention hashtag" classes used by Mastodon +- Added dependencies for Swoosh's Mua mail adapter +- Include session scopes in TokenView + +### Fixed +- Verify a local Update sent through AP C2S so users can only update their own objects +- Fixed malformed follow requests that cause them to appear stuck pending due to the recipient being unable to process them. +- Fix incoming Block activities being rejected +- STARTTLS certificate and hostname verification for LDAP authentication +- LDAPS connections (implicit TLS) are now supported. +- Fix /api/v2/media returning the wrong status code (202) for media processed synchronously +- Miscellaneous fixes for Meilisearch support +- Fix pleroma_ctl mix task calls sometimes not being found +- Add a rate limiter to the OAuth App creation endpoint and ensure registered apps are assigned to users. +- ReceiverWorker will cancel processing jobs instead of retrying if the user cannot be fetched due to 403, 404, or 410 errors or if the account is disabled locally. +- Address case where instance reachability status couldn't be updated +- Remote Fetcher Worker recognizes more permanent failure errors +- StreamerView: Do not leak follows count if hidden +- Imports of blocks, mutes, and follows would retry repeatedly due to incorrect error handling and all work executed in a single job +- Make vapid_config return empty array, fixing preloading for instances without push notifications configured + +### Removed +- Remove stub for /api/v1/accounts/:id/identity_proofs (deprecated by Mastodon 3.5.0) + +## 2.7.1 + +### Changed +- Accept `application/activity+json` for requests to `/.well-known/nodeinfo` + +### Fixed +- Truncate remote user fields, which avoids them getting rejected +- Improve the `FollowValidator` to successfully process incoming activities with an errant `cc` field.
+- Resolved edge case where the API can report you are following a user but the relationship is not fully established. +- The Swoosh email adapter for Mailgun was missing a new dependency on `:multipart` +- Fix Mastodon WebSocket authentication + ## 2.7.0 ### Security diff --git a/Dockerfile b/Dockerfile index 72461305c..fff58154e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,8 @@ +# https://hub.docker.com/r/hexpm/elixir/tags ARG ELIXIR_IMG=hexpm/elixir -ARG ELIXIR_VER=1.13.4 -ARG ERLANG_VER=24.3.4.15 -ARG ALPINE_VER=3.17.5 +ARG ELIXIR_VER=1.14.5 +ARG ERLANG_VER=25.3.2.14 +ARG ALPINE_VER=3.17.9 FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as build diff --git a/changelog.d/301-small-image-redirect.change b/changelog.d/301-small-image-redirect.change new file mode 100644 index 000000000..c5be80539 --- /dev/null +++ b/changelog.d/301-small-image-redirect.change @@ -0,0 +1 @@ +Performance: Use 301 (permanent) redirect instead of 302 (temporary) when redirecting small images in media proxy. This allows browsers to cache the redirect response.
\ No newline at end of file diff --git a/changelog.d/actor-published-date.add b/changelog.d/actor-published-date.add new file mode 100644 index 000000000..feac85894 --- /dev/null +++ b/changelog.d/actor-published-date.add @@ -0,0 +1 @@ +Include "published" in actor view diff --git a/changelog.d/argon2-passwords.add b/changelog.d/argon2-passwords.add deleted file mode 100644 index 36fd7faf2..000000000 --- a/changelog.d/argon2-passwords.add +++ /dev/null @@ -1 +0,0 @@ -Added support for argon2 passwords and their conversion for migration from Akkoma fork to upstream. diff --git a/changelog.d/backup-links.add b/changelog.d/backup-links.add new file mode 100644 index 000000000..ff19e736b --- /dev/null +++ b/changelog.d/backup-links.add @@ -0,0 +1 @@ +Link to exported outbox/followers/following collections in backup actor.json diff --git a/changelog.d/bugfix-truncate-remote-user-fields.fix b/changelog.d/bugfix-truncate-remote-user-fields.fix deleted file mode 100644 index 239a3c224..000000000 --- a/changelog.d/bugfix-truncate-remote-user-fields.fix +++ /dev/null @@ -1 +0,0 @@ -Truncate remote user fields, avoids them getting rejected diff --git a/changelog.d/c2s-update-verify.fix b/changelog.d/c2s-update-verify.fix new file mode 100644 index 000000000..a4dfe7c07 --- /dev/null +++ b/changelog.d/c2s-update-verify.fix @@ -0,0 +1 @@ +Verify a local Update sent through AP C2S so users can only update their own objects diff --git a/changelog.d/ci-git-fetch.skip b/changelog.d/ci-builder-skip-arm32.skip index e69de29bb..e69de29bb 100644 --- a/changelog.d/ci-git-fetch.skip +++ b/changelog.d/ci-builder-skip-arm32.skip diff --git a/changelog.d/deprecate-subscribe.change b/changelog.d/deprecate-subscribe.change deleted file mode 100644 index bd7e8aec7..000000000 --- a/changelog.d/deprecate-subscribe.change +++ /dev/null @@ -1 +0,0 @@ -Deprecate `/api/v1/pleroma/accounts/:id/subscribe`/`unsubscribe`
\ No newline at end of file diff --git a/changelog.d/commonapi.skip b/changelog.d/description-update-suggestions.skip index e69de29bb..e69de29bb 100644 --- a/changelog.d/commonapi.skip +++ b/changelog.d/description-update-suggestions.skip diff --git a/changelog.d/drop-unwanted.change b/changelog.d/drop-unwanted.change deleted file mode 100644 index 459d4bfe6..000000000 --- a/changelog.d/drop-unwanted.change +++ /dev/null @@ -1 +0,0 @@ -Restrict incoming activities from unknown actors to a subset that does not imply a previous relationship and early rejection of unrecognized activity types. diff --git a/changelog.d/fix-mastodon-edits.fix b/changelog.d/fix-mastodon-edits.fix new file mode 100644 index 000000000..2e79977e0 --- /dev/null +++ b/changelog.d/fix-mastodon-edits.fix @@ -0,0 +1 @@ +Fix Mastodon incoming edits with inlined "likes" diff --git a/changelog.d/dialyzer.skip b/changelog.d/fix-wrong-config-section.skip index e69de29bb..e69de29bb 100644 --- a/changelog.d/dialyzer.skip +++ b/changelog.d/fix-wrong-config-section.skip diff --git a/changelog.d/follow-hashtags.add b/changelog.d/follow-hashtags.add new file mode 100644 index 000000000..a4994b92b --- /dev/null +++ b/changelog.d/follow-hashtags.add @@ -0,0 +1 @@ +Hashtag following diff --git a/changelog.d/follow-request.fix b/changelog.d/follow-request.fix deleted file mode 100644 index 59d34e9bf..000000000 --- a/changelog.d/follow-request.fix +++ /dev/null @@ -1 +0,0 @@ -Fixed malformed follow requests that cause them to appear stuck pending due to the recipient being unable to process them. diff --git a/changelog.d/follow-validator.fix b/changelog.d/follow-validator.fix deleted file mode 100644 index d49932b7b..000000000 --- a/changelog.d/follow-validator.fix +++ /dev/null @@ -1 +0,0 @@ -Improve the FollowValidator to successfully incoming activities with an errant cc field. diff --git a/changelog.d/following-state.fix b/changelog.d/following-state.fix deleted file mode 100644 index 314ea6210..000000000 --- a/changelog.d/following-state.fix +++ /dev/null @@ -1 +0,0 @@ -Resolved edge case where the API can report you are following a user but the relationship is not fully established. diff --git a/changelog.d/get-statuses-param.change b/changelog.d/get-statuses-param.change deleted file mode 100644 index 3edcad268..000000000 --- a/changelog.d/get-statuses-param.change +++ /dev/null @@ -1 +0,0 @@ -Support `id` param in `GET /api/v1/statuses`
\ No newline at end of file diff --git a/changelog.d/docs-fix.skip b/changelog.d/hexpm-build-images.skip index e69de29bb..e69de29bb 100644 --- a/changelog.d/docs-fix.skip +++ b/changelog.d/hexpm-build-images.skip diff --git a/changelog.d/identity-proofs.remove b/changelog.d/identity-proofs.remove deleted file mode 100644 index efe1c34f5..000000000 --- a/changelog.d/identity-proofs.remove +++ /dev/null @@ -1 +0,0 @@ -Remove stub for /api/v1/accounts/:id/identity_proofs (deprecated by Mastodon 3.5.0)
\ No newline at end of file diff --git a/changelog.d/incoming-scrobbles.fix b/changelog.d/incoming-scrobbles.fix new file mode 100644 index 000000000..fb1e2581c --- /dev/null +++ b/changelog.d/incoming-scrobbles.fix @@ -0,0 +1 @@ +Allow incoming "Listen" activities diff --git a/changelog.d/ldap-ca.add b/changelog.d/ldap-ca.add deleted file mode 100644 index 32ecbb5c0..000000000 --- a/changelog.d/ldap-ca.add +++ /dev/null @@ -1 +0,0 @@ -LDAP configuration now permits overriding the CA root certificate file for TLS validation. diff --git a/changelog.d/ldap-tls.fix b/changelog.d/ldap-tls.fix deleted file mode 100644 index b15137d77..000000000 --- a/changelog.d/ldap-tls.fix +++ /dev/null @@ -1 +0,0 @@ -STARTTLS certificate and hostname verification for LDAP authentication diff --git a/changelog.d/ldaps.fix b/changelog.d/ldaps.fix deleted file mode 100644 index a1dc901ab..000000000 --- a/changelog.d/ldaps.fix +++ /dev/null @@ -1 +0,0 @@ -LDAPS connections (implicit TLS) are now supported. diff --git a/changelog.d/list-id-visibility.add b/changelog.d/list-id-visibility.add deleted file mode 100644 index 2fea2d771..000000000 --- a/changelog.d/list-id-visibility.add +++ /dev/null @@ -1 +0,0 @@ -Include list id in StatusView
\ No newline at end of file diff --git a/changelog.d/mailgun.fix b/changelog.d/mailgun.fix deleted file mode 100644 index 855588752..000000000 --- a/changelog.d/mailgun.fix +++ /dev/null @@ -1 +0,0 @@ -The Swoosh email adapter for Mailgun was missing a new dependency on :multipart diff --git a/changelog.d/manifest-icon-size.skip b/changelog.d/manifest-icon-size.skip deleted file mode 100644 index e69de29bb..000000000 --- a/changelog.d/manifest-icon-size.skip +++ /dev/null diff --git a/changelog.d/mogrify.skip b/changelog.d/mogrify.skip deleted file mode 100644 index e69de29bb..000000000 --- a/changelog.d/mogrify.skip +++ /dev/null diff --git a/changelog.d/mrf-cleanup.skip b/changelog.d/mrf-cleanup.skip deleted file mode 100644 index e69de29bb..000000000 --- a/changelog.d/mrf-cleanup.skip +++ /dev/null diff --git a/changelog.d/mrf-fodirectreply.add b/changelog.d/mrf-fodirectreply.add deleted file mode 100644 index 10fd5d16a..000000000 --- a/changelog.d/mrf-fodirectreply.add +++ /dev/null @@ -1 +0,0 @@ -Added MRF.FODirectReply which changes replies to followers-only posts to be direct. diff --git a/changelog.d/mrf-quietreply.add b/changelog.d/mrf-quietreply.add deleted file mode 100644 index 4ed20bce6..000000000 --- a/changelog.d/mrf-quietreply.add +++ /dev/null @@ -1 +0,0 @@ -Added MRF.QuietReply which prevents replies to public posts from being published to the timelines diff --git a/changelog.d/notifications-group-key.add b/changelog.d/notifications-group-key.add deleted file mode 100644 index 386927f4a..000000000 --- a/changelog.d/notifications-group-key.add +++ /dev/null @@ -1 +0,0 @@ -Add `group_key` to notifications
\ No newline at end of file diff --git a/changelog.d/notifications-marker.change b/changelog.d/notifications-marker.change deleted file mode 100644 index 9e350a95c..000000000 --- a/changelog.d/notifications-marker.change +++ /dev/null @@ -1 +0,0 @@ -Fix 'Setting a marker should mark notifications as read'
\ No newline at end of file diff --git a/changelog.d/oauth-app-spam.fix b/changelog.d/oauth-app-spam.fix deleted file mode 100644 index cdc2e816d..000000000 --- a/changelog.d/oauth-app-spam.fix +++ /dev/null @@ -1 +0,0 @@ -Add a rate limiter to the OAuth App creation endpoint and ensure registered apps are assigned to users. diff --git a/changelog.d/oban-recevier-improvements.fix b/changelog.d/oban-recevier-improvements.fix deleted file mode 100644 index f91502ed2..000000000 --- a/changelog.d/oban-recevier-improvements.fix +++ /dev/null @@ -1 +0,0 @@ -ReceiverWorker will cancel processing jobs instead of retrying if the user cannot be fetched due to 403, 404, or 410 errors or if the account is disabled locally. diff --git a/changelog.d/oban-uniques.change b/changelog.d/oban-uniques.change deleted file mode 100644 index d9deb4696..000000000 --- a/changelog.d/oban-uniques.change +++ /dev/null @@ -1 +0,0 @@ -Adjust more Oban workers to enforce unique job constraints. diff --git a/changelog.d/oban_gun_snooze.change b/changelog.d/oban_gun_snooze.change deleted file mode 100644 index c94525b2a..000000000 --- a/changelog.d/oban_gun_snooze.change +++ /dev/null @@ -1 +0,0 @@ -Publisher behavior improvement when snoozing Oban jobs due to Gun connection pool contention. diff --git a/changelog.d/post-languages.add b/changelog.d/post-languages.add new file mode 100644 index 000000000..04b350f3f --- /dev/null +++ b/changelog.d/post-languages.add @@ -0,0 +1 @@ +Allow to specify post language
\ No newline at end of file diff --git a/changelog.d/profile-image-descriptions.add b/changelog.d/profile-image-descriptions.add deleted file mode 100644 index 85cc48083..000000000 --- a/changelog.d/profile-image-descriptions.add +++ /dev/null @@ -1 +0,0 @@ -Allow providing avatar/header descriptions
\ No newline at end of file diff --git a/changelog.d/publisher-reachability.fix b/changelog.d/publisher-reachability.fix deleted file mode 100644 index 3f50be581..000000000 --- a/changelog.d/publisher-reachability.fix +++ /dev/null @@ -1 +0,0 @@ -Address case where instance reachability status couldn't be updated diff --git a/changelog.d/remote-object-fetcher.fix b/changelog.d/remote-object-fetcher.fix deleted file mode 100644 index dcf2b1b31..000000000 --- a/changelog.d/remote-object-fetcher.fix +++ /dev/null @@ -1 +0,0 @@ -Remote Fetcher Worker recognizes more permanent failure errors diff --git a/changelog.d/rich-media-ignore-host.fix b/changelog.d/rich-media-ignore-host.fix new file mode 100644 index 000000000..b70866ac7 --- /dev/null +++ b/changelog.d/rich-media-ignore-host.fix @@ -0,0 +1 @@ +Fix missing check for domain presence in rich media ignore_host configuration diff --git a/changelog.d/rich-media-no-heads.change b/changelog.d/rich-media-no-heads.change deleted file mode 100644 index 0bab323aa..000000000 --- a/changelog.d/rich-media-no-heads.change +++ /dev/null @@ -1 +0,0 @@ -Rich Media preview fetching will skip making an HTTP HEAD request to check a URL for allowed content type and length if the Tesla adapter is Gun or Finch diff --git a/changelog.d/scrubbers-allow-mention-hashtag.add b/changelog.d/scrubbers-allow-mention-hashtag.add deleted file mode 100644 index c12ab1ffb..000000000 --- a/changelog.d/scrubbers-allow-mention-hashtag.add +++ /dev/null @@ -1 +0,0 @@ -scrubbers/default: Allow "mention hashtag" classes used by Mastodon
\ No newline at end of file diff --git a/changelog.d/stream-follow-relationships-count.fix b/changelog.d/stream-follow-relationships-count.fix deleted file mode 100644 index 68452a88b..000000000 --- a/changelog.d/stream-follow-relationships-count.fix +++ /dev/null @@ -1 +0,0 @@ -StreamerView: Do not leak follows count if hidden
\ No newline at end of file diff --git a/changelog.d/text-extensions.skip b/changelog.d/text-extensions.skip deleted file mode 100644 index e69de29bb..000000000 --- a/changelog.d/text-extensions.skip +++ /dev/null diff --git a/changelog.d/todo-cleanup.skip b/changelog.d/todo-cleanup.skip deleted file mode 100644 index e69de29bb..000000000 --- a/changelog.d/todo-cleanup.skip +++ /dev/null diff --git a/changelog.d/update-oban.change b/changelog.d/update-oban.change deleted file mode 100644 index a67b3e3cf..000000000 --- a/changelog.d/update-oban.change +++ /dev/null @@ -1 +0,0 @@ -Update Oban to 2.18 diff --git a/changelog.d/user-factory.skip b/changelog.d/user-factory.skip deleted file mode 100644 index e69de29bb..000000000 --- a/changelog.d/user-factory.skip +++ /dev/null diff --git a/changelog.d/user-imports.fix b/changelog.d/user-imports.fix deleted file mode 100644 index 0076c73d7..000000000 --- a/changelog.d/user-imports.fix +++ /dev/null @@ -1 +0,0 @@ -Imports of blocks, mutes, and follows would retry repeatedly due to incorrect error handling and all work executed in a single job diff --git a/changelog.d/vips-blurhash.fix b/changelog.d/vips-blurhash.fix new file mode 100644 index 000000000..9e8951b15 --- /dev/null +++ b/changelog.d/vips-blurhash.fix @@ -0,0 +1 @@ +Fix blurhash generation crashes diff --git a/changelog.d/well-known.change b/changelog.d/well-known.change deleted file mode 100644 index e928124fb..000000000 --- a/changelog.d/well-known.change +++ /dev/null @@ -1 +0,0 @@ -Accept application/activity+json for requests to .well-known/nodeinfo diff --git a/changelog.d/workerhelper.change b/changelog.d/workerhelper.change deleted file mode 100644 index 539c9b54f..000000000 --- a/changelog.d/workerhelper.change +++ /dev/null @@ -1 +0,0 @@ -Worker configuration is no longer available. This only affects custom max_retries values for a couple Oban queues. diff --git a/ci/elixir-1.12/build_and_push.sh b/ci/elixir-1.12/build_and_push.sh deleted file mode 100755 index 508262ed8..000000000 --- a/ci/elixir-1.12/build_and_push.sh +++ /dev/null @@ -1 +0,0 @@ -docker buildx build --platform linux/amd64,linux/arm64,linux/arm/v7 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.12 --push . diff --git a/ci/elixir-1.13.4-otp-25/Dockerfile b/ci/elixir-1.13.4-otp-25/Dockerfile deleted file mode 100644 index 25a1639e8..000000000 --- a/ci/elixir-1.13.4-otp-25/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM elixir:1.13.4-otp-25 - -# Single RUN statement, otherwise intermediate images are created -# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run -RUN apt-get update &&\ - apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\ - mix local.hex --force &&\ - mix local.rebar --force diff --git a/ci/elixir-1.12/Dockerfile b/ci/elixir-1.14.5-otp-25/Dockerfile index a2b566873..3a35c84c3 100644 --- a/ci/elixir-1.12/Dockerfile +++ b/ci/elixir-1.14.5-otp-25/Dockerfile @@ -1,4 +1,4 @@ -FROM elixir:1.12.3 +FROM elixir:1.14.5-otp-25 # Single RUN statement, otherwise intermediate images are created # https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run diff --git a/ci/elixir-1.13.4-otp-25/build_and_push.sh b/ci/elixir-1.14.5-otp-25/build_and_push.sh index b8ca1d24d..912c47d0c 100755 --- a/ci/elixir-1.13.4-otp-25/build_and_push.sh +++ b/ci/elixir-1.14.5-otp-25/build_and_push.sh @@ -1 +1 @@ -docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.13.4-otp-25 --push . 
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.14.5-otp-25 --push . diff --git a/config/config.exs b/config/config.exs index 4e088d574..a9f6aa0b7 100644 --- a/config/config.exs +++ b/config/config.exs @@ -435,6 +435,11 @@ config :pleroma, :mrf_follow_bot, follower_nickname: nil config :pleroma, :mrf_inline_quote, template: "<bdi>RT:</bdi> {url}" +config :pleroma, :mrf_remote_report, + reject_all: false, + reject_anonymous: true, + reject_empty_message: true + config :pleroma, :mrf_force_mention, mention_parent: true, mention_quoted: true @@ -613,16 +618,17 @@ config :pleroma, Pleroma.Formatter, config :pleroma, :ldap, enabled: System.get_env("LDAP_ENABLED") == "true", - host: System.get_env("LDAP_HOST") || "localhost", - port: String.to_integer(System.get_env("LDAP_PORT") || "389"), + host: System.get_env("LDAP_HOST", "localhost"), + port: String.to_integer(System.get_env("LDAP_PORT", "389")), ssl: System.get_env("LDAP_SSL") == "true", sslopts: [], tls: System.get_env("LDAP_TLS") == "true", tlsopts: [], - base: System.get_env("LDAP_BASE") || "dc=example,dc=com", - uid: System.get_env("LDAP_UID") || "cn", + base: System.get_env("LDAP_BASE", "dc=example,dc=com"), + uid: System.get_env("LDAP_UID", "cn"), # defaults to CAStore's Mozilla roots - cacertfile: nil + cacertfile: System.get_env("LDAP_CACERTFILE", nil), + mail: System.get_env("LDAP_MAIL", "mail") oauth_consumer_strategies = System.get_env("OAUTH_CONSUMER_STRATEGIES") diff --git a/config/description.exs b/config/description.exs index 7a714deff..f091e4924 100644 --- a/config/description.exs +++ b/config/description.exs @@ -2246,15 +2246,9 @@ config :pleroma, :config_description, [ label: "SSL options", type: :keyword, description: "Additional SSL options", - suggestions: [cacertfile: "path/to/file/with/PEM/cacerts", verify: :verify_peer], + suggestions: [verify: :verify_peer], children: [ %{ - key: :cacertfile, - type: :string, - description: "Path to file with PEM encoded cacerts", - suggestions: ["path/to/file/with/PEM/cacerts"] - }, - %{ key: :verify, type: :atom, description: "Type of cert verification", @@ -2273,15 +2267,9 @@ config :pleroma, :config_description, [ label: "TLS options", type: :keyword, description: "Additional TLS options", - suggestions: [cacertfile: "path/to/file/with/PEM/cacerts", verify: :verify_peer], + suggestions: [verify: :verify_peer], children: [ %{ - key: :cacertfile, - type: :string, - description: "Path to file with PEM encoded cacerts", - suggestions: ["path/to/file/with/PEM/cacerts"] - }, - %{ key: :verify, type: :atom, description: "Type of cert verification", @@ -2297,11 +2285,25 @@ config :pleroma, :config_description, [ }, %{ key: :uid, - label: "UID", + label: "UID Attribute", type: :string, description: "LDAP attribute name to authenticate the user, e.g. 
when \"cn\", the filter will be \"cn=username,base\"", suggestions: ["cn"] + }, + %{ + key: :cacertfile, + label: "CACertfile", + type: :string, + description: "Path to CA certificate file" + }, + %{ + key: :mail, + label: "Mail Attribute", + type: :string, + description: + "LDAP attribute name to use as the email address when automatically registering the user on first login", + suggestions: ["mail"] } ] }, @@ -3305,8 +3307,7 @@ config :pleroma, :config_description, [ suggestions: [ Pleroma.Web.Preload.Providers.Instance, Pleroma.Web.Preload.Providers.User, - Pleroma.Web.Preload.Providers.Timelines, - Pleroma.Web.Preload.Providers.StatusNet + Pleroma.Web.Preload.Providers.Timelines ] } ] diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md index 6a535e054..6e2fddcb6 100644 --- a/docs/configuration/cheatsheet.md +++ b/docs/configuration/cheatsheet.md @@ -98,7 +98,7 @@ To add configuration to your config file, you can copy it from the base config. * `moderator_privileges`: A list of privileges a moderator has (e.g. delete messages, manage reports...) * Possible values are the same as for `admin_privileges` -## :database +## :features * `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes). ## Background migrations @@ -742,6 +742,21 @@ config :pleroma, Pleroma.Emails.Mailer, auth: :always ``` +An example for Mua adapter: + +```elixir +config :pleroma, Pleroma.Emails.Mailer, + enabled: true, + adapter: Swoosh.Adapters.Mua, + relay: "mail.example.com", + port: 465, + auth: [ + username: "YOUR_USERNAME@domain.tld", + password: "YOUR_SMTP_PASSWORD" + ], + protocol: :ssl +``` + ### :email_notifications Email notifications settings. diff --git a/docs/installation/debian_based_en.md b/docs/installation/debian_based_en.md index b61e4addd..21cfe2bff 100644 --- a/docs/installation/debian_based_en.md +++ b/docs/installation/debian_based_en.md @@ -69,12 +69,18 @@ cd /opt/pleroma sudo -Hu pleroma mix deps.get ``` -* Generate the configuration: `sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen` +* Generate the configuration: + +```shell +sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen` +``` + +* During this process: * Answer with `yes` if it asks you to install `rebar3`. * This may take some time, because parts of pleroma get compiled first. * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`. 
-* Check the configuration and if all looks right, rename it, so Pleroma will load it (`prod.secret.exs` for productive instance, `dev.secret.exs` for development instances): +* Check the configuration and if all looks right, rename it, so Pleroma will load it (`prod.secret.exs` for production instances, `dev.secret.exs` for development instances): ```shell sudo -Hu pleroma mv config/{generated_config.exs,prod.secret.exs} diff --git a/docs/installation/debian_based_jp.md b/docs/installation/debian_based_jp.md index 5a0823a63..0817934ff 100644 --- a/docs/installation/debian_based_jp.md +++ b/docs/installation/debian_based_jp.md @@ -14,7 +14,7 @@ Note: This article is potentially outdated because at this time we may not have - PostgreSQL 11.0以上 (Ubuntu16.04では9.5しか提供されていないので,[](https://www.postgresql.org/download/linux/ubuntu/)こちらから新しいバージョンを入手してください) - `postgresql-contrib` 11.0以上 (同上) -- Elixir 1.13 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like)。または [asdf](https://github.com/asdf-vm/asdf) をpleromaユーザーでインストールしてください) +- Elixir 1.14 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like)。または [asdf](https://github.com/asdf-vm/asdf) をpleromaユーザーでインストールしてください) - `erlang-dev` - `erlang-nox` - `git` diff --git a/docs/installation/freebsd_en.md b/docs/installation/freebsd_en.md index 02513daf2..920bc7d35 100644 --- a/docs/installation/freebsd_en.md +++ b/docs/installation/freebsd_en.md @@ -31,7 +31,7 @@ Setup the required services to automatically start at boot, using `sysrc(8)`. ### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md)) ```shell -# pkg install imagemagick ffmpeg p5-Image-ExifTool +# pkg install imagemagick ffmpeg p5-Image-ExifTool vips ``` ## Configuring Pleroma diff --git a/docs/installation/generic_dependencies.include b/docs/installation/generic_dependencies.include index bdb7f94d3..9f07f62c6 100644 --- a/docs/installation/generic_dependencies.include +++ b/docs/installation/generic_dependencies.include @@ -1,8 +1,8 @@ ## Required dependencies * PostgreSQL >=11.0 -* Elixir >=1.13.0 <1.17 -* Erlang OTP >=22.2.0 (supported: <27) +* Elixir >=1.14.0 <1.17 +* Erlang OTP >=23.0.0 (supported: <27) * git * file / libmagic * gcc or clang diff --git a/docs/installation/openbsd_en.md b/docs/installation/openbsd_en.md index e58e144d2..78bbf399f 100644 --- a/docs/installation/openbsd_en.md +++ b/docs/installation/openbsd_en.md @@ -12,7 +12,7 @@ For any additional information regarding commands and configuration files mentio To install them, run the following command (with doas or as root): ``` -pkg_add elixir gmake git postgresql-server postgresql-contrib cmake ffmpeg ImageMagick +pkg_add elixir gmake git postgresql-server postgresql-contrib cmake ffmpeg ImageMagick libvips ``` Pleroma requires a reverse proxy, OpenBSD has relayd in base (and is used in this guide) and packages/ports are available for nginx (www/nginx) and apache (www/apache-httpd). Independently of the reverse proxy, [acme-client(1)](https://man.openbsd.org/acme-client) can be used to get a certificate from Let's Encrypt. 
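The `:ldap` hunks earlier in this diff (config/config.exs and config/description.exs) switch the defaults to `System.get_env/2` and add a `cacertfile` override plus a `mail` attribute used when auto-registering users on first login. Below is a minimal sketch of the resulting runtime configuration; the hostname, port, and certificate path are placeholders, and only keys visible in the diff are assumed:

```elixir
# Minimal sketch of the reworked :pleroma, :ldap settings (e.g. in prod.secret.exs).
# Host, port, and CA path are placeholders, not values taken from this diff.
import Config

config :pleroma, :ldap,
  enabled: true,
  host: "ldap.example.com",
  port: 636,
  ssl: true,
  base: "dc=example,dc=com",
  uid: "cn",
  # new in this release: CA bundle override and the attribute used as the
  # e-mail address when a user is auto-registered on first login
  cacertfile: "/etc/ssl/certs/ldap-ca.pem",
  mail: "mail"
```

With `ssl: true`, the new `Pleroma.LDAP` GenServer further down in this diff passes these options to `:eldap.open/2` via `:sslopts`; with `tls: true` it instead upgrades the connection through `:eldap.start_tls/3`.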
diff --git a/docs/installation/openbsd_fi.md b/docs/installation/openbsd_fi.md index 73aca3a6f..d7c94d8a0 100644 --- a/docs/installation/openbsd_fi.md +++ b/docs/installation/openbsd_fi.md @@ -18,7 +18,7 @@ Matrix-kanava #pleroma:libera.chat ovat hyviä paikkoja löytää apua Asenna tarvittava ohjelmisto: -`# pkg_add git elixir gmake postgresql-server-10.3 postgresql-contrib-10.3 cmake ffmpeg ImageMagick` +`# pkg_add git elixir gmake postgresql-server-10.3 postgresql-contrib-10.3 cmake ffmpeg ImageMagick libvips` #### Optional software diff --git a/installation/openldap/pw_self_service.ldif b/installation/openldap/pw_self_service.ldif new file mode 100644 index 000000000..463dabbfb --- /dev/null +++ b/installation/openldap/pw_self_service.ldif @@ -0,0 +1,7 @@ +dn: olcDatabase={1}mdb,cn=config +changetype: modify +add: olcAccess +olcAccess: {1}to attrs=userPassword + by self write + by anonymous auth + by * none diff --git a/lib/mix/tasks/pleroma/emoji.ex b/lib/mix/tasks/pleroma/emoji.ex index 8b9c921c8..b656f161f 100644 --- a/lib/mix/tasks/pleroma/emoji.ex +++ b/lib/mix/tasks/pleroma/emoji.ex @@ -93,6 +93,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do ) files = fetch_and_decode!(files_loc) + files_to_unzip = for({_, f} <- files, do: f) IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name])) @@ -103,17 +104,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do pack_name ]) - files_to_unzip = - Enum.map( - files, - fn {_, f} -> to_charlist(f) end - ) - - {:ok, _} = - :zip.unzip(binary_archive, - cwd: String.to_charlist(pack_path), - file_list: files_to_unzip - ) + {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, pack_path, files_to_unzip) IO.puts(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name])) @@ -201,7 +192,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do tmp_pack_dir = Path.join(System.tmp_dir!(), "emoji-pack-#{name}") - {:ok, _} = :zip.unzip(binary_archive, cwd: String.to_charlist(tmp_pack_dir)) + {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, tmp_pack_dir) emoji_map = Pleroma.Emoji.Loader.make_shortcode_to_file_map(tmp_pack_dir, exts) diff --git a/lib/mix/tasks/pleroma/search/meilisearch.ex b/lib/mix/tasks/pleroma/search/meilisearch.ex index 8379a0c25..edce9e871 100644 --- a/lib/mix/tasks/pleroma/search/meilisearch.ex +++ b/lib/mix/tasks/pleroma/search/meilisearch.ex @@ -9,7 +9,7 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do import Ecto.Query import Pleroma.Search.Meilisearch, - only: [meili_post: 2, meili_put: 2, meili_get: 1, meili_delete: 1] + only: [meili_put: 2, meili_get: 1, meili_delete: 1] def run(["index"]) do start_pleroma() @@ -28,7 +28,7 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do end {:ok, _} = - meili_post( + meili_put( "/indexes/objects/settings/ranking-rules", [ "published:desc", @@ -42,7 +42,7 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do ) {:ok, _} = - meili_post( + meili_put( "/indexes/objects/settings/searchable-attributes", [ "content" diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index cb15dc1e9..3f199c002 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -94,6 +94,7 @@ defmodule Pleroma.Application do children = [ Pleroma.PromEx, + Pleroma.LDAP, Pleroma.Repo, Config.TransferTask, Pleroma.Emoji, diff --git a/lib/pleroma/config/transfer_task.ex b/lib/pleroma/config/transfer_task.ex index ffc95f144..140dd7711 100644 --- a/lib/pleroma/config/transfer_task.ex +++ b/lib/pleroma/config/transfer_task.ex @@ -22,7 +22,8 @@ defmodule Pleroma.Config.TransferTask do {:pleroma, :markup}, {:pleroma, 
:streamer}, {:pleroma, :pools}, - {:pleroma, :connections_pool} + {:pleroma, :connections_pool}, + {:pleroma, :ldap} ] defp reboot_time_subkeys, diff --git a/lib/pleroma/constants.ex b/lib/pleroma/constants.ex index 5268ebe7a..3762c0035 100644 --- a/lib/pleroma/constants.ex +++ b/lib/pleroma/constants.ex @@ -20,7 +20,8 @@ defmodule Pleroma.Constants do "deleted_activity_id", "pleroma_internal", "generator", - "rules" + "rules", + "language" ] ) @@ -36,10 +37,12 @@ defmodule Pleroma.Constants do "updated", "emoji", "content", + "contentMap", "summary", "sensitive", "attachment", - "generator" + "generator", + "language" ] ) @@ -87,6 +90,7 @@ defmodule Pleroma.Constants do const(activity_types, do: [ + "Block", "Create", "Update", "Delete", @@ -99,7 +103,8 @@ defmodule Pleroma.Constants do "Announce", "Undo", "Flag", - "EmojiReact" + "EmojiReact", + "Listen" ] ) @@ -115,6 +120,10 @@ defmodule Pleroma.Constants do ] ) + const(object_types, + do: ~w[Event Question Answer Audio Video Image Article Note Page ChatMessage] + ) + # basic regex, just there to weed out potential mistakes # https://datatracker.ietf.org/doc/html/rfc2045#section-5.1 const(mime_regex, diff --git a/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex b/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex new file mode 100644 index 000000000..dcdab19f8 --- /dev/null +++ b/lib/pleroma/ecto_type/activity_pub/object_validators/content_language_map.ex @@ -0,0 +1,49 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap do + use Ecto.Type + + import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode, + only: [good_locale_code?: 1] + + def type, do: :map + + def cast(%{} = object) do + with {status, %{} = data} when status in [:modified, :ok] <- validate_map(object) do + {:ok, data} + else + {_, nil} -> {:ok, nil} + {:error, _} -> :error + end + end + + def cast(_), do: :error + + def dump(data), do: {:ok, data} + + def load(data), do: {:ok, data} + + defp validate_map(%{} = object) do + {status, data} = + object + |> Enum.reduce({:ok, %{}}, fn + {lang, value}, {status, acc} when is_binary(lang) and is_binary(value) -> + if good_locale_code?(lang) do + {status, Map.put(acc, lang, value)} + else + {:modified, acc} + end + + _, {_status, acc} -> + {:modified, acc} + end) + + if data == %{} do + {status, nil} + else + {status, data} + end + end +end diff --git a/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex b/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex new file mode 100644 index 000000000..4779deeb0 --- /dev/null +++ b/lib/pleroma/ecto_type/activity_pub/object_validators/language_code.ex @@ -0,0 +1,27 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode do + use Ecto.Type + + def type, do: :string + + def cast(language) when is_binary(language) do + if good_locale_code?(language) do + {:ok, language} + else + {:error, :invalid_language} + end + end + + def cast(_), do: :error + + def dump(data), do: {:ok, data} + + def load(data), do: {:ok, data} + + def good_locale_code?(code) when is_binary(code), do: code =~ ~r<^[a-zA-Z0-9\-]+\z$> + + def good_locale_code?(_code), 
do: false +end diff --git a/lib/pleroma/emoji/pack.ex b/lib/pleroma/emoji/pack.ex index 785fdb8b2..cef12822c 100644 --- a/lib/pleroma/emoji/pack.ex +++ b/lib/pleroma/emoji/pack.ex @@ -25,11 +25,12 @@ defmodule Pleroma.Emoji.Pack do alias Pleroma.Emoji alias Pleroma.Emoji.Pack alias Pleroma.Utils + alias Pleroma.SafeZip @spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values} def create(name) do with :ok <- validate_not_empty([name]), - dir <- Path.join(emoji_path(), name), + dir <- path_join_name_safe(emoji_path(), name), :ok <- File.mkdir(dir) do save_pack(%__MODULE__{pack_file: Path.join(dir, "pack.json")}) end @@ -65,43 +66,21 @@ defmodule Pleroma.Emoji.Pack do {:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values} def delete(name) do with :ok <- validate_not_empty([name]), - pack_path <- Path.join(emoji_path(), name) do + pack_path <- path_join_name_safe(emoji_path(), name) do File.rm_rf(pack_path) end end - @spec unpack_zip_emojies(list(tuple())) :: list(map()) - defp unpack_zip_emojies(zip_files) do - Enum.reduce(zip_files, [], fn - {_, path, s, _, _, _}, acc when elem(s, 2) == :regular -> - with( - filename <- Path.basename(path), - shortcode <- Path.basename(filename, Path.extname(filename)), - false <- Emoji.exist?(shortcode) - ) do - [%{path: path, filename: path, shortcode: shortcode} | acc] - else - _ -> acc - end - - _, acc -> - acc - end) - end - @spec add_file(t(), String.t(), Path.t(), Plug.Upload.t()) :: {:ok, t()} | {:error, File.posix() | atom()} def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"} = file) do - with {:ok, zip_files} <- :zip.table(to_charlist(file.path)), - [_ | _] = emojies <- unpack_zip_emojies(zip_files), + with {:ok, zip_files} <- SafeZip.list_dir_file(file.path), + [_ | _] = emojies <- map_zip_emojies(zip_files), {:ok, tmp_dir} <- Utils.tmp_dir("emoji") do try do {:ok, _emoji_files} = - :zip.unzip( - to_charlist(file.path), - [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, String.to_charlist(tmp_dir)}] - ) + SafeZip.unzip_file(file.path, tmp_dir, Enum.map(emojies, & &1[:path])) {_, updated_pack} = Enum.map_reduce(emojies, pack, fn item, emoji_pack -> @@ -292,7 +271,7 @@ defmodule Pleroma.Emoji.Pack do @spec load_pack(String.t()) :: {:ok, t()} | {:error, :file.posix()} def load_pack(name) do name = Path.basename(name) - pack_file = Path.join([emoji_path(), name, "pack.json"]) + pack_file = path_join_name_safe(emoji_path(), name) |> Path.join("pack.json") with {:ok, _} <- File.stat(pack_file), {:ok, pack_data} <- File.read(pack_file) do @@ -416,10 +395,9 @@ defmodule Pleroma.Emoji.Pack do end defp create_archive_and_cache(pack, hash) do - files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)] - - {:ok, {_, result}} = - :zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)]) + pack_file_list = Enum.into(pack.files, [], fn {_, f} -> f end) + files = ["pack.json" | pack_file_list] + {:ok, {_, result}} = SafeZip.zip("#{pack.name}.zip", files, pack.path, true) ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file]) overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files)) @@ -478,7 +456,7 @@ defmodule Pleroma.Emoji.Pack do end defp save_file(%Plug.Upload{path: upload_path}, pack, filename) do - file_path = Path.join(pack.path, filename) + file_path = path_join_safe(pack.path, filename) create_subdirs(file_path) with {:ok, _} <- File.copy(upload_path, file_path) do @@ -497,8 +475,8 @@ 
defmodule Pleroma.Emoji.Pack do end defp rename_file(pack, filename, new_filename) do - old_path = Path.join(pack.path, filename) - new_path = Path.join(pack.path, new_filename) + old_path = path_join_safe(pack.path, filename) + new_path = path_join_safe(pack.path, new_filename) create_subdirs(new_path) with :ok <- File.rename(old_path, new_path) do @@ -516,7 +494,7 @@ defmodule Pleroma.Emoji.Pack do defp remove_file(pack, shortcode) do with {:ok, filename} <- get_filename(pack, shortcode), - emoji <- Path.join(pack.path, filename), + emoji <- path_join_safe(pack.path, filename), :ok <- File.rm(emoji) do remove_dir_if_empty(emoji, filename) end @@ -534,7 +512,7 @@ defmodule Pleroma.Emoji.Pack do defp get_filename(pack, shortcode) do with %{^shortcode => filename} when is_binary(filename) <- pack.files, - file_path <- Path.join(pack.path, filename), + file_path <- path_join_safe(pack.path, filename), {:ok, _} <- File.stat(file_path) do {:ok, filename} else @@ -584,11 +562,10 @@ defmodule Pleroma.Emoji.Pack do defp unzip(archive, pack_info, remote_pack, local_pack) do with :ok <- File.mkdir_p!(local_pack.path) do - files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end) + files = Enum.map(remote_pack["files"], fn {_, path} -> path end) # Fallback cannot contain a pack.json file - files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files] - - :zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files) + files = if pack_info[:fallback], do: files, else: ["pack.json" | files] + SafeZip.unzip_data(archive, local_pack.path, files) end end @@ -649,13 +626,43 @@ defmodule Pleroma.Emoji.Pack do end defp validate_has_all_files(pack, zip) do - with {:ok, f_list} <- :zip.unzip(zip, [:memory]) do - # Check if all files from the pack.json are in the archive - pack.files - |> Enum.all?(fn {_, from_manifest} -> - List.keyfind(f_list, to_charlist(from_manifest), 0) + # Check if all files from the pack.json are in the archive + eset = + Enum.reduce(pack.files, MapSet.new(), fn + {_, file}, s -> MapSet.put(s, to_charlist(file)) end) - |> if(do: :ok, else: {:error, :incomplete}) + + if SafeZip.contains_all_data?(zip, eset), + do: :ok, + else: {:error, :incomplete} + end + + defp path_join_name_safe(dir, name) do + if to_string(name) != Path.basename(name) or name in ["..", ".", ""] do + raise "Invalid or malicious pack name: #{name}" + else + Path.join(dir, name) end end + + defp path_join_safe(dir, path) do + {:ok, safe_path} = Path.safe_relative(path) + Path.join(dir, safe_path) + end + + defp map_zip_emojies(zip_files) do + Enum.reduce(zip_files, [], fn path, acc -> + with( + filename <- Path.basename(path), + shortcode <- Path.basename(filename, Path.extname(filename)), + # note: this only checks the shortcode, if an emoji already exists on the same path, but + # with a different shortcode, the existing one will be degraded to an alias of the new + false <- Emoji.exist?(shortcode) + ) do + [%{path: path, filename: path, shortcode: shortcode} | acc] + else + _ -> acc + end + end) + end end diff --git a/lib/pleroma/frontend.ex b/lib/pleroma/frontend.ex index 816499917..fe7f525ea 100644 --- a/lib/pleroma/frontend.ex +++ b/lib/pleroma/frontend.ex @@ -65,21 +65,12 @@ defmodule Pleroma.Frontend do end def unzip(zip, dest) do - with {:ok, unzipped} <- :zip.unzip(zip, [:memory]) do - File.rm_rf!(dest) - File.mkdir_p!(dest) - - Enum.each(unzipped, fn {filename, data} -> - path = filename - - new_file_path = Path.join(dest, path) - - new_file_path - |> Path.dirname() 
- |> File.mkdir_p!() + File.rm_rf!(dest) + File.mkdir_p!(dest) - File.write!(new_file_path, data) - end) + case Pleroma.SafeZip.unzip_data(zip, dest) do + {:ok, _} -> :ok + error -> error end end diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex index a43d88220..3682f0c14 100644 --- a/lib/pleroma/hashtag.ex +++ b/lib/pleroma/hashtag.ex @@ -12,6 +12,7 @@ defmodule Pleroma.Hashtag do alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Repo + alias Pleroma.User.HashtagFollow schema "hashtags" do field(:name, :string) @@ -27,6 +28,14 @@ defmodule Pleroma.Hashtag do |> String.trim() end + def get_by_id(id) do + Repo.get(Hashtag, id) + end + + def get_by_name(name) do + Repo.get_by(Hashtag, name: normalize_name(name)) + end + def get_or_create_by_name(name) do changeset = changeset(%Hashtag{}, %{name: name}) @@ -103,4 +112,22 @@ defmodule Pleroma.Hashtag do {:ok, deleted_count} end end + + def get_followers(%Hashtag{id: hashtag_id}) do + from(hf in HashtagFollow) + |> where([hf], hf.hashtag_id == ^hashtag_id) + |> join(:inner, [hf], u in assoc(hf, :user)) + |> select([hf, u], u.id) + |> Repo.all() + end + + def get_recipients_for_activity(%Pleroma.Activity{object: %{hashtags: tags}}) + when is_list(tags) do + tags + |> Enum.map(&get_followers/1) + |> List.flatten() + |> Enum.uniq() + end + + def get_recipients_for_activity(_activity), do: [] end diff --git a/lib/pleroma/ldap.ex b/lib/pleroma/ldap.ex new file mode 100644 index 000000000..b591c2918 --- /dev/null +++ b/lib/pleroma/ldap.ex @@ -0,0 +1,271 @@ +defmodule Pleroma.LDAP do + use GenServer + + require Logger + + alias Pleroma.Config + alias Pleroma.User + + import Pleroma.Web.Auth.Helpers, only: [fetch_user: 1] + + @connection_timeout 2_000 + @search_timeout 2_000 + + def start_link(_) do + GenServer.start_link(__MODULE__, [], name: __MODULE__) + end + + def bind_user(name, password) do + GenServer.call(__MODULE__, {:bind_user, name, password}) + end + + def change_password(name, password, new_password) do + GenServer.call(__MODULE__, {:change_password, name, password, new_password}) + end + + @impl true + def init(state) do + case {Config.get(Pleroma.Web.Auth.Authenticator), Config.get([:ldap, :enabled])} do + {Pleroma.Web.Auth.LDAPAuthenticator, true} -> + {:ok, state, {:continue, :connect}} + + {Pleroma.Web.Auth.LDAPAuthenticator, false} -> + Logger.error( + "LDAP Authenticator enabled but :pleroma, :ldap is not enabled. Auth will not work." + ) + + {:ok, state} + + {_, true} -> + Logger.warning( + ":pleroma, :ldap is enabled but Pleroma.Web.Authenticator is not set to the LDAPAuthenticator. LDAP will not be used." 
+ ) + + {:ok, state} + + _ -> + {:ok, state} + end + end + + @impl true + def handle_continue(:connect, _state), do: do_handle_connect() + + @impl true + def handle_info(:connect, _state), do: do_handle_connect() + + def handle_info({:bind_after_reconnect, name, password, from}, state) do + result = do_bind_user(state[:handle], name, password) + + GenServer.reply(from, result) + + {:noreply, state} + end + + @impl true + def handle_call({:bind_user, name, password}, from, state) do + case do_bind_user(state[:handle], name, password) do + :needs_reconnect -> + Process.send(self(), {:bind_after_reconnect, name, password, from}, []) + {:noreply, state, {:continue, :connect}} + + result -> + {:reply, result, state, :hibernate} + end + end + + def handle_call({:change_password, name, password, new_password}, _from, state) do + result = change_password(state[:handle], name, password, new_password) + + {:reply, result, state, :hibernate} + end + + @impl true + def terminate(_, state) do + handle = Keyword.get(state, :handle) + + if not is_nil(handle) do + :eldap.close(handle) + end + + :ok + end + + defp do_handle_connect do + state = + case connect() do + {:ok, handle} -> + :eldap.controlling_process(handle, self()) + Process.link(handle) + [handle: handle] + + _ -> + Logger.error("Failed to connect to LDAP. Retrying in 5000ms") + Process.send_after(self(), :connect, 5_000) + [] + end + + {:noreply, state} + end + + defp connect do + ldap = Config.get(:ldap, []) + host = Keyword.get(ldap, :host, "localhost") + port = Keyword.get(ldap, :port, 389) + ssl = Keyword.get(ldap, :ssl, false) + tls = Keyword.get(ldap, :tls, false) + cacertfile = Keyword.get(ldap, :cacertfile) || CAStore.file_path() + + if ssl, do: Application.ensure_all_started(:ssl) + + default_secure_opts = [ + verify: :verify_peer, + cacerts: decode_certfile(cacertfile), + customize_hostname_check: [ + fqdn_fun: fn _ -> to_charlist(host) end + ] + ] + + sslopts = Keyword.merge(default_secure_opts, Keyword.get(ldap, :sslopts, [])) + tlsopts = Keyword.merge(default_secure_opts, Keyword.get(ldap, :tlsopts, [])) + + default_options = [{:port, port}, {:ssl, ssl}, {:timeout, @connection_timeout}] + + # :sslopts can only be included in :eldap.open/2 when {ssl: true} + # or the connection will fail + options = + if ssl do + default_options ++ [{:sslopts, sslopts}] + else + default_options + end + + case :eldap.open([to_charlist(host)], options) do + {:ok, handle} -> + try do + cond do + tls -> + case :eldap.start_tls( + handle, + tlsopts, + @connection_timeout + ) do + :ok -> + {:ok, handle} + + error -> + Logger.error("Could not start TLS: #{inspect(error)}") + :eldap.close(handle) + end + + true -> + {:ok, handle} + end + after + :ok + end + + {:error, error} -> + Logger.error("Could not open LDAP connection: #{inspect(error)}") + {:error, {:ldap_connection_error, error}} + end + end + + defp do_bind_user(handle, name, password) do + dn = make_dn(name) + + case :eldap.simple_bind(handle, dn, password) do + :ok -> + case fetch_user(name) do + %User{} = user -> + user + + _ -> + register_user(handle, ldap_base(), ldap_uid(), name) + end + + # eldap does not inform us of socket closure + # until it is used + {:error, {:gen_tcp_error, :closed}} -> + :eldap.close(handle) + :needs_reconnect + + {:error, error} = e -> + Logger.error("Could not bind LDAP user #{name}: #{inspect(error)}") + e + end + end + + defp register_user(handle, base, uid, name) do + case :eldap.search(handle, [ + {:base, to_charlist(base)}, + {:filter, 
:eldap.equalityMatch(to_charlist(uid), to_charlist(name))}, + {:scope, :eldap.wholeSubtree()}, + {:timeout, @search_timeout} + ]) do + # The :eldap_search_result record structure changed in OTP 24.3 and added a controls field + # https://github.com/erlang/otp/pull/5538 + {:ok, {:eldap_search_result, [{:eldap_entry, _object, attributes}], _referrals}} -> + try_register(name, attributes) + + {:ok, {:eldap_search_result, [{:eldap_entry, _object, attributes}], _referrals, _controls}} -> + try_register(name, attributes) + + error -> + Logger.error("Couldn't register user because LDAP search failed: #{inspect(error)}") + {:error, {:ldap_search_error, error}} + end + end + + defp try_register(name, attributes) do + mail_attribute = Config.get([:ldap, :mail]) + + params = %{ + name: name, + nickname: name, + password: nil + } + + params = + case List.keyfind(attributes, to_charlist(mail_attribute), 0) do + {_, [mail]} -> Map.put_new(params, :email, :erlang.list_to_binary(mail)) + _ -> params + end + + changeset = User.register_changeset_ldap(%User{}, params) + + case User.register(changeset) do + {:ok, user} -> user + error -> error + end + end + + defp change_password(handle, name, password, new_password) do + dn = make_dn(name) + + with :ok <- :eldap.simple_bind(handle, dn, password) do + :eldap.modify_password(handle, dn, to_charlist(new_password), to_charlist(password)) + end + end + + defp decode_certfile(file) do + with {:ok, data} <- File.read(file) do + data + |> :public_key.pem_decode() + |> Enum.map(fn {_, b, _} -> b end) + else + _ -> + Logger.error("Unable to read certfile: #{file}") + [] + end + end + + defp ldap_uid, do: to_charlist(Config.get([:ldap, :uid], "cn")) + defp ldap_base, do: to_charlist(Config.get([:ldap, :base])) + + defp make_dn(name) do + uid = ldap_uid() + base = ldap_base() + ~c"#{uid}=#{name},#{base}" + end +end diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 748f18e6c..77dfda851 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -99,27 +99,6 @@ defmodule Pleroma.Object do def get_by_id(nil), do: nil def get_by_id(id), do: Repo.get(Object, id) - @spec get_by_id_and_maybe_refetch(integer(), list()) :: Object.t() | nil - def get_by_id_and_maybe_refetch(id, opts \\ []) do - with %Object{updated_at: updated_at} = object <- get_by_id(id) do - if opts[:interval] && - NaiveDateTime.diff(NaiveDateTime.utc_now(), updated_at) > opts[:interval] do - case Fetcher.refetch_object(object) do - {:ok, %Object{} = object} -> - object - - e -> - Logger.error("Couldn't refresh #{object.data["id"]}:\n#{inspect(e)}") - object - end - else - object - end - else - nil -> nil - end - end - def get_by_ap_id(nil), do: nil def get_by_ap_id(ap_id) do diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex index 69a5f3268..c85a8b09f 100644 --- a/lib/pleroma/object/fetcher.ex +++ b/lib/pleroma/object/fetcher.ex @@ -145,6 +145,7 @@ defmodule Pleroma.Object.Fetcher do Logger.debug("Fetching object #{id} via AP") with {:scheme, true} <- {:scheme, String.starts_with?(id, "http")}, + {_, true} <- {:mrf, MRF.id_filter(id)}, {:ok, body} <- get_object(id), {:ok, data} <- safe_json_decode(body), :ok <- Containment.contain_origin_from_id(id, data) do @@ -160,6 +161,9 @@ defmodule Pleroma.Object.Fetcher do {:error, e} -> {:error, e} + {:mrf, false} -> + {:error, {:reject, "Filtered by id"}} + e -> {:error, e} end diff --git a/lib/pleroma/pagination.ex b/lib/pleroma/pagination.ex index 8db732cc9..66812b17b 100644 --- a/lib/pleroma/pagination.ex +++ 
b/lib/pleroma/pagination.ex @@ -89,9 +89,9 @@ defmodule Pleroma.Pagination do defp cast_params(params) do param_types = %{ - min_id: :string, - since_id: :string, - max_id: :string, + min_id: params[:id_type] || :string, + since_id: params[:id_type] || :string, + max_id: params[:id_type] || :string, offset: :integer, limit: :integer, skip_extra_order: :boolean, diff --git a/lib/pleroma/release_tasks.ex b/lib/pleroma/release_tasks.ex index bcfcd1243..af2d35c8f 100644 --- a/lib/pleroma/release_tasks.ex +++ b/lib/pleroma/release_tasks.ex @@ -16,17 +16,24 @@ defmodule Pleroma.ReleaseTasks do end end + def find_module(task) do + module_name = + task + |> String.split(".") + |> Enum.map(&String.capitalize/1) + |> then(fn x -> [Mix, Tasks, Pleroma] ++ x end) + |> Module.concat() + + case Code.ensure_loaded(module_name) do + {:module, _} -> module_name + _ -> nil + end + end + defp mix_task(task, args) do Application.load(:pleroma) - {:ok, modules} = :application.get_key(:pleroma, :modules) - - module = - Enum.find(modules, fn module -> - module = Module.split(module) - match?(["Mix", "Tasks", "Pleroma" | _], module) and - String.downcase(List.last(module)) == task - end) + module = find_module(task) if module do module.run(args) diff --git a/lib/pleroma/safe_zip.ex b/lib/pleroma/safe_zip.ex new file mode 100644 index 000000000..35fe2be19 --- /dev/null +++ b/lib/pleroma/safe_zip.ex @@ -0,0 +1,216 @@ +# Akkoma: Magically expressive social media +# Copyright © 2024 Akkoma Authors <https://akkoma.dev/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.SafeZip do + @moduledoc """ + Wraps the subset of Erlang's zip module we’d like to use + but enforces path-traversal safety everywhere and other checks. + + For convenience almost all functions accept both elixir strings and charlists, + but output elixir strings themselves. However, this means the input parameter type + can no longer be used to distinguish archive file paths from archive binary data in memory, + thus where needed both a _data and _file variant are provided. 
+ """ + + @type text() :: String.t() | [char()] + + defp is_safe_path?(path) do + # Path accepts elixir’s chardata() + case Path.safe_relative(path) do + {:ok, _} -> true + _ -> false + end + end + + defp is_safe_type?(file_type) do + if file_type in [:regular, :directory] do + true + else + false + end + end + + defp maybe_add_file(_type, _path_charlist, nil), do: nil + + defp maybe_add_file(:regular, path_charlist, file_list), + do: [to_string(path_charlist) | file_list] + + defp maybe_add_file(_type, _path_charlist, file_list), do: file_list + + @spec check_safe_archive_and_maybe_list_files(binary() | [char()], [term()], boolean()) :: + {:ok, [String.t()]} | {:error, reason :: term()} + defp check_safe_archive_and_maybe_list_files(archive, opts, list) do + acc = if list, do: [], else: nil + + with {:ok, table} <- :zip.table(archive, opts) do + Enum.reduce_while(table, {:ok, acc}, fn + # ZIP comment + {:zip_comment, _}, acc -> + {:cont, acc} + + # File entry + {:zip_file, path, info, _comment, _offset, _comp_size}, {:ok, fl} -> + with {_, type} <- {:get_type, elem(info, 2)}, + {_, true} <- {:type, is_safe_type?(type)}, + {_, true} <- {:safe_path, is_safe_path?(path)} do + {:cont, {:ok, maybe_add_file(type, path, fl)}} + else + {:get_type, e} -> + {:halt, + {:error, "Couldn't determine file type of ZIP entry at #{path} (#{inspect(e)})"}} + + {:type, _} -> + {:halt, {:error, "Potentially unsafe file type in ZIP at: #{path}"}} + + {:safe_path, _} -> + {:halt, {:error, "Unsafe path in ZIP: #{path}"}} + end + + # new OTP version? + _, _acc -> + {:halt, {:error, "Unknown ZIP record type"}} + end) + end + end + + @spec check_safe_archive_and_list_files(binary() | [char()], [term()]) :: + {:ok, [String.t()]} | {:error, reason :: term()} + defp check_safe_archive_and_list_files(archive, opts \\ []) do + check_safe_archive_and_maybe_list_files(archive, opts, true) + end + + @spec check_safe_archive(binary() | [char()], [term()]) :: :ok | {:error, reason :: term()} + defp check_safe_archive(archive, opts \\ []) do + case check_safe_archive_and_maybe_list_files(archive, opts, false) do + {:ok, _} -> :ok + error -> error + end + end + + @spec check_safe_file_list([text()], text()) :: :ok | {:error, term()} + defp check_safe_file_list([], _), do: :ok + + defp check_safe_file_list([path | tail], cwd) do + with {_, true} <- {:path, is_safe_path?(path)}, + {_, {:ok, fstat}} <- {:stat, File.stat(Path.expand(path, cwd))}, + {_, true} <- {:type, is_safe_type?(fstat.type)} do + check_safe_file_list(tail, cwd) + else + {:path, _} -> + {:error, "Unsafe path escaping cwd: #{path}"} + + {:stat, e} -> + {:error, "Unable to check file type of #{path}: #{inspect(e)}"} + + {:type, _} -> + {:error, "Unsafe type at #{path}"} + end + end + + defp check_safe_file_list(_, _), do: {:error, "Malformed file_list"} + + @doc """ + Checks whether the archive data contais file entries for all paths from fset + + Note this really only accepts entries corresponding to regular _files_, + if a path is contained as for example an directory, this does not count as a match. 
+ """ + @spec contains_all_data?(binary(), MapSet.t()) :: true | false + def contains_all_data?(archive_data, fset) do + with {:ok, table} <- :zip.table(archive_data) do + remaining = + Enum.reduce(table, fset, fn + {:zip_file, path, info, _comment, _offset, _comp_size}, fset -> + if elem(info, 2) == :regular do + MapSet.delete(fset, path) + else + fset + end + + _, _ -> + fset + end) + |> MapSet.size() + + if remaining == 0, do: true, else: false + else + _ -> false + end + end + + @doc """ + List all file entries in ZIP, or error if invalid or unsafe. + + Note this really only lists regular files, no directories, ZIP comments or other types! + """ + @spec list_dir_file(text()) :: {:ok, [String.t()]} | {:error, reason :: term()} + def list_dir_file(archive) do + path = to_charlist(archive) + check_safe_archive_and_list_files(path) + end + + defp stringify_zip({:ok, {fname, data}}), do: {:ok, {to_string(fname), data}} + defp stringify_zip({:ok, fname}), do: {:ok, to_string(fname)} + defp stringify_zip(ret), do: ret + + @spec zip(text(), text(), [text()], boolean()) :: + {:ok, file_name :: String.t()} + | {:ok, {file_name :: String.t(), file_data :: binary()}} + | {:error, reason :: term()} + def zip(name, file_list, cwd, memory \\ false) do + opts = [{:cwd, to_charlist(cwd)}] + opts = if memory, do: [:memory | opts], else: opts + + with :ok <- check_safe_file_list(file_list, cwd) do + file_list = for f <- file_list, do: to_charlist(f) + name = to_charlist(name) + stringify_zip(:zip.zip(name, file_list, opts)) + end + end + + @spec unzip_file(text(), text(), [text()] | nil) :: + {:ok, [String.t()]} + | {:error, reason :: term()} + | {:error, {name :: text(), reason :: term()}} + def unzip_file(archive, target_dir, file_list \\ nil) do + do_unzip(to_charlist(archive), to_charlist(target_dir), file_list) + end + + @spec unzip_data(binary(), text(), [text()] | nil) :: + {:ok, [String.t()]} + | {:error, reason :: term()} + | {:error, {name :: text(), reason :: term()}} + def unzip_data(archive, target_dir, file_list \\ nil) do + do_unzip(archive, to_charlist(target_dir), file_list) + end + + defp stringify_unzip({:ok, [{_fname, _data} | _] = filebinlist}), + do: {:ok, Enum.map(filebinlist, fn {fname, data} -> {to_string(fname), data} end)} + + defp stringify_unzip({:ok, [_fname | _] = filelist}), + do: {:ok, Enum.map(filelist, fn fname -> to_string(fname) end)} + + defp stringify_unzip({:error, {fname, term}}), do: {:error, {to_string(fname), term}} + defp stringify_unzip(ret), do: ret + + @spec do_unzip(binary() | [char()], text(), [text()] | nil) :: + {:ok, [String.t()]} + | {:error, reason :: term()} + | {:error, {name :: text(), reason :: term()}} + defp do_unzip(archive, target_dir, file_list) do + opts = + if file_list != nil do + [ + file_list: for(f <- file_list, do: to_charlist(f)), + cwd: target_dir + ] + else + [cwd: target_dir] + end + + with :ok <- check_safe_archive(archive) do + stringify_unzip(:zip.unzip(archive, opts)) + end + end +end diff --git a/lib/pleroma/search/meilisearch.ex b/lib/pleroma/search/meilisearch.ex index 9bba5b30f..cafae8099 100644 --- a/lib/pleroma/search/meilisearch.ex +++ b/lib/pleroma/search/meilisearch.ex @@ -122,6 +122,7 @@ defmodule Pleroma.Search.Meilisearch do # Only index public or unlisted Notes if not is_nil(object) and object.data["type"] == "Note" and not is_nil(object.data["content"]) and + not is_nil(object.data["published"]) and (Pleroma.Constants.as_public() in object.data["to"] or Pleroma.Constants.as_public() in object.data["cc"]) and 
object.data["content"] not in ["", "."] do diff --git a/lib/pleroma/upload/filter/analyze_metadata.ex b/lib/pleroma/upload/filter/analyze_metadata.ex index 7ee643277..a8480bf36 100644 --- a/lib/pleroma/upload/filter/analyze_metadata.ex +++ b/lib/pleroma/upload/filter/analyze_metadata.ex @@ -90,9 +90,13 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadata do {:ok, rgb} = if Image.has_alpha?(resized_image) do # remove alpha channel - resized_image - |> Operation.extract_band!(0, n: 3) - |> Image.write_to_binary() + case Operation.extract_band(resized_image, 0, n: 3) do + {:ok, data} -> + Image.write_to_binary(data) + + _ -> + Image.write_to_binary(resized_image) + end else Image.write_to_binary(resized_image) end diff --git a/lib/pleroma/upload/filter/dedupe.ex b/lib/pleroma/upload/filter/dedupe.ex index ef793d390..7b278d299 100644 --- a/lib/pleroma/upload/filter/dedupe.ex +++ b/lib/pleroma/upload/filter/dedupe.ex @@ -17,8 +17,16 @@ defmodule Pleroma.Upload.Filter.Dedupe do |> Base.encode16(case: :lower) filename = shasum <> "." <> extension - {:ok, :filtered, %Upload{upload | id: shasum, path: filename}} + + {:ok, :filtered, %Upload{upload | id: shasum, path: shard_path(filename)}} end def filter(_), do: {:ok, :noop} + + @spec shard_path(String.t()) :: String.t() + def shard_path( + <<a::binary-size(2), b::binary-size(2), c::binary-size(2), _::binary>> = filename + ) do + Path.join([a, b, c, filename]) + end end diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index 517009253..d9da9ede1 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -19,6 +19,7 @@ defmodule Pleroma.User do alias Pleroma.Emoji alias Pleroma.FollowingRelationship alias Pleroma.Formatter + alias Pleroma.Hashtag alias Pleroma.HTML alias Pleroma.Keys alias Pleroma.MFA @@ -27,6 +28,7 @@ defmodule Pleroma.User do alias Pleroma.Registration alias Pleroma.Repo alias Pleroma.User + alias Pleroma.User.HashtagFollow alias Pleroma.UserRelationship alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Builder @@ -174,6 +176,12 @@ defmodule Pleroma.User do has_many(:outgoing_relationships, UserRelationship, foreign_key: :source_id) has_many(:incoming_relationships, UserRelationship, foreign_key: :target_id) + many_to_many(:followed_hashtags, Hashtag, + on_replace: :delete, + on_delete: :delete_all, + join_through: HashtagFollow + ) + for {relationship_type, [ {outgoing_relation, outgoing_relation_target}, @@ -419,6 +427,11 @@ defmodule Pleroma.User do end end + def image_description(image, default \\ "") + + def image_description(%{"name" => name}, _default), do: name + def image_description(_, default), do: default + # Should probably be renamed or removed @spec ap_id(User.t()) :: String.t() def ap_id(%User{nickname: nickname}), do: "#{Endpoint.url()}/users/#{nickname}" @@ -2856,4 +2869,54 @@ defmodule Pleroma.User do birthday_month: month }) end + + defp maybe_load_followed_hashtags(%User{followed_hashtags: follows} = user) + when is_list(follows), + do: user + + defp maybe_load_followed_hashtags(%User{} = user) do + followed_hashtags = HashtagFollow.get_by_user(user) + %{user | followed_hashtags: followed_hashtags} + end + + def followed_hashtags(%User{followed_hashtags: follows}) + when is_list(follows), + do: follows + + def followed_hashtags(%User{} = user) do + {:ok, user} = + user + |> maybe_load_followed_hashtags() + |> set_cache() + + user.followed_hashtags + end + + def follow_hashtag(%User{} = user, %Hashtag{} = hashtag) do + Logger.debug("Follow hashtag #{hashtag.name} for user 
#{user.nickname}") + user = maybe_load_followed_hashtags(user) + + with {:ok, _} <- HashtagFollow.new(user, hashtag), + follows <- HashtagFollow.get_by_user(user), + %User{} = user <- user |> Map.put(:followed_hashtags, follows) do + user + |> set_cache() + end + end + + def unfollow_hashtag(%User{} = user, %Hashtag{} = hashtag) do + Logger.debug("Unfollow hashtag #{hashtag.name} for user #{user.nickname}") + user = maybe_load_followed_hashtags(user) + + with {:ok, _} <- HashtagFollow.delete(user, hashtag), + follows <- HashtagFollow.get_by_user(user), + %User{} = user <- user |> Map.put(:followed_hashtags, follows) do + user + |> set_cache() + end + end + + def following_hashtag?(%User{} = user, %Hashtag{} = hashtag) do + not is_nil(HashtagFollow.get(user, hashtag)) + end end diff --git a/lib/pleroma/user/backup.ex b/lib/pleroma/user/backup.ex index d77d49890..4b3092fdb 100644 --- a/lib/pleroma/user/backup.ex +++ b/lib/pleroma/user/backup.ex @@ -22,6 +22,7 @@ defmodule Pleroma.User.Backup do alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.UserView alias Pleroma.Workers.BackupWorker + alias Pleroma.SafeZip @type t :: %__MODULE__{} @@ -179,12 +180,12 @@ defmodule Pleroma.User.Backup do end @files [ - ~c"actor.json", - ~c"outbox.json", - ~c"likes.json", - ~c"bookmarks.json", - ~c"followers.json", - ~c"following.json" + "actor.json", + "outbox.json", + "likes.json", + "bookmarks.json", + "followers.json", + "following.json" ] @spec run(t()) :: {:ok, t()} | {:error, :failed} @@ -200,7 +201,7 @@ defmodule Pleroma.User.Backup do {_, :ok} <- {:followers, followers(backup.tempdir, backup.user)}, {_, :ok} <- {:following, following(backup.tempdir, backup.user)}, {_, {:ok, _zip_path}} <- - {:zip, :zip.create(to_charlist(tempfile), @files, cwd: to_charlist(backup.tempdir))}, + {:zip, SafeZip.zip(tempfile, @files, backup.tempdir)}, {_, {:ok, %File.Stat{size: zip_size}}} <- {:filestat, File.stat(tempfile)}, {:ok, updated_backup} <- update_record(backup, %{file_size: zip_size}) do {:ok, updated_backup} @@ -246,7 +247,13 @@ defmodule Pleroma.User.Backup do defp actor(dir, user) do with {:ok, json} <- UserView.render("user.json", %{user: user}) - |> Map.merge(%{"likes" => "likes.json", "bookmarks" => "bookmarks.json"}) + |> Map.merge(%{ + "bookmarks" => "bookmarks.json", + "likes" => "likes.json", + "outbox" => "outbox.json", + "followers" => "followers.json", + "following" => "following.json" + }) |> Jason.encode() do File.write(Path.join(dir, "actor.json"), json) end diff --git a/lib/pleroma/user/hashtag_follow.ex b/lib/pleroma/user/hashtag_follow.ex new file mode 100644 index 000000000..3e28b130b --- /dev/null +++ b/lib/pleroma/user/hashtag_follow.ex @@ -0,0 +1,55 @@ +defmodule Pleroma.User.HashtagFollow do + use Ecto.Schema + import Ecto.Query + import Ecto.Changeset + + alias Pleroma.Hashtag + alias Pleroma.Repo + alias Pleroma.User + + schema "user_follows_hashtag" do + belongs_to(:user, User, type: FlakeId.Ecto.CompatType) + belongs_to(:hashtag, Hashtag) + end + + def changeset(%__MODULE__{} = user_hashtag_follow, attrs) do + user_hashtag_follow + |> cast(attrs, [:user_id, :hashtag_id]) + |> unique_constraint(:hashtag_id, + name: :user_hashtag_follows_user_id_hashtag_id_index, + message: "already following" + ) + |> validate_required([:user_id, :hashtag_id]) + end + + def new(%User{} = user, %Hashtag{} = hashtag) do + %__MODULE__{} + |> changeset(%{user_id: user.id, hashtag_id: hashtag.id}) + |> Repo.insert(on_conflict: :nothing) + end + + def delete(%User{} = user, 
%Hashtag{} = hashtag) do + with %__MODULE__{} = user_hashtag_follow <- get(user, hashtag) do + Repo.delete(user_hashtag_follow) + else + _ -> {:ok, nil} + end + end + + def get(%User{} = user, %Hashtag{} = hashtag) do + from(hf in __MODULE__) + |> where([hf], hf.user_id == ^user.id and hf.hashtag_id == ^hashtag.id) + |> Repo.one() + end + + def get_by_user(%User{} = user) do + user + |> followed_hashtags_query() + |> Repo.all() + end + + def followed_hashtags_query(%User{} = user) do + Ecto.assoc(user, :followed_hashtags) + |> Ecto.Query.order_by([h], desc: h.id) + end +end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index a2a94a0ff..62c7a7b31 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -924,6 +924,31 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do ) end + # Essentially, either look for activities addressed to `recipients`, _OR_ ones + # that reference a hashtag that the user follows + # Firstly, two fallbacks in case there's no hashtag constraint, or the user doesn't + # follow any + defp restrict_recipients_or_hashtags(query, recipients, user, nil) do + restrict_recipients(query, recipients, user) + end + + defp restrict_recipients_or_hashtags(query, recipients, user, []) do + restrict_recipients(query, recipients, user) + end + + defp restrict_recipients_or_hashtags(query, recipients, _user, hashtag_ids) do + from([activity, object] in query) + |> join(:left, [activity, object], hto in "hashtags_objects", + on: hto.object_id == object.id, + as: :hto + ) + |> where( + [activity, object, hto: hto], + (hto.hashtag_id in ^hashtag_ids and ^Constants.as_public() in activity.recipients) or + fragment("? && ?", ^recipients, activity.recipients) + ) + end + defp restrict_local(query, %{local_only: true}) do from(activity in query, where: activity.local == true) end @@ -1414,7 +1439,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do |> maybe_preload_report_notes(opts) |> maybe_set_thread_muted_field(opts) |> maybe_order(opts) - |> restrict_recipients(recipients, opts[:user]) + |> restrict_recipients_or_hashtags(recipients, opts[:user], opts[:followed_hashtags]) |> restrict_replies(opts) |> restrict_since(opts) |> restrict_local(opts) @@ -1542,16 +1567,23 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do defp get_actor_url(_url), do: nil - defp normalize_image(%{"url" => url}) do + defp normalize_image(%{"url" => url} = data) do %{ "type" => "Image", "url" => [%{"href" => url}] } + |> maybe_put_description(data) end defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image() defp normalize_image(_), do: nil + defp maybe_put_description(map, %{"name" => description}) when is_binary(description) do + Map.put(map, "name", description) + end + + defp maybe_put_description(map, _), do: map + defp object_to_user_data(data, additional) do fields = data diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex index a08eda5f4..7ac0bbab4 100644 --- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex +++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex @@ -482,7 +482,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do |> put_status(:forbidden) |> json(message) - {:error, message} -> + {:error, message} when is_binary(message) -> conn |> put_status(:bad_request) |> json(message) diff --git a/lib/pleroma/web/activity_pub/mrf.ex 
b/lib/pleroma/web/activity_pub/mrf.ex index bc418d908..51ab476b7 100644 --- a/lib/pleroma/web/activity_pub/mrf.ex +++ b/lib/pleroma/web/activity_pub/mrf.ex @@ -108,6 +108,14 @@ defmodule Pleroma.Web.ActivityPub.MRF do def filter(%{} = object), do: get_policies() |> filter(object) + def id_filter(policies, id) when is_binary(id) do + policies + |> Enum.filter(&function_exported?(&1, :id_filter, 1)) + |> Enum.all?(& &1.id_filter(id)) + end + + def id_filter(id) when is_binary(id), do: get_policies() |> id_filter(id) + @impl true def pipeline_filter(%{} = message, meta) do object = meta[:object_data] diff --git a/lib/pleroma/web/activity_pub/mrf/drop_policy.ex b/lib/pleroma/web/activity_pub/mrf/drop_policy.ex index e4fcc9935..cf07db7f3 100644 --- a/lib/pleroma/web/activity_pub/mrf/drop_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/drop_policy.ex @@ -14,5 +14,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.DropPolicy do end @impl true + def id_filter(id) do + Logger.debug("REJECTING #{id}") + false + end + + @impl true def describe, do: {:ok, %{}} end diff --git a/lib/pleroma/web/activity_pub/mrf/policy.ex b/lib/pleroma/web/activity_pub/mrf/policy.ex index 54ca4b735..08bcac08a 100644 --- a/lib/pleroma/web/activity_pub/mrf/policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/policy.ex @@ -4,6 +4,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.Policy do @callback filter(Pleroma.Activity.t()) :: {:ok | :reject, Pleroma.Activity.t()} + @callback id_filter(String.t()) :: boolean() @callback describe() :: {:ok | :error, map()} @callback config_description() :: %{ optional(:children) => [map()], @@ -13,5 +14,5 @@ defmodule Pleroma.Web.ActivityPub.MRF.Policy do description: String.t() } @callback history_awareness() :: :auto | :manual - @optional_callbacks config_description: 0, history_awareness: 0 + @optional_callbacks config_description: 0, history_awareness: 0, id_filter: 1 end diff --git a/lib/pleroma/web/activity_pub/mrf/remote_report_policy.ex b/lib/pleroma/web/activity_pub/mrf/remote_report_policy.ex new file mode 100644 index 000000000..fa0610bf1 --- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/remote_report_policy.ex @@ -0,0 +1,118 @@ +defmodule Pleroma.Web.ActivityPub.MRF.RemoteReportPolicy do + @moduledoc "Drop remote reports if they don't contain enough information." 
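# NOTE (editor's illustration, not part of this patch): with the usual MRF wiring,
# an admin would enable this policy and tune the options documented by
# config_description/0 further down from their config file, roughly like so; the
# group and keys mirror that description, and the values are only example choices:
#
#   config :pleroma, :mrf,
#     policies: [Pleroma.Web.ActivityPub.MRF.RemoteReportPolicy]
#
#   config :pleroma, :mrf_remote_report,
#     reject_all: false,
#     reject_anonymous: true,
#     reject_third_party: true,
#     reject_empty_message: true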
+  @behaviour Pleroma.Web.ActivityPub.MRF.Policy
+
+  alias Pleroma.Config
+
+  @impl true
+  def filter(%{"type" => "Flag"} = object) do
+    with {_, false} <- {:local, local?(object)},
+         {:ok, _} <- maybe_reject_all(object),
+         {:ok, _} <- maybe_reject_anonymous(object),
+         {:ok, _} <- maybe_reject_third_party(object),
+         {:ok, _} <- maybe_reject_empty_message(object) do
+      {:ok, object}
+    else
+      {:local, true} -> {:ok, object}
+      {:reject, message} -> {:reject, message}
+      error -> {:reject, error}
+    end
+  end
+
+  def filter(object), do: {:ok, object}
+
+  defp maybe_reject_all(object) do
+    if Config.get([:mrf_remote_report, :reject_all]) do
+      {:reject, "[RemoteReportPolicy] Remote report"}
+    else
+      {:ok, object}
+    end
+  end
+
+  defp maybe_reject_anonymous(%{"actor" => actor} = object) do
+    with true <- Config.get([:mrf_remote_report, :reject_anonymous]),
+         %URI{path: "/actor"} <- URI.parse(actor) do
+      {:reject, "[RemoteReportPolicy] Anonymous: #{actor}"}
+    else
+      _ -> {:ok, object}
+    end
+  end
+
+  defp maybe_reject_third_party(%{"object" => objects} = object) do
+    {_, to} =
+      case objects do
+        [head | tail] when is_binary(head) -> {tail, head}
+        s when is_binary(s) -> {[], s}
+        _ -> {[], ""}
+      end
+
+    with true <- Config.get([:mrf_remote_report, :reject_third_party]),
+         false <- String.starts_with?(to, Pleroma.Web.Endpoint.url()) do
+      {:reject, "[RemoteReportPolicy] Third-party: #{to}"}
+    else
+      _ -> {:ok, object}
+    end
+  end
+
+  defp maybe_reject_empty_message(%{"content" => content} = object)
+       when is_binary(content) and content != "" do
+    {:ok, object}
+  end
+
+  defp maybe_reject_empty_message(object) do
+    if Config.get([:mrf_remote_report, :reject_empty_message]) do
+      {:reject, "[RemoteReportPolicy] No content"}
+    else
+      {:ok, object}
+    end
+  end
+
+  defp local?(%{"actor" => actor}) do
+    String.starts_with?(actor, Pleroma.Web.Endpoint.url())
+  end
+
+  @impl true
+  def describe do
+    mrf_remote_report =
+      Config.get(:mrf_remote_report)
+      |> Enum.into(%{})
+
+    {:ok, %{mrf_remote_report: mrf_remote_report}}
+  end
+
+  @impl true
+  def config_description do
+    %{
+      key: :mrf_remote_report,
+      related_policy: "Pleroma.Web.ActivityPub.MRF.RemoteReportPolicy",
+      label: "MRF Remote Report",
+      description: "Drop remote reports if they don't contain enough information.",
+      children: [
+        %{
+          key: :reject_all,
+          type: :boolean,
+          description: "Reject all remote reports?
(this option takes precedence)", + suggestions: [false] + }, + %{ + key: :reject_anonymous, + type: :boolean, + description: "Reject anonymous remote reports?", + suggestions: [true] + }, + %{ + key: :reject_third_party, + type: :boolean, + description: "Reject reports on users from third-party instances?", + suggestions: [true] + }, + %{ + key: :reject_empty_message, + type: :boolean, + description: "Reject remote reports with no message?", + suggestions: [true] + } + ] + } + end +end diff --git a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex index ae7f18bfe..a97e8db7b 100644 --- a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex @@ -192,6 +192,18 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do end @impl true + def id_filter(id) do + host_info = URI.parse(id) + + with {:ok, _} <- check_accept(host_info, %{}), + {:ok, _} <- check_reject(host_info, %{}) do + true + else + _ -> false + end + end + + @impl true def filter(%{"type" => "Delete", "actor" => actor} = activity) do %{host: actor_host} = URI.parse(actor) diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index b3043b93a..ee12f3ebf 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -11,6 +11,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do @behaviour Pleroma.Web.ActivityPub.ObjectValidator.Validating + import Pleroma.Constants, only: [activity_types: 0, object_types: 0] + alias Pleroma.Activity alias Pleroma.EctoType.ActivityPub.ObjectValidators alias Pleroma.Object @@ -24,6 +26,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator alias Pleroma.Web.ActivityPub.ObjectValidators.BlockValidator alias Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CreateGenericValidator alias Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator @@ -38,6 +41,16 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do @impl true def validate(object, meta) + # This overload works together with the InboxGuardPlug + # and ensures that we are not accepting any activity type + # that cannot pass InboxGuardPlug. + # If we want to support any more activity types, make sure to + # add it in Pleroma.Constants's activity_types or object_types, + # and, if applicable, allowed_activity_types_from_strangers. 
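# NOTE (editor's illustration, not part of this patch): the guard clause below is
# matched before any type-specific validator, so a payload whose type appears in
# neither list is rejected outright; assuming "SomethingUnknown" is in neither
# activity_types() nor object_types(), one would get roughly:
#
#   Pleroma.Web.ActivityPub.ObjectValidator.validate(%{"type" => "SomethingUnknown"}, [])
#   #=> {:error, :not_allowed_object_type}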
+ def validate(%{"type" => type}, _meta) + when type not in activity_types() and type not in object_types(), + do: {:error, :not_allowed_object_type} + def validate(%{"type" => "Block"} = block_activity, meta) do with {:ok, block_activity} <- block_activity @@ -103,7 +116,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do meta ) when objtype in ~w[Question Answer Audio Video Image Event Article Note Page] do - with {:ok, object_data} <- cast_and_apply_and_stringify_with_history(object), + with {:ok, object_data} <- + object + |> CommonFixes.maybe_add_language_from_activity(create_activity) + |> cast_and_apply_and_stringify_with_history(), meta = Keyword.put(meta, :object_data, object_data), {:ok, create_activity} <- create_activity @@ -153,11 +169,15 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do ) when objtype in ~w[Question Answer Audio Video Event Article Note Page] do with {_, false} <- {:local, Access.get(meta, :local, false)}, - {_, {:ok, object_data, _}} <- {:object_validation, validate(object, meta)}, + {_, {:ok, object_data, _}} <- + {:object_validation, + object + |> CommonFixes.maybe_add_language_from_activity(update_activity) + |> validate(meta)}, meta = Keyword.put(meta, :object_data, object_data), {:ok, update_activity} <- update_activity - |> UpdateValidator.cast_and_validate() + |> UpdateValidator.cast_and_validate(meta) |> Ecto.Changeset.apply_action(:insert) do update_activity = stringify_keys(update_activity) {:ok, update_activity, meta} @@ -165,7 +185,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do {:local, _} -> with {:ok, object} <- update_activity - |> UpdateValidator.cast_and_validate() + |> UpdateValidator.cast_and_validate(meta) |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} @@ -195,9 +215,16 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do "Answer" -> AnswerValidator end + cast_func = + if type == "Update" do + fn o -> validator.cast_and_validate(o, meta) end + else + fn o -> validator.cast_and_validate(o) end + end + with {:ok, object} <- object - |> validator.cast_and_validate() + |> cast_func.() |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} diff --git a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex index 1b5b2e8fb..81ab354fe 100644 --- a/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/article_note_page_validator.ex @@ -30,7 +30,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator do def cast_and_apply(data) do data - |> cast_data + |> cast_data() |> apply_action(:insert) end @@ -85,8 +85,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator do |> fix_replies() |> fix_attachments() |> CommonFixes.fix_quote_url() + |> CommonFixes.fix_likes() |> Transmogrifier.fix_emoji() |> Transmogrifier.fix_content_map() + |> CommonFixes.maybe_add_language() + |> CommonFixes.maybe_add_content_map() end def changeset(struct, data) do diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex index 65ac6bb93..034c6f33f 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_image_video_validator.ex @@ 
-100,6 +100,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioImageVideoValidator do |> CommonFixes.fix_actor() |> CommonFixes.fix_object_defaults() |> CommonFixes.fix_quote_url() + |> CommonFixes.fix_likes() |> Transmogrifier.fix_emoji() |> fix_url() |> fix_content() diff --git a/lib/pleroma/web/activity_pub/object_validators/common_fields.ex b/lib/pleroma/web/activity_pub/object_validators/common_fields.ex index 1a5d02601..22cf0cc05 100644 --- a/lib/pleroma/web/activity_pub/object_validators/common_fields.ex +++ b/lib/pleroma/web/activity_pub/object_validators/common_fields.ex @@ -31,6 +31,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFields do defmacro object_fields do quote bind_quoted: binding() do field(:content, :string) + field(:contentMap, ObjectValidators.ContentLanguageMap) field(:published, ObjectValidators.DateTime) field(:updated, ObjectValidators.DateTime) @@ -58,6 +59,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFields do field(:like_count, :integer, default: 0) field(:announcement_count, :integer, default: 0) field(:quotes_count, :integer, default: 0) + field(:language, ObjectValidators.LanguageCode) field(:inReplyTo, ObjectValidators.ObjectID) field(:quoteUrl, ObjectValidators.ObjectID) field(:url, ObjectValidators.BareUri) diff --git a/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex b/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex index 4699029d4..87d3e0c8f 100644 --- a/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex +++ b/lib/pleroma/web/activity_pub/object_validators/common_fixes.ex @@ -11,6 +11,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Utils + import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode, + only: [good_locale_code?: 1] + + import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] + require Pleroma.Constants def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do @@ -114,6 +119,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do def fix_quote_url(data), do: data + # On Mastodon, `"likes"` attribute includes an inlined `Collection` with `totalItems`, + # not a list of users. 
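# NOTE (editor's illustration, not part of this patch): fix_likes/1 below simply
# drops such an inlined collection so it never reaches the validators, while any
# other shape of "likes" passes through untouched, e.g.
#
#   fix_likes(%{"id" => "https://example.com/note/1", "likes" => %{"type" => "Collection", "totalItems" => 3}})
#   #=> %{"id" => "https://example.com/note/1"}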
+ # https://github.com/mastodon/mastodon/pull/32007 + def fix_likes(%{"likes" => %{}} = data), do: Map.drop(data, ["likes"]) + + def fix_likes(data), do: data + # https://codeberg.org/fediverse/fep/src/branch/main/fep/e232/fep-e232.md def object_link_tag?(%{ "type" => "Link", @@ -125,4 +137,60 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do end def object_link_tag?(_), do: false + + def maybe_add_language_from_activity(object, activity) do + language = get_language_from_context(activity) + + if language do + Map.put(object, "language", language) + else + object + end + end + + def maybe_add_language(object) do + language = + [ + get_language_from_context(object), + get_language_from_content_map(object) + ] + |> Enum.find(&good_locale_code?(&1)) + + if language do + Map.put(object, "language", language) + else + object + end + end + + defp get_language_from_context(%{"@context" => context}) when is_list(context) do + case context + |> Enum.find(fn + %{"@language" => language} -> language != "und" + _ -> nil + end) do + %{"@language" => language} -> language + _ -> nil + end + end + + defp get_language_from_context(_), do: nil + + defp get_language_from_content_map(%{"contentMap" => content_map, "content" => source_content}) do + content_groups = Map.to_list(content_map) + + case Enum.find(content_groups, fn {_, content} -> content == source_content end) do + {language, _} -> language + _ -> nil + end + end + + defp get_language_from_content_map(_), do: nil + + def maybe_add_content_map(%{"language" => language, "content" => content} = object) + when not_empty_string(language) do + Map.put(object, "contentMap", Map.put(%{}, language, content)) + end + + def maybe_add_content_map(object), do: object end diff --git a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex index ab204f69a..ea14d6aca 100644 --- a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex @@ -28,7 +28,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do def cast_and_apply(data) do data - |> cast_data + |> cast_data() |> apply_action(:insert) end @@ -38,6 +38,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do |> validate_data() end + @spec cast_data(map()) :: map() def cast_data(data) do %__MODULE__{} |> changeset(data) @@ -47,7 +48,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do data |> CommonFixes.fix_actor() |> CommonFixes.fix_object_defaults() + |> CommonFixes.fix_likes() |> Transmogrifier.fix_emoji() + |> CommonFixes.maybe_add_language() + |> CommonFixes.maybe_add_content_map() end def changeset(struct, data) do diff --git a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex index 7f9d4d648..21940f4f1 100644 --- a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex @@ -64,6 +64,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do |> CommonFixes.fix_actor() |> CommonFixes.fix_object_defaults() |> CommonFixes.fix_quote_url() + |> CommonFixes.fix_likes() |> Transmogrifier.fix_emoji() |> fix_closed() end diff --git a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex index 
1e940a400..aab90235f 100644 --- a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex @@ -6,6 +6,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UpdateValidator do use Ecto.Schema alias Pleroma.EctoType.ActivityPub.ObjectValidators + alias Pleroma.Object + alias Pleroma.User import Ecto.Changeset import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations @@ -31,23 +33,50 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UpdateValidator do |> cast(data, __schema__(:fields)) end - defp validate_data(cng) do + defp validate_data(cng, meta) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Update"]) |> validate_actor_presence() - |> validate_updating_rights() + |> validate_updating_rights(meta) end - def cast_and_validate(data) do + def cast_and_validate(data, meta \\ []) do data |> cast_data - |> validate_data + |> validate_data(meta) end - # For now we only support updating users, and here the rule is easy: - # object id == actor id - def validate_updating_rights(cng) do + def validate_updating_rights(cng, meta) do + if meta[:local] do + validate_updating_rights_local(cng) + else + validate_updating_rights_remote(cng) + end + end + + # For local Updates, verify the actor can edit the object + def validate_updating_rights_local(cng) do + actor = get_field(cng, :actor) + updated_object = get_field(cng, :object) + + if {:ok, actor} == ObjectValidators.ObjectID.cast(updated_object) do + cng + else + with %User{} = user <- User.get_cached_by_ap_id(actor), + {_, %Object{} = orig_object} <- {:object, Object.normalize(updated_object)}, + :ok <- Object.authorize_access(orig_object, user) do + cng + else + _e -> + cng + |> add_error(:object, "Can't be updated by this actor") + end + end + end + + # For remote Updates, verify the host is the same. + def validate_updating_rights_remote(cng) do with actor = get_field(cng, :actor), object = get_field(cng, :object), {:ok, object_id} <- ObjectValidators.ObjectID.cast(object), diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index 2f8a7f8f2..4c9956c7a 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -16,12 +16,14 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.Builder alias Pleroma.Web.ActivityPub.ObjectValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.Pipeline alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.Federator import Ecto.Query + import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] require Pleroma.Constants @@ -166,7 +168,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def fix_quote_url_and_maybe_fetch(object, options \\ []) do quote_url = - case Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes.fix_quote_url(object) do + case CommonFixes.fix_quote_url(object) do %{"quoteUrl" => quote_url} -> quote_url _ -> nil end @@ -336,6 +338,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def fix_tag(object), do: object + # prefer content over contentMap + def fix_content_map(%{"content" => content} = object) when not_empty_string(content), do: object + # content map usually only has one language so this will do for now. 
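# NOTE (editor's illustration, not part of this patch): with the new clause above,
# an object that already carries a non-empty "content" string is returned as-is,
# e.g. fix_content_map(%{"content" => "<p>hi</p>", "contentMap" => %{"en" => "..."}})
# simply returns its argument; only objects without usable "content" fall through
# to the existing contentMap handling below.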
def fix_content_map(%{"contentMap" => content_map} = object) do content_groups = Map.to_list(content_map) @@ -716,6 +721,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do |> set_reply_to_uri |> set_quote_url |> set_replies + |> CommonFixes.maybe_add_content_map() |> strip_internal_fields |> strip_internal_tags |> set_type @@ -750,12 +756,11 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do object_id |> Object.normalize(fetch: false) |> Map.get(:data) - |> prepare_object data = data - |> Map.put("object", object) - |> Map.merge(Utils.make_json_ld_header()) + |> Map.put("object", prepare_object(object)) + |> Map.merge(Utils.make_json_ld_header(object)) |> Map.delete("bcc") {:ok, data} @@ -763,14 +768,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do def prepare_outgoing(%{"type" => "Update", "object" => %{"type" => objtype} = object} = data) when objtype in Pleroma.Constants.updatable_object_types() do - object = - object - |> prepare_object - data = data - |> Map.put("object", object) - |> Map.merge(Utils.make_json_ld_header()) + |> Map.put("object", prepare_object(object)) + |> Map.merge(Utils.make_json_ld_header(object)) |> Map.delete("bcc") {:ok, data} @@ -840,7 +841,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do data |> strip_internal_fields |> maybe_fix_object_url - |> Map.merge(Utils.make_json_ld_header()) + |> Map.merge(Utils.make_json_ld_header(data)) {:ok, data} end diff --git a/lib/pleroma/web/activity_pub/utils.ex b/lib/pleroma/web/activity_pub/utils.ex index 6c792804d..f30c92abf 100644 --- a/lib/pleroma/web/activity_pub/utils.ex +++ b/lib/pleroma/web/activity_pub/utils.ex @@ -20,6 +20,7 @@ defmodule Pleroma.Web.ActivityPub.Utils do alias Pleroma.Web.Router.Helpers import Ecto.Query + import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] require Logger require Pleroma.Constants @@ -109,18 +110,24 @@ defmodule Pleroma.Web.ActivityPub.Utils do end end - def make_json_ld_header do + def make_json_ld_header(data \\ %{}) do %{ "@context" => [ "https://www.w3.org/ns/activitystreams", "#{Endpoint.url()}/schemas/litepub-0.1.jsonld", %{ - "@language" => "und" + "@language" => get_language(data) } ] } end + defp get_language(%{"language" => language}) when not_empty_string(language) do + language + end + + defp get_language(_), do: "und" + def make_date do DateTime.utc_now() |> DateTime.to_iso8601() end diff --git a/lib/pleroma/web/activity_pub/views/object_view.ex b/lib/pleroma/web/activity_pub/views/object_view.ex index 63caa915c..13b5b2542 100644 --- a/lib/pleroma/web/activity_pub/views/object_view.ex +++ b/lib/pleroma/web/activity_pub/views/object_view.ex @@ -9,7 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do alias Pleroma.Web.ActivityPub.Transmogrifier def render("object.json", %{object: %Object{} = object}) do - base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header() + base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(object.data) additional = Transmogrifier.prepare_object(object.data) Map.merge(base, additional) @@ -17,7 +17,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do def render("object.json", %{object: %Activity{data: %{"type" => activity_type}} = activity}) when activity_type in ["Create", "Listen"] do - base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header() + base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(activity.data) object = Object.normalize(activity, fetch: false) additional = @@ -28,7 +28,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do end def render("object.json", %{object: %Activity{} = 
activity}) do - base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header() + base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(activity.data) object_id = Object.normalize(activity, id_only: true) additional = diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex index 937e4fd67..61975387b 100644 --- a/lib/pleroma/web/activity_pub/views/user_view.ex +++ b/lib/pleroma/web/activity_pub/views/user_view.ex @@ -127,10 +127,25 @@ defmodule Pleroma.Web.ActivityPub.UserView do "capabilities" => capabilities, "alsoKnownAs" => user.also_known_as, "vcard:bday" => birthday, - "webfinger" => "acct:#{User.full_nickname(user)}" + "webfinger" => "acct:#{User.full_nickname(user)}", + "published" => Pleroma.Web.CommonAPI.Utils.to_masto_date(user.inserted_at) } - |> Map.merge(maybe_make_image(&User.avatar_url/2, "icon", user)) - |> Map.merge(maybe_make_image(&User.banner_url/2, "image", user)) + |> Map.merge( + maybe_make_image( + &User.avatar_url/2, + User.image_description(user.avatar, nil), + "icon", + user + ) + ) + |> Map.merge( + maybe_make_image( + &User.banner_url/2, + User.image_description(user.banner, nil), + "image", + user + ) + ) |> Map.merge(Utils.make_json_ld_header()) end @@ -305,16 +320,24 @@ defmodule Pleroma.Web.ActivityPub.UserView do end end - defp maybe_make_image(func, key, user) do + defp maybe_make_image(func, description, key, user) do if image = func.(user, no_default: true) do %{ - key => %{ - "type" => "Image", - "url" => image - } + key => + %{ + "type" => "Image", + "url" => image + } + |> maybe_put_description(description) } else %{} end end + + defp maybe_put_description(map, description) when is_binary(description) do + Map.put(map, "name", description) + end + + defp maybe_put_description(map, _description), do: map end diff --git a/lib/pleroma/web/api_spec.ex b/lib/pleroma/web/api_spec.ex index 314782818..63409870e 100644 --- a/lib/pleroma/web/api_spec.ex +++ b/lib/pleroma/web/api_spec.ex @@ -139,7 +139,8 @@ defmodule Pleroma.Web.ApiSpec do "Search", "Status actions", "Media attachments", - "Bookmark folders" + "Bookmark folders", + "Tags" ] }, %{ diff --git a/lib/pleroma/web/api_spec/operations/media_operation.ex b/lib/pleroma/web/api_spec/operations/media_operation.ex index e6df21246..588b42e06 100644 --- a/lib/pleroma/web/api_spec/operations/media_operation.ex +++ b/lib/pleroma/web/api_spec/operations/media_operation.ex @@ -121,7 +121,7 @@ defmodule Pleroma.Web.ApiSpec.MediaOperation do security: [%{"oAuth" => ["write:media"]}], requestBody: Helpers.request_body("Parameters", create_request()), responses: %{ - 202 => Operation.response("Media", "application/json", Attachment), + 200 => Operation.response("Media", "application/json", Attachment), 400 => Operation.response("Media", "application/json", ApiError), 422 => Operation.response("Media", "application/json", ApiError), 500 => Operation.response("Media", "application/json", ApiError) diff --git a/lib/pleroma/web/api_spec/operations/tag_operation.ex b/lib/pleroma/web/api_spec/operations/tag_operation.ex new file mode 100644 index 000000000..ce4f4ad5b --- /dev/null +++ b/lib/pleroma/web/api_spec/operations/tag_operation.ex @@ -0,0 +1,103 @@ +defmodule Pleroma.Web.ApiSpec.TagOperation do + alias OpenApiSpex.Operation + alias OpenApiSpex.Schema + alias Pleroma.Web.ApiSpec.Schemas.ApiError + alias Pleroma.Web.ApiSpec.Schemas.Tag + + def open_api_operation(action) do + operation = String.to_existing_atom("#{action}_operation") + apply(__MODULE__, operation, 
[]) + end + + def show_operation do + %Operation{ + tags: ["Tags"], + summary: "Hashtag", + description: "View a hashtag", + security: [%{"oAuth" => ["read"]}], + parameters: [id_param()], + operationId: "TagController.show", + responses: %{ + 200 => Operation.response("Hashtag", "application/json", Tag), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def follow_operation do + %Operation{ + tags: ["Tags"], + summary: "Follow a hashtag", + description: "Follow a hashtag", + security: [%{"oAuth" => ["write:follows"]}], + parameters: [id_param()], + operationId: "TagController.follow", + responses: %{ + 200 => Operation.response("Hashtag", "application/json", Tag), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def unfollow_operation do + %Operation{ + tags: ["Tags"], + summary: "Unfollow a hashtag", + description: "Unfollow a hashtag", + security: [%{"oAuth" => ["write:follows"]}], + parameters: [id_param()], + operationId: "TagController.unfollow", + responses: %{ + 200 => Operation.response("Hashtag", "application/json", Tag), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def show_followed_operation do + %Operation{ + tags: ["Tags"], + summary: "Followed hashtags", + description: "View a list of hashtags the currently authenticated user is following", + parameters: pagination_params(), + security: [%{"oAuth" => ["read:follows"]}], + operationId: "TagController.show_followed", + responses: %{ + 200 => + Operation.response("Hashtags", "application/json", %Schema{ + type: :array, + items: Tag + }), + 403 => Operation.response("Forbidden", "application/json", ApiError), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + defp id_param do + Operation.parameter( + :id, + :path, + %Schema{type: :string}, + "Name of the hashtag" + ) + end + + def pagination_params do + [ + Operation.parameter(:max_id, :query, :integer, "Return items older than this ID"), + Operation.parameter( + :min_id, + :query, + :integer, + "Return the oldest items newer than this ID" + ), + Operation.parameter( + :limit, + :query, + %Schema{type: :integer, default: 20}, + "Maximum number of items to return. 
Will be ignored if it's more than 40" + ) + ] + end +end diff --git a/lib/pleroma/web/api_spec/schemas/tag.ex b/lib/pleroma/web/api_spec/schemas/tag.ex index 66bf0ca71..05ff10cd3 100644 --- a/lib/pleroma/web/api_spec/schemas/tag.ex +++ b/lib/pleroma/web/api_spec/schemas/tag.ex @@ -17,11 +17,22 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Tag do type: :string, format: :uri, description: "A link to the hashtag on the instance" + }, + following: %Schema{ + type: :boolean, + description: "Whether the authenticated user is following the hashtag" + }, + history: %Schema{ + type: :array, + items: %Schema{type: :string}, + description: + "A list of historical uses of the hashtag (not implemented, for compatibility only)" } }, example: %{ name: "cofe", - url: "https://lain.com/tag/cofe" + url: "https://lain.com/tag/cofe", + following: false } }) end diff --git a/lib/pleroma/web/auth/authenticator.ex b/lib/pleroma/web/auth/authenticator.ex index 01bf1575c..95be892cd 100644 --- a/lib/pleroma/web/auth/authenticator.ex +++ b/lib/pleroma/web/auth/authenticator.ex @@ -10,4 +10,9 @@ defmodule Pleroma.Web.Auth.Authenticator do @callback handle_error(Plug.Conn.t(), any()) :: any() @callback auth_template() :: String.t() | nil @callback oauth_consumer_template() :: String.t() | nil + + @callback change_password(Pleroma.User.t(), String.t(), String.t(), String.t()) :: + {:ok, Pleroma.User.t()} | {:error, term()} + + @optional_callbacks change_password: 4 end diff --git a/lib/pleroma/web/auth/ldap_authenticator.ex b/lib/pleroma/web/auth/ldap_authenticator.ex index ad5bc9863..ec6601fb9 100644 --- a/lib/pleroma/web/auth/ldap_authenticator.ex +++ b/lib/pleroma/web/auth/ldap_authenticator.ex @@ -3,18 +3,14 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.Auth.LDAPAuthenticator do + alias Pleroma.LDAP alias Pleroma.User - require Logger - - import Pleroma.Web.Auth.Helpers, only: [fetch_credentials: 1, fetch_user: 1] + import Pleroma.Web.Auth.Helpers, only: [fetch_credentials: 1] @behaviour Pleroma.Web.Auth.Authenticator @base Pleroma.Web.Auth.PleromaAuthenticator - @connection_timeout 10_000 - @search_timeout 10_000 - defdelegate get_registration(conn), to: @base defdelegate create_from_registration(conn, registration), to: @base defdelegate handle_error(conn, error), to: @base @@ -24,7 +20,7 @@ defmodule Pleroma.Web.Auth.LDAPAuthenticator do def get_user(%Plug.Conn{} = conn) do with {:ldap, true} <- {:ldap, Pleroma.Config.get([:ldap, :enabled])}, {:ok, {name, password}} <- fetch_credentials(conn), - %User{} = user <- ldap_user(name, password) do + %User{} = user <- LDAP.bind_user(name, password) do {:ok, user} else {:ldap, _} -> @@ -35,141 +31,12 @@ defmodule Pleroma.Web.Auth.LDAPAuthenticator do end end - defp ldap_user(name, password) do - ldap = Pleroma.Config.get(:ldap, []) - host = Keyword.get(ldap, :host, "localhost") - port = Keyword.get(ldap, :port, 389) - ssl = Keyword.get(ldap, :ssl, false) - tls = Keyword.get(ldap, :tls, false) - cacertfile = Keyword.get(ldap, :cacertfile) || CAStore.file_path() - - default_secure_opts = [ - verify: :verify_peer, - cacerts: decode_certfile(cacertfile), - customize_hostname_check: [ - fqdn_fun: fn _ -> to_charlist(host) end - ] - ] - - sslopts = Keyword.merge(default_secure_opts, Keyword.get(ldap, :sslopts, [])) - tlsopts = Keyword.merge(default_secure_opts, Keyword.get(ldap, :tlsopts, [])) - - # :sslopts can only be included in :eldap.open/2 when {ssl: true} - # or the connection will fail - options = - if ssl do - [{:port, port}, {:ssl, ssl}, {:sslopts, 
sslopts}, {:timeout, @connection_timeout}] - else - [{:port, port}, {:ssl, ssl}, {:timeout, @connection_timeout}] - end - - case :eldap.open([to_charlist(host)], options) do - {:ok, connection} -> - try do - cond do - ssl -> - :application.ensure_all_started(:ssl) - - tls -> - case :eldap.start_tls( - connection, - tlsopts, - @connection_timeout - ) do - :ok -> - :ok - - error -> - Logger.error("Could not start TLS: #{inspect(error)}") - :eldap.close(connection) - end - - true -> - :ok - end - - bind_user(connection, ldap, name, password) - after - :eldap.close(connection) - end - - {:error, error} -> - Logger.error("Could not open LDAP connection: #{inspect(error)}") - {:error, {:ldap_connection_error, error}} + def change_password(user, password, new_password, new_password) do + case LDAP.change_password(user.nickname, password, new_password) do + :ok -> {:ok, user} + e -> e end end - defp bind_user(connection, ldap, name, password) do - uid = Keyword.get(ldap, :uid, "cn") - base = Keyword.get(ldap, :base) - - case :eldap.simple_bind(connection, "#{uid}=#{name},#{base}", password) do - :ok -> - case fetch_user(name) do - %User{} = user -> - user - - _ -> - register_user(connection, base, uid, name) - end - - error -> - Logger.error("Could not bind LDAP user #{name}: #{inspect(error)}") - {:error, {:ldap_bind_error, error}} - end - end - - defp register_user(connection, base, uid, name) do - case :eldap.search(connection, [ - {:base, to_charlist(base)}, - {:filter, :eldap.equalityMatch(to_charlist(uid), to_charlist(name))}, - {:scope, :eldap.wholeSubtree()}, - {:timeout, @search_timeout} - ]) do - # The :eldap_search_result record structure changed in OTP 24.3 and added a controls field - # https://github.com/erlang/otp/pull/5538 - {:ok, {:eldap_search_result, [{:eldap_entry, _object, attributes}], _referrals}} -> - try_register(name, attributes) - - {:ok, {:eldap_search_result, [{:eldap_entry, _object, attributes}], _referrals, _controls}} -> - try_register(name, attributes) - - error -> - Logger.error("Couldn't register user because LDAP search failed: #{inspect(error)}") - {:error, {:ldap_search_error, error}} - end - end - - defp try_register(name, attributes) do - params = %{ - name: name, - nickname: name, - password: nil - } - - params = - case List.keyfind(attributes, ~c"mail", 0) do - {_, [mail]} -> Map.put_new(params, :email, :erlang.list_to_binary(mail)) - _ -> params - end - - changeset = User.register_changeset_ldap(%User{}, params) - - case User.register(changeset) do - {:ok, user} -> user - error -> error - end - end - - defp decode_certfile(file) do - with {:ok, data} <- File.read(file) do - data - |> :public_key.pem_decode() - |> Enum.map(fn {_, b, _} -> b end) - else - _ -> - Logger.error("Unable to read certfile: #{file}") - [] - end - end + def change_password(_, _, _, _), do: {:error, :password_confirmation} end diff --git a/lib/pleroma/web/auth/pleroma_authenticator.ex b/lib/pleroma/web/auth/pleroma_authenticator.ex index 09a58eb66..0da3f19fc 100644 --- a/lib/pleroma/web/auth/pleroma_authenticator.ex +++ b/lib/pleroma/web/auth/pleroma_authenticator.ex @@ -6,6 +6,7 @@ defmodule Pleroma.Web.Auth.PleromaAuthenticator do alias Pleroma.Registration alias Pleroma.Repo alias Pleroma.User + alias Pleroma.Web.CommonAPI alias Pleroma.Web.Plugs.AuthenticationPlug import Pleroma.Web.Auth.Helpers, only: [fetch_credentials: 1, fetch_user: 1] @@ -101,4 +102,23 @@ defmodule Pleroma.Web.Auth.PleromaAuthenticator do def auth_template, do: nil def oauth_consumer_template, do: nil + + 
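# NOTE (editor's illustration, not part of this patch): the new change_password/4
# callback receives the current password, the new password, and its confirmation;
# implementations only match when the last two arguments are equal, and the
# catch-all clauses return {:error, :password_confirmation} otherwise. Going
# through the wrapper, a call would look roughly like:
#
#   Pleroma.Web.Auth.WrapperAuthenticator.change_password(user, "current", "new", "new")
#   #=> {:ok, %Pleroma.User{}} on success, {:error, term()} otherwise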
@doc "Changes Pleroma.User password in the database" + def change_password(user, password, new_password, new_password) do + case CommonAPI.Utils.confirm_current_password(user, password) do + {:ok, user} -> + with {:ok, _user} <- + User.reset_password(user, %{ + password: new_password, + password_confirmation: new_password + }) do + {:ok, user} + end + + error -> + error + end + end + + def change_password(_, _, _, _), do: {:error, :password_confirmation} end diff --git a/lib/pleroma/web/auth/wrapper_authenticator.ex b/lib/pleroma/web/auth/wrapper_authenticator.ex index a077cfa41..97b901036 100644 --- a/lib/pleroma/web/auth/wrapper_authenticator.ex +++ b/lib/pleroma/web/auth/wrapper_authenticator.ex @@ -39,4 +39,8 @@ defmodule Pleroma.Web.Auth.WrapperAuthenticator do implementation().oauth_consumer_template() || Pleroma.Config.get([:auth, :oauth_consumer_template], "consumer.html") end + + @impl true + def change_password(user, password, new_password, new_password_confirmation), + do: implementation().change_password(user, password, new_password, new_password_confirmation) end diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex index 8aa1e258d..4220757df 100644 --- a/lib/pleroma/web/common_api/activity_draft.ex +++ b/lib/pleroma/web/common_api/activity_draft.ex @@ -11,6 +11,9 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI.Utils + import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode, + only: [good_locale_code?: 1] + import Pleroma.Web.Gettext import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1] @@ -38,6 +41,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do cc: [], context: nil, sensitive: false, + language: nil, object: nil, preview?: false, changes: %{} @@ -64,6 +68,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do |> content() |> with_valid(&to_and_cc/1) |> with_valid(&context/1) + |> with_valid(&language/1) |> sensitive() |> with_valid(&object/1) |> preview?() @@ -249,6 +254,16 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do %__MODULE__{draft | sensitive: sensitive} end + defp language(draft) do + language = draft.params[:language] + + if good_locale_code?(language) do + %__MODULE__{draft | language: language} + else + draft + end + end + defp object(draft) do emoji = Map.merge(Pleroma.Emoji.Formatter.get_emoji_map(draft.full_payload), draft.emoji) @@ -288,6 +303,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do "mediaType" => Utils.get_content_type(draft.params[:content_type]) }) |> Map.put("generator", draft.params[:generator]) + |> Map.put("language", draft.language) %__MODULE__{draft | object: object} end diff --git a/lib/pleroma/web/endpoint.ex b/lib/pleroma/web/endpoint.ex index fef907ace..bab3c9fd0 100644 --- a/lib/pleroma/web/endpoint.ex +++ b/lib/pleroma/web/endpoint.ex @@ -14,6 +14,7 @@ defmodule Pleroma.Web.Endpoint do websocket: [ path: "/", compress: false, + connect_info: [:sec_websocket_protocol], error_handler: {Pleroma.Web.MastodonAPI.WebsocketHandler, :handle_error, []}, fullsweep_after: 20 ] diff --git a/lib/pleroma/web/fallback/redirect_controller.ex b/lib/pleroma/web/fallback/redirect_controller.ex index 4a0885fab..6637848a9 100644 --- a/lib/pleroma/web/fallback/redirect_controller.ex +++ b/lib/pleroma/web/fallback/redirect_controller.ex @@ -46,7 +46,7 @@ defmodule Pleroma.Web.Fallback.RedirectController do redirector_with_meta(conn, %{user: user}) else nil -> - redirector(conn, params) + redirector_with_meta(conn, 
Map.delete(params, "maybe_nickname_or_id")) end end diff --git a/lib/pleroma/web/feed/tag_controller.ex b/lib/pleroma/web/feed/tag_controller.ex index e60767327..02d639296 100644 --- a/lib/pleroma/web/feed/tag_controller.ex +++ b/lib/pleroma/web/feed/tag_controller.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Web.Feed.TagController do alias Pleroma.Web.Feed.FeedView def feed(conn, params) do - if Config.get!([:instance, :public]) do + if not Config.restrict_unauthenticated_access?(:timelines, :local) do render_feed(conn, params) else render_error(conn, :not_found, "Not found") @@ -18,10 +18,12 @@ defmodule Pleroma.Web.Feed.TagController do end defp render_feed(conn, %{"tag" => raw_tag} = params) do + local_only = Config.restrict_unauthenticated_access?(:timelines, :federated) + {format, tag} = parse_tag(raw_tag) activities = - %{type: ["Create"], tag: tag} + %{type: ["Create"], tag: tag, local_only: local_only} |> Pleroma.Maps.put_if_present(:max_id, params["max_id"]) |> ActivityPub.fetch_public_activities() diff --git a/lib/pleroma/web/feed/user_controller.ex b/lib/pleroma/web/feed/user_controller.ex index 6657c2b3e..304313068 100644 --- a/lib/pleroma/web/feed/user_controller.ex +++ b/lib/pleroma/web/feed/user_controller.ex @@ -15,11 +15,11 @@ defmodule Pleroma.Web.Feed.UserController do action_fallback(:errors) - def feed_redirect(%{assigns: %{format: "html"}} = conn, %{"nickname" => nickname}) do + def feed_redirect(%{assigns: %{format: "html"}} = conn, %{"nickname" => nickname} = params) do with {_, %User{} = user} <- {:fetch_user, User.get_cached_by_nickname_or_id(nickname)} do Pleroma.Web.Fallback.RedirectController.redirector_with_meta(conn, %{user: user}) else - _ -> Pleroma.Web.Fallback.RedirectController.redirector(conn, nil) + _ -> Pleroma.Web.Fallback.RedirectController.redirector_with_meta(conn, params) end end diff --git a/lib/pleroma/web/mastodon_api/controllers/media_controller.ex b/lib/pleroma/web/mastodon_api/controllers/media_controller.ex index 056bad844..41056d389 100644 --- a/lib/pleroma/web/mastodon_api/controllers/media_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/media_controller.ex @@ -53,9 +53,7 @@ defmodule Pleroma.Web.MastodonAPI.MediaController do ) do attachment_data = Map.put(object.data, "id", object.id) - conn - |> put_status(202) - |> render("attachment.json", %{attachment: attachment_data}) + render(conn, "attachment.json", %{attachment: attachment_data}) end end diff --git a/lib/pleroma/web/mastodon_api/controllers/poll_controller.ex b/lib/pleroma/web/mastodon_api/controllers/poll_controller.ex index a2af8148c..6526457df 100644 --- a/lib/pleroma/web/mastodon_api/controllers/poll_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/poll_controller.ex @@ -12,6 +12,7 @@ defmodule Pleroma.Web.MastodonAPI.PollController do alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.CommonAPI alias Pleroma.Web.Plugs.OAuthScopesPlug + alias Pleroma.Workers.PollWorker action_fallback(Pleroma.Web.MastodonAPI.FallbackController) @@ -27,12 +28,16 @@ defmodule Pleroma.Web.MastodonAPI.PollController do defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PollOperation @cachex Pleroma.Config.get([:cachex, :provider], Cachex) + @poll_refresh_interval 120 @doc "GET /api/v1/polls/:id" def show(%{assigns: %{user: user}, private: %{open_api_spex: %{params: %{id: id}}}} = conn, _) do - with %Object{} = object <- Object.get_by_id_and_maybe_refetch(id, interval: 60), - %Activity{} = activity <- 
Activity.get_create_by_object_ap_id(object.data["id"]), + with %Object{} = object <- Object.get_by_id(id), + %Activity{} = activity <- + Activity.get_create_by_object_ap_id_with_object(object.data["id"]), true <- Visibility.visible_for_user?(activity, user) do + maybe_refresh_poll(activity) + try_render(conn, "show.json", %{object: object, for: user}) else error when is_nil(error) or error == false -> @@ -70,4 +75,13 @@ defmodule Pleroma.Web.MastodonAPI.PollController do end end) end + + defp maybe_refresh_poll(%Activity{object: %Object{} = object} = activity) do + with false <- activity.local, + {:ok, end_time} <- NaiveDateTime.from_iso8601(object.data["closed"]), + {_, :lt} <- {:closed_compare, NaiveDateTime.compare(object.updated_at, end_time)} do + PollWorker.new(%{"op" => "refresh", "activity_id" => activity.id}) + |> Oban.insert(unique: [period: @poll_refresh_interval]) + end + end end diff --git a/lib/pleroma/web/mastodon_api/controllers/tag_controller.ex b/lib/pleroma/web/mastodon_api/controllers/tag_controller.ex new file mode 100644 index 000000000..21c21e984 --- /dev/null +++ b/lib/pleroma/web/mastodon_api/controllers/tag_controller.ex @@ -0,0 +1,77 @@ +defmodule Pleroma.Web.MastodonAPI.TagController do + @moduledoc "Hashtag routes for mastodon API" + use Pleroma.Web, :controller + + alias Pleroma.Hashtag + alias Pleroma.Pagination + alias Pleroma.User + + import Pleroma.Web.ControllerHelper, + only: [ + add_link_headers: 2 + ] + + plug(Pleroma.Web.ApiSpec.CastAndValidate) + + plug( + Pleroma.Web.Plugs.OAuthScopesPlug, + %{scopes: ["read"]} when action in [:show] + ) + + plug( + Pleroma.Web.Plugs.OAuthScopesPlug, + %{scopes: ["read:follows"]} when action in [:show_followed] + ) + + plug( + Pleroma.Web.Plugs.OAuthScopesPlug, + %{scopes: ["write:follows"]} when action in [:follow, :unfollow] + ) + + defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.TagOperation + + def show(conn, %{id: id}) do + with %Hashtag{} = hashtag <- Hashtag.get_by_name(id) do + render(conn, "show.json", tag: hashtag, for_user: conn.assigns.user) + else + _ -> conn |> render_error(:not_found, "Hashtag not found") + end + end + + def follow(conn, %{id: id}) do + with %Hashtag{} = hashtag <- Hashtag.get_by_name(id), + %User{} = user <- conn.assigns.user, + {:ok, _} <- + User.follow_hashtag(user, hashtag) do + render(conn, "show.json", tag: hashtag, for_user: user) + else + _ -> render_error(conn, :not_found, "Hashtag not found") + end + end + + def unfollow(conn, %{id: id}) do + with %Hashtag{} = hashtag <- Hashtag.get_by_name(id), + %User{} = user <- conn.assigns.user, + {:ok, _} <- + User.unfollow_hashtag(user, hashtag) do + render(conn, "show.json", tag: hashtag, for_user: user) + else + _ -> render_error(conn, :not_found, "Hashtag not found") + end + end + + def show_followed(conn, params) do + with %{assigns: %{user: %User{} = user}} <- conn do + params = Map.put(params, :id_type, :integer) + + hashtags = + user + |> User.HashtagFollow.followed_hashtags_query() + |> Pagination.fetch_paginated(params) + + conn + |> add_link_headers(hashtags) + |> render("index.json", tags: hashtags, for_user: user) + end + end +end diff --git a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex index 293c61b41..5ee74a80e 100644 --- a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex @@ -40,6 +40,11 @@ defmodule 
Pleroma.Web.MastodonAPI.TimelineController do # GET /api/v1/timelines/home def home(%{assigns: %{user: user}} = conn, params) do + followed_hashtags = + user + |> User.followed_hashtags() + |> Enum.map(& &1.id) + params = params |> Map.put(:type, ["Create", "Announce"]) @@ -49,6 +54,7 @@ defmodule Pleroma.Web.MastodonAPI.TimelineController do |> Map.put(:announce_filtering_user, user) |> Map.put(:user, user) |> Map.put(:local_only, params[:local]) + |> Map.put(:followed_hashtags, followed_hashtags) |> Map.delete(:local) activities = diff --git a/lib/pleroma/web/mastodon_api/views/account_view.ex b/lib/pleroma/web/mastodon_api/views/account_view.ex index 7de6745d4..f6727d29d 100644 --- a/lib/pleroma/web/mastodon_api/views/account_view.ex +++ b/lib/pleroma/web/mastodon_api/views/account_view.ex @@ -219,10 +219,10 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do avatar = User.avatar_url(user) |> MediaProxy.url() avatar_static = User.avatar_url(user) |> MediaProxy.preview_url(static: true) - avatar_description = image_description(user.avatar) + avatar_description = User.image_description(user.avatar) header = User.banner_url(user) |> MediaProxy.url() header_static = User.banner_url(user) |> MediaProxy.preview_url(static: true) - header_description = image_description(user.banner) + header_description = User.image_description(user.banner) following_count = if !user.hide_follows_count or !user.hide_follows or self, @@ -349,10 +349,6 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do defp username_from_nickname(_), do: nil - defp image_description(%{"name" => name}), do: name - - defp image_description(_), do: "" - defp maybe_put_follow_requests_count( data, %User{id: user_id} = user, diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 3bf870c24..10966edd6 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -227,7 +227,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do mentions: mentions, tags: reblogged[:tags] || [], application: build_application(object.data["generator"]), - language: nil, + language: get_language(object), emojis: [], pleroma: %{ local: activity.local, @@ -445,7 +445,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do mentions: mentions, tags: build_tags(tags), application: build_application(object.data["generator"]), - language: nil, + language: get_language(object), emojis: build_emojis(object.data["emoji"]), pleroma: %{ local: activity.local, @@ -829,6 +829,10 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do Utils.get_content_type(nil) end + defp get_language(%{data: %{"language" => "und"}}), do: nil + + defp get_language(object), do: object.data["language"] + defp proxied_url(url, page_url_data) do if is_binary(url) do build_image_url(URI.parse(url), page_url_data) |> MediaProxy.url() diff --git a/lib/pleroma/web/mastodon_api/views/tag_view.ex b/lib/pleroma/web/mastodon_api/views/tag_view.ex new file mode 100644 index 000000000..e24d423c2 --- /dev/null +++ b/lib/pleroma/web/mastodon_api/views/tag_view.ex @@ -0,0 +1,25 @@ +defmodule Pleroma.Web.MastodonAPI.TagView do + use Pleroma.Web, :view + alias Pleroma.User + alias Pleroma.Web.Router.Helpers + + def render("index.json", %{tags: tags, for_user: user}) do + safe_render_many(tags, __MODULE__, "show.json", %{for_user: user}) + end + + def render("show.json", %{tag: tag, for_user: user}) do + following = + with %User{} <- user do + User.following_hashtag?(user, tag) + else + _ -> 
false + end + + %{ + name: tag.name, + url: Helpers.tag_feed_url(Pleroma.Web.Endpoint, :feed, tag.name), + history: [], + following: following + } + end +end diff --git a/lib/pleroma/web/mastodon_api/websocket_handler.ex b/lib/pleroma/web/mastodon_api/websocket_handler.ex index 730295a4c..3ed1cdd6c 100644 --- a/lib/pleroma/web/mastodon_api/websocket_handler.ex +++ b/lib/pleroma/web/mastodon_api/websocket_handler.ex @@ -22,7 +22,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do # This only prepares the connection and is not in the process yet @impl Phoenix.Socket.Transport def connect(%{params: params} = transport_info) do - with access_token <- Map.get(params, "access_token"), + with access_token <- find_access_token(transport_info), {:ok, user, oauth_token} <- authenticate_request(access_token), {:ok, topic} <- Streamer.get_topic(params["stream"], user, oauth_token, params) do @@ -244,4 +244,13 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do def handle_error(conn, _reason) do Plug.Conn.send_resp(conn, 404, "Not Found") end + + defp find_access_token(%{ + connect_info: %{sec_websocket_protocol: [token]} + }), + do: token + + defp find_access_token(%{params: %{"access_token" => token}}), do: token + + defp find_access_token(_), do: nil end diff --git a/lib/pleroma/web/media_proxy/media_proxy_controller.ex b/lib/pleroma/web/media_proxy/media_proxy_controller.ex index 0b446e0a6..a0aafc32e 100644 --- a/lib/pleroma/web/media_proxy/media_proxy_controller.ex +++ b/lib/pleroma/web/media_proxy/media_proxy_controller.ex @@ -71,11 +71,15 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyController do drop_static_param_and_redirect(conn) content_type == "image/gif" -> - redirect(conn, external: media_proxy_url) + conn + |> put_status(301) + |> redirect(external: media_proxy_url) min_content_length_for_preview() > 0 and content_length > 0 and content_length < min_content_length_for_preview() -> - redirect(conn, external: media_proxy_url) + conn + |> put_status(301) + |> redirect(external: media_proxy_url) true -> handle_preview(content_type, conn, media_proxy_url) diff --git a/lib/pleroma/web/metadata.ex b/lib/pleroma/web/metadata.ex index 59d018730..4ee7c41ec 100644 --- a/lib/pleroma/web/metadata.ex +++ b/lib/pleroma/web/metadata.ex @@ -7,6 +7,7 @@ defmodule Pleroma.Web.Metadata do def build_tags(params) do providers = [ + Pleroma.Web.Metadata.Providers.ActivityPub, Pleroma.Web.Metadata.Providers.RelMe, Pleroma.Web.Metadata.Providers.RestrictIndexing | activated_providers() diff --git a/lib/pleroma/web/metadata/providers/activity_pub.ex b/lib/pleroma/web/metadata/providers/activity_pub.ex new file mode 100644 index 000000000..bd9f92332 --- /dev/null +++ b/lib/pleroma/web/metadata/providers/activity_pub.ex @@ -0,0 +1,22 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.Metadata.Providers.ActivityPub do + alias Pleroma.Web.Metadata.Providers.Provider + + @behaviour Provider + + @impl Provider + def build_tags(%{object: %{data: %{"id" => object_id}}}) do + [{:link, [rel: "alternate", type: "application/activity+json", href: object_id], []}] + end + + @impl Provider + def build_tags(%{user: user}) do + [{:link, [rel: "alternate", type: "application/activity+json", href: user.ap_id], []}] + end + + @impl Provider + def build_tags(_), do: [] +end diff --git a/lib/pleroma/web/metadata/providers/feed.ex b/lib/pleroma/web/metadata/providers/feed.ex index 
e97d6a54f..5a0f2338e 100644 --- a/lib/pleroma/web/metadata/providers/feed.ex +++ b/lib/pleroma/web/metadata/providers/feed.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Web.Metadata.Providers.Feed do @behaviour Provider @impl Provider - def build_tags(%{user: user}) do + def build_tags(%{user: %{local: true} = user}) do [ {:link, [ @@ -20,4 +20,7 @@ defmodule Pleroma.Web.Metadata.Providers.Feed do ], []} ] end + + @impl Provider + def build_tags(_), do: [] end diff --git a/lib/pleroma/web/metadata/providers/open_graph.ex b/lib/pleroma/web/metadata/providers/open_graph.ex index 97d3865ed..fa5fbe553 100644 --- a/lib/pleroma/web/metadata/providers/open_graph.ex +++ b/lib/pleroma/web/metadata/providers/open_graph.ex @@ -67,6 +67,9 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do end end + @impl Provider + def build_tags(_), do: [] + defp build_attachments(%{data: %{"attachment" => attachments}}) do Enum.reduce(attachments, [], fn attachment, acc -> rendered_tags = diff --git a/lib/pleroma/web/metadata/providers/rel_me.ex b/lib/pleroma/web/metadata/providers/rel_me.ex index eabd8cb00..39aa71f06 100644 --- a/lib/pleroma/web/metadata/providers/rel_me.ex +++ b/lib/pleroma/web/metadata/providers/rel_me.ex @@ -20,6 +20,9 @@ defmodule Pleroma.Web.Metadata.Providers.RelMe do end) end + @impl Provider + def build_tags(_), do: [] + defp append_fields_tag(bio, fields) do fields |> Enum.reduce(bio, fn %{"value" => v}, res -> res <> v end) diff --git a/lib/pleroma/web/metadata/providers/twitter_card.ex b/lib/pleroma/web/metadata/providers/twitter_card.ex index 426022c65..7f50877c3 100644 --- a/lib/pleroma/web/metadata/providers/twitter_card.ex +++ b/lib/pleroma/web/metadata/providers/twitter_card.ex @@ -44,6 +44,9 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do end end + @impl Provider + def build_tags(_), do: [] + defp title_tag(user) do {:meta, [name: "twitter:title", content: Utils.user_name_string(user)], []} end diff --git a/lib/pleroma/web/push.ex b/lib/pleroma/web/push.ex index 6d777142e..77f77f88e 100644 --- a/lib/pleroma/web/push.ex +++ b/lib/pleroma/web/push.ex @@ -20,7 +20,7 @@ defmodule Pleroma.Web.Push do end def vapid_config do - Application.get_env(:web_push_encryption, :vapid_details, nil) + Application.get_env(:web_push_encryption, :vapid_details, []) end def enabled, do: match?([subject: _, public_key: _, private_key: _], vapid_config()) diff --git a/lib/pleroma/web/rich_media/card.ex b/lib/pleroma/web/rich_media/card.ex index abad4957e..6b4bb9555 100644 --- a/lib/pleroma/web/rich_media/card.ex +++ b/lib/pleroma/web/rich_media/card.ex @@ -54,7 +54,10 @@ defmodule Pleroma.Web.RichMedia.Card do @spec get_by_url(String.t() | nil) :: t() | nil | :error def get_by_url(url) when is_binary(url) do - if @config_impl.get([:rich_media, :enabled]) do + host = URI.parse(url).host + + with true <- @config_impl.get([:rich_media, :enabled]), + true <- host not in @config_impl.get([:rich_media, :ignore_hosts], []) do url_hash = url_to_hash(url) @cachex.fetch!(:rich_media_cache, url_hash, fn _ -> @@ -69,7 +72,7 @@ defmodule Pleroma.Web.RichMedia.Card do end end) else - :error + false -> :error end end @@ -77,7 +80,10 @@ defmodule Pleroma.Web.RichMedia.Card do @spec get_or_backfill_by_url(String.t(), keyword()) :: t() | nil def get_or_backfill_by_url(url, opts \\ []) do - if @config_impl.get([:rich_media, :enabled]) do + host = URI.parse(url).host + + with true <- @config_impl.get([:rich_media, :enabled]), + true <- host not in @config_impl.get([:rich_media, :ignore_hosts], []) do case 
get_by_url(url) do %__MODULE__{} = card -> card @@ -94,7 +100,7 @@ defmodule Pleroma.Web.RichMedia.Card do nil end else - nil + false -> nil end end diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index d78a6aef4..bf8ebf3e4 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -755,6 +755,11 @@ defmodule Pleroma.Web.Router do get("/announcements", AnnouncementController, :index) post("/announcements/:id/dismiss", AnnouncementController, :mark_read) + + get("/tags/:id", TagController, :show) + post("/tags/:id/follow", TagController, :follow) + post("/tags/:id/unfollow", TagController, :unfollow) + get("/followed_tags", TagController, :show_followed) end scope "/api/v1", Pleroma.Web.MastodonAPI do diff --git a/lib/pleroma/web/streamer.ex b/lib/pleroma/web/streamer.ex index 76dc0f42d..cc149e04c 100644 --- a/lib/pleroma/web/streamer.ex +++ b/lib/pleroma/web/streamer.ex @@ -19,6 +19,7 @@ defmodule Pleroma.Web.Streamer do alias Pleroma.Web.OAuth.Token alias Pleroma.Web.Plugs.OAuthScopesPlug alias Pleroma.Web.StreamerView + require Pleroma.Constants @registry Pleroma.Web.StreamerRegistry @@ -305,7 +306,17 @@ defmodule Pleroma.Web.Streamer do User.get_recipients_from_activity(item) |> Enum.map(fn %{id: id} -> "user:#{id}" end) - Enum.each(recipient_topics, fn topic -> + hashtag_recipients = + if Pleroma.Constants.as_public() in item.recipients do + Pleroma.Hashtag.get_recipients_for_activity(item) + |> Enum.map(fn id -> "user:#{id}" end) + else + [] + end + + all_recipients = Enum.uniq(recipient_topics ++ hashtag_recipients) + + Enum.each(all_recipients, fn topic -> push_to_socket(topic, item) end) end diff --git a/lib/pleroma/web/twitter_api/controllers/util_controller.ex b/lib/pleroma/web/twitter_api/controllers/util_controller.ex index 6805233df..aeafa195d 100644 --- a/lib/pleroma/web/twitter_api/controllers/util_controller.ex +++ b/lib/pleroma/web/twitter_api/controllers/util_controller.ex @@ -13,6 +13,7 @@ defmodule Pleroma.Web.TwitterAPI.UtilController do alias Pleroma.Healthcheck alias Pleroma.User alias Pleroma.Web.ActivityPub.ActivityPub + alias Pleroma.Web.Auth.WrapperAuthenticator, as: Authenticator alias Pleroma.Web.CommonAPI alias Pleroma.Web.Plugs.OAuthScopesPlug alias Pleroma.Web.WebFinger @@ -195,19 +196,21 @@ defmodule Pleroma.Web.TwitterAPI.UtilController do %{assigns: %{user: user}, private: %{open_api_spex: %{body_params: body_params}}} = conn, _ ) do - case CommonAPI.Utils.confirm_current_password(user, body_params.password) do - {:ok, user} -> - with {:ok, _user} <- - User.reset_password(user, %{ - password: body_params.new_password, - password_confirmation: body_params.new_password_confirmation - }) do - json(conn, %{status: "success"}) - else - {:error, changeset} -> - {_, {error, _}} = Enum.at(changeset.errors, 0) - json(conn, %{error: "New password #{error}."}) - end + with {:ok, %User{}} <- + Authenticator.change_password( + user, + body_params.password, + body_params.new_password, + body_params.new_password_confirmation + ) do + json(conn, %{status: "success"}) + else + {:error, %Ecto.Changeset{} = changeset} -> + {_, {error, _}} = Enum.at(changeset.errors, 0) + json(conn, %{error: "New password #{error}."}) + + {:error, :password_confirmation} -> + json(conn, %{error: "New password does not match confirmation."}) {:error, msg} -> json(conn, %{error: msg}) diff --git a/lib/pleroma/web/twitter_api/views/token_view.ex b/lib/pleroma/web/twitter_api/views/token_view.ex index 2e492c13f..36776ce3b 100644 --- 
a/lib/pleroma/web/twitter_api/views/token_view.ex +++ b/lib/pleroma/web/twitter_api/views/token_view.ex @@ -15,7 +15,8 @@ defmodule Pleroma.Web.TwitterAPI.TokenView do %{ id: token_entry.id, valid_until: token_entry.valid_until, - app_name: token_entry.app.client_name + app_name: token_entry.app.client_name, + scopes: token_entry.scopes } end end diff --git a/lib/pleroma/workers/poll_worker.ex b/lib/pleroma/workers/poll_worker.ex index d263aa1b9..a9afe9d63 100644 --- a/lib/pleroma/workers/poll_worker.ex +++ b/lib/pleroma/workers/poll_worker.ex @@ -11,27 +11,46 @@ defmodule Pleroma.Workers.PollWorker do alias Pleroma.Activity alias Pleroma.Notification alias Pleroma.Object + alias Pleroma.Object.Fetcher + + @stream_out_impl Pleroma.Config.get( + [__MODULE__, :stream_out], + Pleroma.Web.ActivityPub.ActivityPub + ) @impl true def perform(%Job{args: %{"op" => "poll_end", "activity_id" => activity_id}}) do - with %Activity{} = activity <- find_poll_activity(activity_id), + with {_, %Activity{} = activity} <- {:activity, Activity.get_by_id(activity_id)}, {:ok, notifications} <- Notification.create_poll_notifications(activity) do + unless activity.local do + # Schedule a final refresh + __MODULE__.new(%{"op" => "refresh", "activity_id" => activity_id}) + |> Oban.insert() + end + Notification.stream(notifications) else - {:error, :poll_activity_not_found} = e -> {:cancel, e} + {:activity, nil} -> {:cancel, :poll_activity_not_found} e -> {:error, e} end end - @impl true - def timeout(_job), do: :timer.seconds(5) + def perform(%Job{args: %{"op" => "refresh", "activity_id" => activity_id}}) do + with {_, %Activity{object: object}} <- + {:activity, Activity.get_by_id_with_object(activity_id)}, + {_, {:ok, _object}} <- {:refetch, Fetcher.refetch_object(object)} do + stream_update(activity_id) - defp find_poll_activity(activity_id) do - with nil <- Activity.get_by_id(activity_id) do - {:error, :poll_activity_not_found} + :ok + else + {:activity, nil} -> {:cancel, :poll_activity_not_found} + {:refetch, _} = e -> {:cancel, e} end end + @impl true + def timeout(_job), do: :timer.seconds(5) + def schedule_poll_end(%Activity{data: %{"type" => "Create"}, id: activity_id} = activity) do with %Object{data: %{"type" => "Question", "closed" => closed}} when is_binary(closed) <- Object.normalize(activity), @@ -49,4 +68,10 @@ defmodule Pleroma.Workers.PollWorker do end def schedule_poll_end(activity), do: {:error, activity} + + defp stream_update(activity_id) do + Activity.get_by_id(activity_id) + |> Activity.normalize() + |> @stream_out_impl.stream_out() + end end @@ -4,8 +4,8 @@ defmodule Pleroma.Mixfile do def project do [ app: :pleroma, - version: version("2.7.0"), - elixir: "~> 1.13", + version: version("2.8.0"), + elixir: "~> 1.14", elixirc_paths: elixirc_paths(Mix.env()), compilers: Mix.compilers(), elixirc_options: [warnings_as_errors: warnings_as_errors(), prune_code_paths: false], @@ -132,7 +132,8 @@ defmodule Pleroma.Mixfile do # Type `mix help deps` for examples and options. 
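The poll refresh scheduled by `maybe_refresh_poll/1` and `PollWorker` above is enqueued with Oban's `:unique` option, so repeated fetches of the same poll within the 120-second window collapse into a single job rather than piling up refetches. A hedged sketch of that pattern with an illustrative worker; it assumes a configured Oban instance, and the worker and queue names are not from this patch (the patch passes the option at insert time, while the sketch attaches it when building the job, to similar effect):

defmodule RefreshSketchWorker do
  use Oban.Worker, queue: :background

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"activity_id" => activity_id}}) do
    # Stand-in for the real work: refetch the object and stream the update out.
    IO.puts("refreshing poll for activity #{activity_id}")
    :ok
  end
end

# Duplicate inserts with the same args inside a 120-second window resolve to the
# already-enqueued job instead of creating a new one.
%{"activity_id" => "AbC123"}
|> RefreshSketchWorker.new(unique: [period: 120])
|> Oban.insert()
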
defp deps do [ - {:phoenix, "~> 1.7.3"}, + {:phoenix, + git: "https://github.com/feld/phoenix", branch: "v1.7.14-websocket-headers", override: true}, {:phoenix_ecto, "~> 4.4"}, {:ecto_sql, "~> 3.10"}, {:ecto_enum, "~> 1.4"}, @@ -153,7 +154,7 @@ defmodule Pleroma.Mixfile do {:calendar, "~> 1.0"}, {:cachex, "~> 3.2"}, {:tesla, "~> 1.11"}, - {:castore, "~> 0.1"}, + {:castore, "~> 1.0"}, {:cowlib, "~> 2.9", override: true}, {:gun, "~> 2.0.0-rc.1", override: true}, {:finch, "~> 0.15"}, @@ -169,6 +170,8 @@ defmodule Pleroma.Mixfile do {:swoosh, "~> 1.16.9"}, {:phoenix_swoosh, "~> 1.1"}, {:gen_smtp, "~> 0.13"}, + {:mua, "~> 0.2.0"}, + {:mail, "~> 0.3.0"}, {:ex_syslogger, "~> 1.4"}, {:floki, "~> 0.35"}, {:timex, "~> 3.6"}, @@ -11,7 +11,7 @@ "cachex": {:hex, :cachex, "3.6.0", "14a1bfbeee060dd9bec25a5b6f4e4691e3670ebda28c8ba2884b12fe30b36bf8", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "ebf24e373883bc8e0c8d894a63bbe102ae13d918f790121f5cfe6e485cc8e2e2"}, "calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"}, "captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "6630c42aaaab124e697b4e513190c89d8b64e410", [ref: "6630c42aaaab124e697b4e513190c89d8b64e410"]}, - "castore": {:hex, :castore, "0.1.22", "4127549e411bedd012ca3a308dede574f43819fe9394254ca55ab4895abfa1a2", [:mix], [], "hexpm", "c17576df47eb5aa1ee40cc4134316a99f5cad3e215d5c77b8dd3cfef12a22cac"}, + "castore": {:hex, :castore, "1.0.8", "dedcf20ea746694647f883590b82d9e96014057aff1d44d03ec90f36a5c0dc6e", [:mix], [], "hexpm", "0b2b66d2ee742cb1d9cb8c8be3b43c3a70ee8651f37b75a8b982e036752983f1"}, "cc_precompiler": {:hex, :cc_precompiler, "0.1.9", "e8d3364f310da6ce6463c3dd20cf90ae7bbecbf6c5203b98bf9b48035592649b", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "9dcab3d0f3038621f1601f13539e7a9ee99843862e66ad62827b0c42b2f58a54"}, "certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"}, "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"}, @@ -50,7 +50,7 @@ "ex_syslogger": {:hex, :ex_syslogger, "1.5.2", "72b6aa2d47a236e999171f2e1ec18698740f40af0bd02c8c650bf5f1fd1bac79", [:mix], [{:poison, ">= 1.5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:syslog, "~> 1.1.0", [hex: :syslog, repo: "hexpm", optional: false]}], "hexpm", "ab9fab4136dbc62651ec6f16fa4842f10cf02ab4433fa3d0976c01be99398399"}, "exile": {:hex, :exile, "0.10.0", "b69e2d27a9af670b0f0a0898addca0eda78f6f5ba95ccfbc9bc6ccdd04925436", [:make, :mix], [{:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "c62ee8fee565b5ac4a898d0dcd58d2b04fb5eec1655af1ddcc9eb582c6732c33"}, "expo": {:hex, :expo, "0.5.1", "249e826a897cac48f591deba863b26c16682b43711dd15ee86b92f25eafd96d9", [:mix], [], "hexpm", 
"68a4233b0658a3d12ee00d27d37d856b1ba48607e7ce20fd376958d0ba6ce92b"}, - "fast_html": {:hex, :fast_html, "2.2.0", "6c5ef1be087a4ed613b0379c13f815c4d11742b36b67bb52cee7859847c84520", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "064c4f23b4a6168f9187dac8984b056f2c531bb0787f559fd6a8b34b38aefbae"}, + "fast_html": {:hex, :fast_html, "2.3.0", "08c1d8ead840dd3060ba02c761bed9f37f456a1ddfe30bcdcfee8f651cec06a6", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "f18e3c7668f82d3ae0b15f48d48feeb257e28aa5ab1b0dbf781c7312e5da029d"}, "fast_sanitize": {:hex, :fast_sanitize, "0.2.3", "67b93dfb34e302bef49fec3aaab74951e0f0602fd9fa99085987af05bd91c7a5", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "e8ad286d10d0386e15d67d0ee125245ebcfbc7d7290b08712ba9013c8c5e56e2"}, "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, "finch": {:hex, :finch, "0.18.0", "944ac7d34d0bd2ac8998f79f7a811b21d87d911e77a786bc5810adb75632ada4", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "69f5045b042e531e53edc2574f15e25e735b522c37e2ddb766e15b979e03aa65"}, @@ -72,6 +72,7 @@ "jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"}, "linkify": {:hex, :linkify, "0.5.3", "5f8143d8f61f5ff08d3aeeff47ef6509492b4948d8f08007fbf66e4d2246a7f2", [:mix], [], "hexpm", "3ef35a1377d47c25506e07c1c005ea9d38d700699d92ee92825f024434258177"}, "logger_backends": {:hex, :logger_backends, "1.0.0", "09c4fad6202e08cb0fbd37f328282f16539aca380f512523ce9472b28edc6bdf", [:mix], [], "hexpm", "1faceb3e7ec3ef66a8f5746c5afd020e63996df6fd4eb8cdb789e5665ae6c9ce"}, + "mail": {:hex, :mail, "0.3.1", "cb0a14e4ed8904e4e5a08214e686ccf6f9099346885db17d8c309381f865cc5c", [:mix], [], "hexpm", "1db701e89865c1d5fa296b2b57b1cd587587cca8d8a1a22892b35ef5a8e352a6"}, "majic": {:hex, :majic, "1.0.0", "37e50648db5f5c2ff0c9fb46454d034d11596c03683807b9fb3850676ffdaab3", [:make, :mix], [{:elixir_make, "~> 0.6.1", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "7905858f76650d49695f14ea55cd9aaaee0c6654fa391671d4cf305c275a0a9e"}, "makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", 
"cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"}, "makeup_elixir": {:hex, :makeup_elixir, "0.14.1", "4f0e96847c63c17841d42c08107405a005a2680eb9c7ccadfd757bd31dabccfb", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f2438b1a80eaec9ede832b5c41cd4f373b38fd7aa33e3b22d9db79e640cbde11"}, @@ -85,18 +86,19 @@ "mock": {:hex, :mock, "0.3.8", "7046a306b71db2488ef54395eeb74df0a7f335a7caca4a3d3875d1fc81c884dd", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "7fa82364c97617d79bb7d15571193fc0c4fe5afd0c932cef09426b3ee6fe2022"}, "mogrify": {:hex, :mogrify, "0.9.3", "238c782f00271dace01369ad35ae2e9dd020feee3443b9299ea5ea6bed559841", [:mix], [], "hexpm", "0189b1e1de27455f2b9ae8cf88239cefd23d38de9276eb5add7159aea51731e6"}, "mox": {:hex, :mox, "1.1.0", "0f5e399649ce9ab7602f72e718305c0f9cdc351190f72844599545e4996af73c", [:mix], [], "hexpm", "d44474c50be02d5b72131070281a5d3895c0e7a95c780e90bc0cfe712f633a13"}, + "mua": {:hex, :mua, "0.2.3", "46b29b7b2bb14105c0b7be9526f7c452df17a7841b30b69871c024a822ff551c", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "7fe861a87fcc06a980d3941bbcb2634e5f0f30fd6ad15ef6c0423ff9dc7e46de"}, "multipart": {:hex, :multipart, "0.4.0", "634880a2148d4555d050963373d0e3bbb44a55b2badd87fa8623166172e9cda0", [:mix], [{:mime, "~> 1.2 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}], "hexpm", "3c5604bc2fb17b3137e5d2abdf5dacc2647e60c5cc6634b102cf1aef75a06f0a"}, "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"}, "nimble_parsec": {:hex, :nimble_parsec, "0.6.0", "32111b3bf39137144abd7ba1cce0914533b2d16ef35e8abc5ec8be6122944263", [:mix], [], "hexpm", "27eac315a94909d4dc68bc07a4a83e06c8379237c5ea528a9acff4ca1c873c52"}, "nimble_pool": {:hex, :nimble_pool, "0.2.6", "91f2f4c357da4c4a0a548286c84a3a28004f68f05609b4534526871a22053cde", [:mix], [], "hexpm", "1c715055095d3f2705c4e236c18b618420a35490da94149ff8b580a2144f653f"}, "nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]}, - "oban": {:hex, :oban, "2.18.2", "583e78965ee15263ac968e38c983bad169ae55eadaa8e1e39912562badff93ba", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "9dd25fd35883a91ed995e9fe516e479344d3a8623dfe2b8c3fc8e5be0228ec3a"}, + "oban": {:hex, :oban, "2.18.3", "1608c04f8856c108555c379f2f56bc0759149d35fa9d3b825cb8a6769f8ae926", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "36ca6ca84ef6518f9c2c759ea88efd438a3c81d667ba23b02b062a0aa785475e"}, "oban_live_dashboard": {:hex, :oban_live_dashboard, "0.1.1", 
"8aa4ceaf381c818f7d5c8185cc59942b8ac82ef0cf559881aacf8d3f8ac7bdd3", [:mix], [{:oban, "~> 2.15", [hex: :oban, repo: "hexpm", optional: false]}, {:phoenix_live_dashboard, "~> 0.7", [hex: :phoenix_live_dashboard, repo: "hexpm", optional: false]}], "hexpm", "16dc4ce9c9a95aa2e655e35ed4e675652994a8def61731a18af85e230e1caa63"}, "octo_fetch": {:hex, :octo_fetch, "0.4.0", "074b5ecbc08be10b05b27e9db08bc20a3060142769436242702931c418695b19", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "cf8be6f40cd519d7000bb4e84adcf661c32e59369ca2827c4e20042eda7a7fc6"}, "open_api_spex": {:hex, :open_api_spex, "3.18.2", "8c855e83bfe8bf81603d919d6e892541eafece3720f34d1700b58024dadde247", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "aa3e6dcfc0ad6a02596b2172662da21c9dd848dac145ea9e603f54e3d81b8d2b"}, "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"}, "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "1.2.1", "9cbe354b58121075bd20eb83076900a3832324b7dd171a6895fab57b6bb2752c", [:mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}], "hexpm", "d3b40a4a4630f0b442f19eca891fcfeeee4c40871936fed2f68e1c4faa30481f"}, - "phoenix": {:hex, :phoenix, "1.7.14", "a7d0b3f1bc95987044ddada111e77bd7f75646a08518942c72a8440278ae7825", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "c7859bc56cc5dfef19ecfc240775dae358cbaa530231118a9e014df392ace61a"}, + "phoenix": {:git, "https://github.com/feld/phoenix", "fb6dc76c657422e49600896c64aab4253fceaef6", [branch: "v1.7.14-websocket-headers"]}, "phoenix_ecto": {:hex, :phoenix_ecto, "4.4.3", "86e9878f833829c3f66da03d75254c155d91d72a201eb56ae83482328dc7ca93", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "d36c401206f3011fefd63d04e8ef626ec8791975d9d107f9a0817d426f61ac07"}, "phoenix_html": {:hex, :phoenix_html, "3.3.4", "42a09fc443bbc1da37e372a5c8e6755d046f22b9b11343bf885067357da21cb3", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "0249d3abec3714aff3415e7ee3d9786cb325be3151e6c4b3021502c585bf53fb"}, "phoenix_live_dashboard": {:hex, 
:phoenix_live_dashboard, "0.8.3", "7ff51c9b6609470f681fbea20578dede0e548302b0c8bdf338b5a753a4f045bf", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:ecto_sqlite3_extras, "~> 1.1.7 or ~> 1.2.0", [hex: :ecto_sqlite3_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.19 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "f9470a0a8bae4f56430a23d42f977b5a6205fdba6559d76f932b876bfaec652d"}, diff --git a/priv/gettext/fr/LC_MESSAGES/config_descriptions.po b/priv/gettext/fr/LC_MESSAGES/config_descriptions.po index e43db68aa..c24ab6751 100644 --- a/priv/gettext/fr/LC_MESSAGES/config_descriptions.po +++ b/priv/gettext/fr/LC_MESSAGES/config_descriptions.po @@ -3,14 +3,16 @@ msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2022-07-22 02:09+0300\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: Automatically generated\n" -"Language-Team: none\n" +"PO-Revision-Date: 2024-10-13 21:03+0000\n" +"Last-Translator: Codimp <contact@lithio.fr>\n" +"Language-Team: French <https://translate.pleroma.social/projects/pleroma/" +"pleroma-backend-domain-config_descriptions/fr/>\n" "Language: fr\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" -"X-Generator: Translate Toolkit 3.7.2\n" +"Plural-Forms: nplurals=2; plural=n > 1;\n" +"X-Generator: Weblate 4.13.1\n" ## This file is a PO Template file. ## @@ -21,7 +23,6 @@ msgstr "" ## Run "mix gettext.extract" to bring this file up to ## date. Leave "msgstr"s empty as changing them here has no ## effect: edit them in PO (.po) files instead. - #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :esshd" @@ -32,25 +33,30 @@ msgstr "" #, elixir-autogen, elixir-format msgctxt "config description at :logger" msgid "Logger-related settings" -msgstr "" +msgstr "Paramètres liés à la journalisation" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :mime" msgid "Mime Types settings" -msgstr "" +msgstr "Paramètres des types Mime" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma" msgid "Allows setting a token that can be used to authenticate requests with admin privileges without a normal user account token. Append the `admin_token` parameter to requests to utilize it. (Please reconsider using HTTP Basic Auth or OAuth-based authentication if possible)" msgstr "" +"Permet de configurer un jeton qui peut être utilisé pour authentifier les " +"requêtes avec des privilèges administrateurs sans utiliser un jeton de " +"compte utilisateur standard. Pour l'utiliser, ajoutez le paramètre " +"`admin_token`aux requêtes. 
(Vous devriez utiliser l'authentification HTTP " +"Basic ou OAuth à la place si vous le pouvez)" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma" msgid "Authenticator" -msgstr "" +msgstr "Authentifieur" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format @@ -62,7 +68,7 @@ msgstr "" #, elixir-autogen, elixir-format msgctxt "config label at :cors_plug" msgid "CORS plug config" -msgstr "" +msgstr "Configuration du plug CORS" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format @@ -74,25 +80,25 @@ msgstr "" #, elixir-autogen, elixir-format msgctxt "config label at :logger" msgid "Logger" -msgstr "" +msgstr "Journaliseur" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config label at :mime" msgid "Mime Types" -msgstr "" +msgstr "Types Mime" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config label at :pleroma" msgid "Pleroma Admin Token" -msgstr "" +msgstr "Jeton Administrateur Pleroma" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config label at :pleroma" msgid "Pleroma Authenticator" -msgstr "" +msgstr "Authentifieur Pleroma" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format @@ -104,103 +110,111 @@ msgstr "" #, elixir-autogen, elixir-format msgctxt "config description at :logger-:console" msgid "Console logger settings" -msgstr "" +msgstr "Paramètres de journalisation de la console" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :logger-:ex_syslogger" msgid "ExSyslogger-related settings" -msgstr "" +msgstr "Paramètres liés à ExSyslogger" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:activitypub" msgid "ActivityPub-related settings" -msgstr "" +msgstr "Paramètres liés à ActivityPub" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:assets" msgid "This section configures assets to be used with various frontends. Currently the only option relates to mascots on the mastodon frontend" msgstr "" +"Cette section configure les annexes (assets) à utiliser avec divers " +"frontaux. 
La seule option est actuellement liée au mascottes du frontal " +"mastodon" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:auth" msgid "Authentication / authorization settings" -msgstr "" +msgstr "Paramètres d'authentification/autorisations" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:connections_pool" msgid "Advanced settings for `Gun` connections pool" -msgstr "" +msgstr "Paramètres avancés pour le bac (pool) de connexions `Gun`" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:email_notifications" msgid "Email notifications settings" -msgstr "" +msgstr "Paramètres de notification par email" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:features" msgid "Customizable features" -msgstr "" +msgstr "Fonctionnalités personnalisables" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:feed" msgid "Configure feed rendering" -msgstr "" +msgstr "Configurer le rendu des flux" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:frontend_configurations" msgid "This form can be used to configure a keyword list that keeps the configuration data for any kind of frontend. By default, settings for pleroma_fe are configured. If you want to add your own configuration your settings all fields must be complete." msgstr "" +"Ce formulaire peut être utilisé pour configurer une liste de clés (keyword) " +"qui contiennent les données de configuration pour tout types de frontaux. " +"Par défaut, les paramètres pour pleroma_fe sont configurés. Si vous voulez " +"ajouter vos propres paramètres de configurations, tout les champs doivent " +"être remplis." 
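For reference, the `:frontend_configurations` setting described in the entry above is a keyword list keyed by frontend name, each holding a map of that frontend's settings. A rough illustration in config syntax; the values shown are assumptions for the example, not taken from this patch:

import Config

config :pleroma, :frontend_configurations,
  pleroma_fe: %{
    theme: "pleroma-dark",
    showInstanceSpecificPanel: true
  }
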
#: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:frontends" msgid "Installed frontends management" -msgstr "" +msgstr "Gestion des frontaux installés" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:gopher" msgid "Gopher settings" -msgstr "" +msgstr "Paramètres Gopher" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:hackney_pools" msgid "Advanced settings for `Hackney` connections pools" -msgstr "" +msgstr "Paramètres avancés pour les bacs (pool) de connexions `Hackney`" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:http" msgid "HTTP settings" -msgstr "" +msgstr "Paramètres HTTP" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:http_security" msgid "HTTP security settings" -msgstr "" +msgstr "Paramètres de sécurité HTTP" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:instance" msgid "Instance-related settings" -msgstr "" +msgstr "Paramètres liés à l'instance" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:instances_favicons" msgid "Control favicons for instances" -msgstr "" +msgstr "Gère les favicons des instances" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format @@ -212,151 +226,177 @@ msgstr "" #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:majic_pool" msgid "Majic/libmagic configuration" -msgstr "" +msgstr "Configuration de majic/libmagic" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:manifest" msgid "This section describe PWA manifest instance-specific values. Currently this option relate only for MastoFE." msgstr "" +"Cette section décrit les valeurs spécifique à l'instance du manifeste PWA. " +"Actuellement, cette option ne concerne que MastoFE." #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:media_preview_proxy" msgid "Media preview proxy" -msgstr "" +msgstr "Proxy de prévisualisation média" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:media_proxy" msgid "Media proxy" -msgstr "" +msgstr "Proxy média" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:modules" msgid "Custom Runtime Modules" -msgstr "" +msgstr "Modules Runtime Personalisés" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf" msgid "General MRF settings" -msgstr "" +msgstr "Paramètres généraux MRF" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_activity_expiration" msgid "Adds automatic expiration to all local activities" -msgstr "" +msgstr "Ajoute une expiration automatique à toutes les activités locales" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_follow_bot" msgid "Automatically follows newly discovered accounts." -msgstr "" +msgstr "Suivre automatiquement les comptes venant d'être découverts." 
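The `mrf_follow_bot` entry above configures a policy that follows newly discovered accounts from a designated local bot account. A loose illustration only — the policy module and the `follower_nickname` key are recalled from upstream documentation rather than taken from this patch:

import Config

# Enable the policy and point it at a local bot account (assumed names).
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.FollowBotPolicy]

config :pleroma, :mrf_follow_bot, follower_nickname: "followbot"
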
#: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_hashtag" msgid "Reject, TWKN-remove or Set-Sensitive messsages with specific hashtags (without the leading #)\n\nNote: This MRF Policy is always enabled, if you want to disable it you have to set empty lists.\n" msgstr "" +"Rejeter, Enlever de TWKN ou marquer comme contenu sensible les messages avec " +"des mots-croisillons (sans mettre le # du début)\n" +"\n" +"Note: cette politique MRF est toujours activée. Si vous voulez la " +"désactiver, vous devez configurer des listes vides.\n" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_hellthread" msgid "Block messages with excessive user mentions" -msgstr "" +msgstr "Bloquer les messages avec un nombre excessif de mentions" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_keyword" msgid "Reject or Word-Replace messages matching a keyword or [Regex](https://hexdocs.pm/elixir/Regex.html)." msgstr "" +"Rejeter ou remplacer les mots des messages qui correspondent à un mot clef " +"ou à une [expression rationnelle (Regex)](https://hexdocs.pm/elixir/Regex." +"html)." #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_mention" msgid "Block messages which mention a specific user" -msgstr "" +msgstr "Bloquer les messages mentionnant un utilisateur particulier" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_normalize_markup" msgid "MRF NormalizeMarkup settings. Scrub configured hypertext markup." msgstr "" +"Paramètres de normalisation MRF. Balaie les balises hypertextes configurées." #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_object_age" msgid "Rejects or delists posts based on their timestamp deviance from your server's clock." msgstr "" +"Rejette ou retire des listes les messages selon l'écart entre leur heure et " +"l'horloge de votre serveur." #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_rejectnonpublic" msgid "RejectNonPublic drops posts with non-public visibility settings." msgstr "" +"RejectNonPublic enlève les messages avec des paramètres de visibilité non-" +"publics." #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_simple" msgid "Simple ingress policies" -msgstr "" +msgstr "Politiques simples pour entrants" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_steal_emoji" msgid "Steals emojis from selected instances when it sees them." -msgstr "" +msgstr "Vole les emojis des instances sélectionnées quand il les voit." #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_subchain" msgid "This policy processes messages through an alternate pipeline when a given message matches certain criteria. All criteria are configured as a map of regular expressions to lists of policy modules." msgstr "" +"Cette politique traite les messages à travers un tuyau séparé lorsqu'un " +"message donné correspond à certain critères. Chaque critère est configuré " +"comme une correspondance entre une expression rationnelle et une liste de " +"modules de politiques." 
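The `mrf_subchain` entry above maps actor-matching regular expressions to extra policy lists. A sketch of the documented shape; the regex and the `DropPolicy` target are illustrative, and the `match_actor` key is an assumption based on upstream docs rather than something this patch touches:

import Config

# Activities whose actor matches the pattern also run through the listed policies.
config :pleroma, :mrf_subchain,
  match_actor: %{
    ~r{^https://unruly\.example\.org} => [Pleroma.Web.ActivityPub.MRF.DropPolicy]
  }
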
#: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:mrf_vocabulary" msgid "Filter messages which belong to certain activity vocabularies" msgstr "" +"Filtrer les messages qui correspondent à certain vocabulaires d'activités" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:oauth2" msgid "Configure OAuth 2 provider capabilities" -msgstr "" +msgstr "Configurer les capacités du fournisseur OAuth 2" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:pools" msgid "Advanced settings for `Gun` workers pools" -msgstr "" +msgstr "Paramètres avancés pour les bacs (pools) de travailleurs `Gun`" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:populate_hashtags_table" msgid "`populate_hashtags_table` background migration settings" -msgstr "" +msgstr "Paramètres de migration en arrière-plan `populate_hashtags_table`" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:rate_limit" msgid "Rate limit settings. This is an advanced feature enabled only for :authentication by default." msgstr "" +"Paramètres de limites par secondes. C'est une fonctionnalité avancée qui, " +"par défaut, n'est activée que pour :authentication." #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:restrict_unauthenticated" msgid "Disallow viewing timelines, user profiles and statuses for unauthenticated users." msgstr "" +"Empêche de regarder les flux, les profils utilisateurs et les status pour " +"les utilisateurs non-authentifiés." #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:rich_media" msgid "If enabled the instance will parse metadata from attached links to generate link previews" msgstr "" +"Si activé, l'instance interprétera les métadonnées des liens joins pour " +"générer les prévisualisations de liens" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format @@ -369,6 +409,8 @@ msgstr "" msgctxt "config description at :pleroma-:static_fe" msgid "Render profiles and posts using server-generated HTML that is viewable without using JavaScript" msgstr "" +"Rendre les profils et les status en utilisant du HTML généré par le serveur " +"qui ne nécessitera pas de JavaScript" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format @@ -380,7 +422,7 @@ msgstr "" #, elixir-autogen, elixir-format msgctxt "config description at :pleroma-:uri_schemes" msgid "URI schemes related settings" -msgstr "" +msgstr "Paramètres liés au schémas d'URI" #: lib/pleroma/docs/translator.ex:5 #, elixir-autogen, elixir-format diff --git a/priv/repo/migrations/20221203232118_add_user_follows_hashtag.exs b/priv/repo/migrations/20221203232118_add_user_follows_hashtag.exs new file mode 100644 index 000000000..2b5ae91be --- /dev/null +++ b/priv/repo/migrations/20221203232118_add_user_follows_hashtag.exs @@ -0,0 +1,14 @@ +defmodule Pleroma.Repo.Migrations.AddUserFollowsHashtag do + use Ecto.Migration + + def change do + create table(:user_follows_hashtag) do + add(:hashtag_id, references(:hashtags)) + add(:user_id, references(:users, type: :uuid, on_delete: :delete_all)) + end + + create(unique_index(:user_follows_hashtag, [:user_id, :hashtag_id])) + + create_if_not_exists(index(:user_follows_hashtag, [:hashtag_id])) + 
end +end diff --git a/rel/vm.args.eex b/rel/vm.args.eex index 71e803264..8e38fee4b 100644 --- a/rel/vm.args.eex +++ b/rel/vm.args.eex @@ -9,3 +9,8 @@ ## Tweak GC to run more often ##-env ERL_FULLSWEEP_AFTER 10 + +# Disable wasteful busywait. ++sbwt none ++sbwtdcpu none ++sbwtdio none diff --git a/test/fixtures/break_analyze.png b/test/fixtures/break_analyze.png Binary files differnew file mode 100644 index 000000000..b5e91b08a --- /dev/null +++ b/test/fixtures/break_analyze.png diff --git a/test/fixtures/mastodon-update-with-likes.json b/test/fixtures/mastodon-update-with-likes.json new file mode 100644 index 000000000..3bdb3ba3d --- /dev/null +++ b/test/fixtures/mastodon-update-with-likes.json @@ -0,0 +1,90 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "atomUri": "ostatus:atomUri", + "conversation": "ostatus:conversation", + "inReplyToAtomUri": "ostatus:inReplyToAtomUri", + "ostatus": "http://ostatus.org#", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "votersCount": "toot:votersCount" + }, + "https://w3id.org/security/v1" + ], + "actor": "https://pol.social/users/mkljczk", + "cc": ["https://www.w3.org/ns/activitystreams#Public", + "https://pol.social/users/aemstuz", "https://gts.mkljczk.pl/users/mkljczk", + "https://pl.fediverse.pl/users/mkljczk", + "https://fedi.kutno.pl/users/mkljczk"], + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263#updates/1738096776", + "object": { + "atomUri": "https://pol.social/users/mkljczk/statuses/113907871635572263", + "attachment": [], + "attributedTo": "https://pol.social/users/mkljczk", + "cc": ["https://www.w3.org/ns/activitystreams#Public", + "https://pol.social/users/aemstuz", "https://gts.mkljczk.pl/users/mkljczk", + "https://pl.fediverse.pl/users/mkljczk", + "https://fedi.kutno.pl/users/mkljczk"], + "content": "<p>test</p>", + "contentMap": { + "pl": "<p>test</p>" + }, + "conversation": "https://fedi.kutno.pl/contexts/43c14c70-d3fb-42b4-a36d-4eacfab9695a", + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263", + "inReplyTo": "https://pol.social/users/aemstuz/statuses/113907854282654767", + "inReplyToAtomUri": "https://pol.social/users/aemstuz/statuses/113907854282654767", + "likes": { + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263/likes", + "totalItems": 1, + "type": "Collection" + }, + "published": "2025-01-28T20:29:45Z", + "replies": { + "first": { + "items": [], + "next": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies?only_other_accounts=true&page=true", + "partOf": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies", + "type": "CollectionPage" + }, + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies", + "type": "Collection" + }, + "sensitive": false, + "shares": { + "id": "https://pol.social/users/mkljczk/statuses/113907871635572263/shares", + "totalItems": 0, + "type": "Collection" + }, + "summary": null, + "tag": [ + { + "href": "https://pol.social/users/aemstuz", + "name": "@aemstuz", + "type": "Mention" + }, + { + "href": "https://gts.mkljczk.pl/users/mkljczk", + "name": "@mkljczk@gts.mkljczk.pl", + "type": "Mention" + }, + { + "href": "https://pl.fediverse.pl/users/mkljczk", + "name": "@mkljczk@fediverse.pl", + "type": "Mention" + }, + { + "href": "https://fedi.kutno.pl/users/mkljczk", + "name": "@mkljczk@fedi.kutno.pl", + "type": "Mention" + } + ], + "to": ["https://pol.social/users/mkljczk/followers"], + "type": "Note", + "updated": "2025-01-28T20:39:36Z", + 
"url": "https://pol.social/@mkljczk/113907871635572263" + }, + "published": "2025-01-28T20:39:36Z", + "to": ["https://pol.social/users/mkljczk/followers"], + "type": "Update" +} diff --git a/test/mix/tasks/pleroma/database_test.exs b/test/mix/tasks/pleroma/database_test.exs index 96a925528..38ed096ae 100644 --- a/test/mix/tasks/pleroma/database_test.exs +++ b/test/mix/tasks/pleroma/database_test.exs @@ -411,7 +411,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do ["scheduled_activities"], ["schema_migrations"], ["thread_mutes"], - # ["user_follows_hashtag"], # not in pleroma + ["user_follows_hashtag"], # ["user_frontend_setting_profiles"], # not in pleroma ["user_invite_tokens"], ["user_notes"], diff --git a/test/pleroma/ecto_type/activity_pub/object_validators/content_language_map_test.exs b/test/pleroma/ecto_type/activity_pub/object_validators/content_language_map_test.exs new file mode 100644 index 000000000..a05871a6f --- /dev/null +++ b/test/pleroma/ecto_type/activity_pub/object_validators/content_language_map_test.exs @@ -0,0 +1,56 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMapTest do + use Pleroma.DataCase, async: true + + alias Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap + + test "it validates" do + data = %{ + "en-US" => "mew mew", + "en-GB" => "meow meow" + } + + assert {:ok, ^data} = ContentLanguageMap.cast(data) + end + + test "it validates empty strings" do + data = %{ + "en-US" => "mew mew", + "en-GB" => "" + } + + assert {:ok, ^data} = ContentLanguageMap.cast(data) + end + + test "it ignores non-strings within the map" do + data = %{ + "en-US" => "mew mew", + "en-GB" => 123 + } + + assert {:ok, validated_data} = ContentLanguageMap.cast(data) + + assert validated_data == %{"en-US" => "mew mew"} + end + + test "it ignores bad locale codes" do + data = %{ + "en-US" => "mew mew", + "en_GB" => "meow meow", + "en<<#@!$#!@%!GB" => "meow meow" + } + + assert {:ok, validated_data} = ContentLanguageMap.cast(data) + + assert validated_data == %{"en-US" => "mew mew"} + end + + test "it complains with non-map data" do + assert :error = ContentLanguageMap.cast("mew") + assert :error = ContentLanguageMap.cast(["mew"]) + assert :error = ContentLanguageMap.cast([%{"en-US" => "mew"}]) + end +end diff --git a/test/pleroma/ecto_type/activity_pub/object_validators/language_code_test.exs b/test/pleroma/ecto_type/activity_pub/object_validators/language_code_test.exs new file mode 100644 index 000000000..086bb3e97 --- /dev/null +++ b/test/pleroma/ecto_type/activity_pub/object_validators/language_code_test.exs @@ -0,0 +1,29 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCodeTest do + use Pleroma.DataCase, async: true + + alias Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode + + test "it accepts language code" do + text = "pl" + assert {:ok, ^text} = LanguageCode.cast(text) + end + + test "it accepts language code with region" do + text = "pl-PL" + assert {:ok, ^text} = LanguageCode.cast(text) + end + + test "errors for invalid language code" do + assert {:error, :invalid_language} = LanguageCode.cast("ru_RU") + assert {:error, :invalid_language} = LanguageCode.cast(" ") + assert {:error, 
:invalid_language} = LanguageCode.cast("en-US\n") + end + + test "errors for non-text" do + assert :error == LanguageCode.cast(42) + end +end diff --git a/test/pleroma/emoji/pack_test.exs b/test/pleroma/emoji/pack_test.exs index 00001abfc..1943ad1b5 100644 --- a/test/pleroma/emoji/pack_test.exs +++ b/test/pleroma/emoji/pack_test.exs @@ -5,6 +5,7 @@ defmodule Pleroma.Emoji.PackTest do use Pleroma.DataCase alias Pleroma.Emoji.Pack + alias Pleroma.Emoji @emoji_path Path.join( Pleroma.Config.get!([:instance, :static_dir]), @@ -53,6 +54,63 @@ defmodule Pleroma.Emoji.PackTest do assert updated_pack.files_count == 5 end + + test "skips existing emojis when adding from zip file", %{pack: pack} do + # First, let's create a test pack with a "bear" emoji + test_pack_path = Path.join(@emoji_path, "test_bear_pack") + File.mkdir_p(test_pack_path) + + # Create a pack.json file + File.write!(Path.join(test_pack_path, "pack.json"), """ + { + "files": { "bear": "bear.png" }, + "pack": { + "description": "Bear Pack", "homepage": "https://pleroma.social", + "license": "Test license", "share-files": true + }} + """) + + # Copy a test image to use as the bear emoji + File.cp!( + Path.absname("test/instance_static/emoji/test_pack/blank.png"), + Path.join(test_pack_path, "bear.png") + ) + + # Load the pack to register the "bear" emoji in the global registry + {:ok, _bear_pack} = Pleroma.Emoji.Pack.load_pack("test_bear_pack") + + # Reload emoji to make sure the bear emoji is in the global registry + Emoji.reload() + + # Verify that the bear emoji exists in the global registry + assert Emoji.exist?("bear") + + # Now try to add a zip file that contains an emoji with the same shortcode + file = %Plug.Upload{ + content_type: "application/zip", + filename: "emojis.zip", + path: Path.absname("test/fixtures/emojis.zip") + } + + {:ok, updated_pack} = Pack.add_file(pack, nil, nil, file) + + # Verify that the "bear" emoji was skipped + refute Map.has_key?(updated_pack.files, "bear") + + # Other emojis should be added + assert Map.has_key?(updated_pack.files, "a_trusted_friend-128") + assert Map.has_key?(updated_pack.files, "auroraborealis") + assert Map.has_key?(updated_pack.files, "baby_in_a_box") + assert Map.has_key?(updated_pack.files, "bear-128") + + # Total count should be 4 (all emojis except "bear") + assert updated_pack.files_count == 4 + + # Clean up the test pack + on_exit(fn -> + File.rm_rf!(test_pack_path) + end) + end end test "returns error when zip file is bad", %{pack: pack} do diff --git a/test/pleroma/integration/mastodon_websocket_test.exs b/test/pleroma/integration/mastodon_websocket_test.exs index f499f54ad..88f32762d 100644 --- a/test/pleroma/integration/mastodon_websocket_test.exs +++ b/test/pleroma/integration/mastodon_websocket_test.exs @@ -268,6 +268,17 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do end) end + test "accepts valid token on Sec-WebSocket-Protocol header", %{token: token} do + assert {:ok, _} = start_socket("?stream=user", [{"Sec-WebSocket-Protocol", token.token}]) + + capture_log(fn -> + assert {:error, %WebSockex.RequestError{code: 401}} = + start_socket("?stream=user", [{"Sec-WebSocket-Protocol", "I am a friend"}]) + + Process.sleep(30) + end) + end + test "accepts valid token on client-sent event", %{token: token} do assert {:ok, pid} = start_socket() diff --git a/test/pleroma/object_test.exs b/test/pleroma/object_test.exs index 48d4d86eb..ed5c2b6c8 100644 --- a/test/pleroma/object_test.exs +++ b/test/pleroma/object_test.exs @@ -6,12 +6,10 @@ defmodule 
Pleroma.ObjectTest do use Pleroma.DataCase use Oban.Testing, repo: Pleroma.Repo - import ExUnit.CaptureLog import Mox import Pleroma.Factory import Tesla.Mock - alias Pleroma.Activity alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Repo @@ -176,8 +174,9 @@ defmodule Pleroma.ObjectTest do filename = Path.basename(href) - assert {:ok, files} = File.ls(uploads_dir) - assert filename in files + expected_path = Path.join([uploads_dir, Pleroma.Upload.Filter.Dedupe.shard_path(filename)]) + + assert File.exists?(expected_path) Object.delete(note) @@ -185,8 +184,7 @@ defmodule Pleroma.ObjectTest do assert Object.get_by_id(note.id).data["deleted"] assert Object.get_by_id(attachment.id) == nil - assert {:ok, files} = File.ls(uploads_dir) - refute filename in files + refute File.exists?(expected_path) end test "with objects that have legacy data.url attribute" do @@ -282,148 +280,6 @@ defmodule Pleroma.ObjectTest do end end - describe "get_by_id_and_maybe_refetch" do - setup do - mock(fn - %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} -> - %Tesla.Env{ - status: 200, - body: File.read!("test/fixtures/tesla_mock/poll_original.json"), - headers: HttpRequestMock.activitypub_object_headers() - } - - env -> - apply(HttpRequestMock, :request, [env]) - end) - - mock_modified = fn resp -> - mock(fn - %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} -> - resp - - env -> - apply(HttpRequestMock, :request, [env]) - end) - end - - on_exit(fn -> mock(fn env -> apply(HttpRequestMock, :request, [env]) end) end) - - [mock_modified: mock_modified] - end - - test "refetches if the time since the last refetch is greater than the interval", %{ - mock_modified: mock_modified - } do - %Object{} = - object = - Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d", - fetch: true - ) - - Object.set_cache(object) - - assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4 - assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0 - - mock_modified.(%Tesla.Env{ - status: 200, - body: File.read!("test/fixtures/tesla_mock/poll_modified.json"), - headers: HttpRequestMock.activitypub_object_headers() - }) - - updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1) - object_in_cache = Object.get_cached_by_ap_id(object.data["id"]) - assert updated_object == object_in_cache - assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 8 - assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 3 - end - - test "returns the old object if refetch fails", %{mock_modified: mock_modified} do - %Object{} = - object = - Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d", - fetch: true - ) - - Object.set_cache(object) - - assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4 - assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0 - - assert capture_log(fn -> - mock_modified.(%Tesla.Env{status: 404, body: ""}) - - updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1) - object_in_cache = Object.get_cached_by_ap_id(object.data["id"]) - assert updated_object == object_in_cache - assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 4 - assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 0 - end) =~ - "[error] Couldn't refresh https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d" - end - - test "does not refetch if 
the time since the last refetch is greater than the interval", %{ - mock_modified: mock_modified - } do - %Object{} = - object = - Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d", - fetch: true - ) - - Object.set_cache(object) - - assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4 - assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0 - - mock_modified.(%Tesla.Env{ - status: 200, - body: File.read!("test/fixtures/tesla_mock/poll_modified.json"), - headers: HttpRequestMock.activitypub_object_headers() - }) - - updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: 100) - object_in_cache = Object.get_cached_by_ap_id(object.data["id"]) - assert updated_object == object_in_cache - assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 4 - assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 0 - end - - test "preserves internal fields on refetch", %{mock_modified: mock_modified} do - %Object{} = - object = - Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d", - fetch: true - ) - - Object.set_cache(object) - - assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4 - assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0 - - user = insert(:user) - activity = Activity.get_create_by_object_ap_id(object.data["id"]) - {:ok, activity} = CommonAPI.favorite(activity.id, user) - object = Object.get_by_ap_id(activity.data["object"]) - - assert object.data["like_count"] == 1 - - mock_modified.(%Tesla.Env{ - status: 200, - body: File.read!("test/fixtures/tesla_mock/poll_modified.json"), - headers: HttpRequestMock.activitypub_object_headers() - }) - - updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1) - object_in_cache = Object.get_cached_by_ap_id(object.data["id"]) - assert updated_object == object_in_cache - assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 8 - assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 3 - - assert updated_object.data["like_count"] == 1 - end - end - describe ":hashtags association" do test "Hashtag records are created with Object record and updated on its change" do user = insert(:user) diff --git a/test/pleroma/release_task_test.exs b/test/pleroma/release_task_test.exs new file mode 100644 index 000000000..5a4293189 --- /dev/null +++ b/test/pleroma/release_task_test.exs @@ -0,0 +1,19 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/> +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.ReleaseTaskTest do + use Pleroma.DataCase, async: true + + alias Pleroma.ReleaseTasks + + test "finding the module" do + task = "search.meilisearch" + assert Mix.Tasks.Pleroma.Search.Meilisearch == ReleaseTasks.find_module(task) + + task = "user" + assert Mix.Tasks.Pleroma.User == ReleaseTasks.find_module(task) + + refute ReleaseTasks.find_module("doesnt.exist") + end +end diff --git a/test/pleroma/safe_zip_test.exs b/test/pleroma/safe_zip_test.exs new file mode 100644 index 000000000..5063f05e4 --- /dev/null +++ b/test/pleroma/safe_zip_test.exs @@ -0,0 +1,496 @@ +defmodule Pleroma.SafeZipTest do + # Not making this async because it creates and deletes files + use ExUnit.Case + + alias Pleroma.SafeZip + + @fixtures_dir "test/fixtures" + @tmp_dir "test/zip_tmp" + + setup do + # Ensure tmp directory exists + File.mkdir_p!(@tmp_dir) + + on_exit(fn -> + # 
Clean up any files created during tests + File.rm_rf!(@tmp_dir) + File.mkdir_p!(@tmp_dir) + end) + + :ok + end + + describe "list_dir_file/1" do + test "lists files in a valid zip" do + {:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "emojis.zip")) + assert is_list(files) + assert length(files) > 0 + end + + test "returns an empty list for empty zip" do + {:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "empty.zip")) + assert files == [] + end + + test "returns error for non-existent file" do + assert {:error, _} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "nonexistent.zip")) + end + + test "only lists regular files, not directories" do + # Create a zip with both files and directories + zip_path = create_zip_with_directory() + + # List files with SafeZip + {:ok, files} = SafeZip.list_dir_file(zip_path) + + # Verify only regular files are listed, not directories + assert "file_in_dir/test_file.txt" in files + assert "root_file.txt" in files + + # Directory entries should not be included in the list + refute "file_in_dir/" in files + end + end + + describe "contains_all_data?/2" do + test "returns true when all files are in the archive" do + # For this test, we'll create our own zip file with known content + # to ensure we can test the contains_all_data? function properly + zip_path = create_zip_with_directory() + archive_data = File.read!(zip_path) + + # Check if the archive contains the root file + # Note: The function expects charlists (Erlang strings) in the MapSet + assert SafeZip.contains_all_data?(archive_data, MapSet.new([~c"root_file.txt"])) + end + + test "returns false when files are missing" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + archive_data = File.read!(archive_path) + + # Create a MapSet with non-existent files + fset = MapSet.new([~c"nonexistent.txt"]) + + refute SafeZip.contains_all_data?(archive_data, fset) + end + + test "returns false for invalid archive data" do + refute SafeZip.contains_all_data?("invalid data", MapSet.new([~c"file.txt"])) + end + + test "only checks for regular files, not directories" do + # Create a zip with both files and directories + zip_path = create_zip_with_directory() + archive_data = File.read!(zip_path) + + # Check if the archive contains a directory (should return false) + refute SafeZip.contains_all_data?(archive_data, MapSet.new([~c"file_in_dir/"])) + + # For this test, we'll manually check if the file exists in the archive + # by extracting it and verifying it exists + extract_dir = Path.join(@tmp_dir, "extract_check") + File.mkdir_p!(extract_dir) + {:ok, files} = SafeZip.unzip_file(zip_path, extract_dir) + + # Verify the root file was extracted + assert Enum.any?(files, fn file -> + Path.basename(file) == "root_file.txt" + end) + + # Verify the file exists on disk + assert File.exists?(Path.join(extract_dir, "root_file.txt")) + end + end + + describe "zip/4" do + test "creates a zip file on disk" do + # Create a test file + test_file_path = Path.join(@tmp_dir, "test_file.txt") + File.write!(test_file_path, "test content") + + # Create a zip file + zip_path = Path.join(@tmp_dir, "test.zip") + assert {:ok, ^zip_path} = SafeZip.zip(zip_path, ["test_file.txt"], @tmp_dir, false) + + # Verify the zip file exists + assert File.exists?(zip_path) + end + + test "creates a zip file in memory" do + # Create a test file + test_file_path = Path.join(@tmp_dir, "test_file.txt") + File.write!(test_file_path, "test content") + + # Create a zip file in memory + zip_name = Path.join(@tmp_dir, 
"test.zip") + + assert {:ok, {^zip_name, zip_data}} = + SafeZip.zip(zip_name, ["test_file.txt"], @tmp_dir, true) + + # Verify the zip data is binary + assert is_binary(zip_data) + end + + test "returns error for unsafe paths" do + # Try to zip a file with path traversal + assert {:error, _} = + SafeZip.zip( + Path.join(@tmp_dir, "test.zip"), + ["../fixtures/test.txt"], + @tmp_dir, + false + ) + end + + test "can create zip with directories" do + # Create a directory structure + dir_path = Path.join(@tmp_dir, "test_dir") + File.mkdir_p!(dir_path) + + file_in_dir_path = Path.join(dir_path, "file_in_dir.txt") + File.write!(file_in_dir_path, "file in directory") + + # Create a zip file + zip_path = Path.join(@tmp_dir, "dir_test.zip") + + assert {:ok, ^zip_path} = + SafeZip.zip( + zip_path, + ["test_dir/file_in_dir.txt"], + @tmp_dir, + false + ) + + # Verify the zip file exists + assert File.exists?(zip_path) + + # Extract and verify the directory structure is preserved + extract_dir = Path.join(@tmp_dir, "extract") + {:ok, files} = SafeZip.unzip_file(zip_path, extract_dir) + + # Check if the file path is in the list, accounting for possible full paths + assert Enum.any?(files, fn file -> + String.ends_with?(file, "file_in_dir.txt") + end) + + # Verify the file exists in the expected location + assert File.exists?(Path.join([extract_dir, "test_dir", "file_in_dir.txt"])) + end + end + + describe "unzip_file/3" do + test "extracts files from a zip archive" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + + # Extract the archive + assert {:ok, files} = SafeZip.unzip_file(archive_path, @tmp_dir) + + # Verify files were extracted + assert is_list(files) + assert length(files) > 0 + + # Verify at least one file exists + first_file = List.first(files) + + # Simply check that the file exists in the tmp directory + assert File.exists?(Path.join(@tmp_dir, Path.basename(first_file))) + end + + test "extracts specific files from a zip archive" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + + # Get list of files in the archive + {:ok, all_files} = SafeZip.list_dir_file(archive_path) + file_to_extract = List.first(all_files) + + # Extract only one file + assert {:ok, [extracted_file]} = + SafeZip.unzip_file(archive_path, @tmp_dir, [file_to_extract]) + + # Verify only the specified file was extracted + assert Path.basename(extracted_file) == Path.basename(file_to_extract) + + # Check that the file exists in the tmp directory + assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract))) + end + + test "returns error for invalid zip file" do + invalid_path = Path.join(@tmp_dir, "invalid.zip") + File.write!(invalid_path, "not a zip file") + + assert {:error, _} = SafeZip.unzip_file(invalid_path, @tmp_dir) + end + + test "creates directories when extracting files in subdirectories" do + # Create a zip with files in subdirectories + zip_path = create_zip_with_directory() + + # Extract the archive + assert {:ok, files} = SafeZip.unzip_file(zip_path, @tmp_dir) + + # Verify files were extracted - handle both relative and absolute paths + assert Enum.any?(files, fn file -> + Path.basename(file) == "test_file.txt" && + String.contains?(file, "file_in_dir") + end) + + assert Enum.any?(files, fn file -> + Path.basename(file) == "root_file.txt" + end) + + # Verify directory was created + dir_path = Path.join(@tmp_dir, "file_in_dir") + assert File.exists?(dir_path) + assert File.dir?(dir_path) + + # Verify file in directory was extracted + file_path = Path.join(dir_path, 
"test_file.txt") + assert File.exists?(file_path) + end + end + + describe "unzip_data/3" do + test "extracts files from zip data" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + archive_data = File.read!(archive_path) + + # Extract the archive from data + assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir) + + # Verify files were extracted + assert is_list(files) + assert length(files) > 0 + + # Verify at least one file exists + first_file = List.first(files) + + # Simply check that the file exists in the tmp directory + assert File.exists?(Path.join(@tmp_dir, Path.basename(first_file))) + end + + test "extracts specific files from zip data" do + archive_path = Path.join(@fixtures_dir, "emojis.zip") + archive_data = File.read!(archive_path) + + # Get list of files in the archive + {:ok, all_files} = SafeZip.list_dir_file(archive_path) + file_to_extract = List.first(all_files) + + # Extract only one file + assert {:ok, extracted_files} = + SafeZip.unzip_data(archive_data, @tmp_dir, [file_to_extract]) + + # Verify only the specified file was extracted + assert Enum.any?(extracted_files, fn path -> + Path.basename(path) == Path.basename(file_to_extract) + end) + + # Simply check that the file exists in the tmp directory + assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract))) + end + + test "returns error for invalid zip data" do + assert {:error, _} = SafeZip.unzip_data("not a zip file", @tmp_dir) + end + + test "creates directories when extracting files in subdirectories from data" do + # Create a zip with files in subdirectories + zip_path = create_zip_with_directory() + archive_data = File.read!(zip_path) + + # Extract the archive from data + assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir) + + # Verify files were extracted - handle both relative and absolute paths + assert Enum.any?(files, fn file -> + Path.basename(file) == "test_file.txt" && + String.contains?(file, "file_in_dir") + end) + + assert Enum.any?(files, fn file -> + Path.basename(file) == "root_file.txt" + end) + + # Verify directory was created + dir_path = Path.join(@tmp_dir, "file_in_dir") + assert File.exists?(dir_path) + assert File.dir?(dir_path) + + # Verify file in directory was extracted + file_path = Path.join(dir_path, "test_file.txt") + assert File.exists?(file_path) + end + end + + # Security tests + describe "security checks" do + test "prevents path traversal in zip extraction" do + # Create a malicious zip file with path traversal + malicious_zip_path = create_malicious_zip_with_path_traversal() + + # Try to extract it with SafeZip + assert {:error, _} = SafeZip.unzip_file(malicious_zip_path, @tmp_dir) + + # Verify the file was not extracted outside the target directory + refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt")) + end + + test "prevents directory traversal in zip listing" do + # Create a malicious zip file with path traversal + malicious_zip_path = create_malicious_zip_with_path_traversal() + + # Try to list files with SafeZip + assert {:error, _} = SafeZip.list_dir_file(malicious_zip_path) + end + + test "prevents path traversal in zip data extraction" do + # Create a malicious zip file with path traversal + malicious_zip_path = create_malicious_zip_with_path_traversal() + malicious_data = File.read!(malicious_zip_path) + + # Try to extract it with SafeZip + assert {:error, _} = SafeZip.unzip_data(malicious_data, @tmp_dir) + + # Verify the file was not extracted outside the target directory + refute 
File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt")) + end + + test "handles zip bomb attempts" do + # Create a zip bomb (a zip with many files or large files) + zip_bomb_path = create_zip_bomb() + + # The SafeZip module should handle this gracefully + # Either by successfully extracting it (if it's not too large) + # or by returning an error (if it detects a potential zip bomb) + result = SafeZip.unzip_file(zip_bomb_path, @tmp_dir) + + case result do + {:ok, _} -> + # If it successfully extracts, make sure it didn't fill up the disk + # This is a simple check to ensure the extraction was controlled + assert File.exists?(@tmp_dir) + + {:error, _} -> + # If it returns an error, that's also acceptable + # The important thing is that it doesn't crash or hang + assert true + end + end + + test "handles deeply nested directory structures" do + # Create a zip with deeply nested directories + deep_nest_path = create_deeply_nested_zip() + + # The SafeZip module should handle this gracefully + result = SafeZip.unzip_file(deep_nest_path, @tmp_dir) + + case result do + {:ok, files} -> + # If it successfully extracts, verify the files were extracted + assert is_list(files) + assert length(files) > 0 + + {:error, _} -> + # If it returns an error, that's also acceptable + # The important thing is that it doesn't crash or hang + assert true + end + end + end + + # Helper functions to create test fixtures + + # Creates a zip file with a path traversal attempt + defp create_malicious_zip_with_path_traversal do + malicious_zip_path = Path.join(@tmp_dir, "path_traversal.zip") + + # Create a file to include in the zip + test_file_path = Path.join(@tmp_dir, "test_file.txt") + File.write!(test_file_path, "malicious content") + + # Use Erlang's zip module directly to create a zip with path traversal + {:ok, charlist_path} = + :zip.create( + String.to_charlist(malicious_zip_path), + [{String.to_charlist("../traversal_attempt.txt"), File.read!(test_file_path)}] + ) + + to_string(charlist_path) + end + + # Creates a zip file with directory entries + defp create_zip_with_directory do + zip_path = Path.join(@tmp_dir, "with_directory.zip") + + # Create files to include in the zip + root_file_path = Path.join(@tmp_dir, "root_file.txt") + File.write!(root_file_path, "root file content") + + # Create a directory and a file in it + dir_path = Path.join(@tmp_dir, "file_in_dir") + File.mkdir_p!(dir_path) + + file_in_dir_path = Path.join(dir_path, "test_file.txt") + File.write!(file_in_dir_path, "file in directory content") + + # Use Erlang's zip module to create a zip with directory structure + {:ok, charlist_path} = + :zip.create( + String.to_charlist(zip_path), + [ + {String.to_charlist("root_file.txt"), File.read!(root_file_path)}, + {String.to_charlist("file_in_dir/test_file.txt"), File.read!(file_in_dir_path)} + ] + ) + + to_string(charlist_path) + end + + # Creates a zip bomb (a zip with many small files) + defp create_zip_bomb do + zip_path = Path.join(@tmp_dir, "zip_bomb.zip") + + # Create a small file to duplicate many times + small_file_path = Path.join(@tmp_dir, "small_file.txt") + File.write!(small_file_path, String.duplicate("A", 100)) + + # Create a list of many files to include in the zip + file_entries = + for i <- 1..100 do + {String.to_charlist("file_#{i}.txt"), File.read!(small_file_path)} + end + + # Use Erlang's zip module to create a zip with many files + {:ok, charlist_path} = + :zip.create( + String.to_charlist(zip_path), + file_entries + ) + + to_string(charlist_path) + end + + # 
Creates a zip with deeply nested directories + defp create_deeply_nested_zip do + zip_path = Path.join(@tmp_dir, "deep_nest.zip") + + # Create a file to include in the zip + file_content = "test content" + + # Create a list of deeply nested files + file_entries = + for i <- 1..10 do + nested_path = Enum.reduce(1..i, "nested", fn j, acc -> "#{acc}/level_#{j}" end) + {String.to_charlist("#{nested_path}/file.txt"), file_content} + end + + # Use Erlang's zip module to create a zip with deeply nested directories + {:ok, charlist_path} = + :zip.create( + String.to_charlist(zip_path), + file_entries + ) + + to_string(charlist_path) + end +end diff --git a/test/pleroma/upload/filter/analyze_metadata_test.exs b/test/pleroma/upload/filter/analyze_metadata_test.exs index e4ac673b2..6e1f2afaf 100644 --- a/test/pleroma/upload/filter/analyze_metadata_test.exs +++ b/test/pleroma/upload/filter/analyze_metadata_test.exs @@ -34,6 +34,20 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadataTest do assert meta.blurhash == "eXJi-E:SwCEm5rCmn$+YWYn+15K#5A$xxCi{SiV]s*W:Efa#s.jE-T" end + test "it gets dimensions for grayscale images" do + upload = %Pleroma.Upload{ + name: "break_analyze.png", + content_type: "image/png", + path: Path.absname("test/fixtures/break_analyze.png"), + tempfile: Path.absname("test/fixtures/break_analyze.png") + } + + {:ok, :filtered, meta} = AnalyzeMetadata.filter(upload) + + assert %{width: 1410, height: 2048} = meta + assert is_nil(meta.blurhash) + end + test "adds the dimensions for videos" do upload = %Pleroma.Upload{ name: "coolvideo.mp4", diff --git a/test/pleroma/upload/filter/dedupe_test.exs b/test/pleroma/upload/filter/dedupe_test.exs index 29c181509..4dc28b998 100644 --- a/test/pleroma/upload/filter/dedupe_test.exs +++ b/test/pleroma/upload/filter/dedupe_test.exs @@ -10,6 +10,10 @@ defmodule Pleroma.Upload.Filter.DedupeTest do @shasum "e30397b58d226d6583ab5b8b3c5defb0c682bda5c31ef07a9f57c1c4986e3781" + test "generates a shard path for a shasum" do + assert "e3/03/97/" <> _path = Dedupe.shard_path(@shasum) + end + test "adds shasum" do File.cp!( "test/fixtures/image.jpg", @@ -23,10 +27,12 @@ defmodule Pleroma.Upload.Filter.DedupeTest do tempfile: Path.absname("test/fixtures/image_tmp.jpg") } + expected_path = Dedupe.shard_path(@shasum <> ".jpg") + assert { :ok, :filtered, - %Pleroma.Upload{id: @shasum, path: @shasum <> ".jpg"} + %Pleroma.Upload{id: @shasum, path: ^expected_path} } = Dedupe.filter(upload) end end diff --git a/test/pleroma/upload_test.exs b/test/pleroma/upload_test.exs index facb634c3..5fd62fa43 100644 --- a/test/pleroma/upload_test.exs +++ b/test/pleroma/upload_test.exs @@ -149,6 +149,9 @@ defmodule Pleroma.UploadTest do test "copies the file to the configured folder with deduping" do File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg") + expected_filename = "e30397b58d226d6583ab5b8b3c5defb0c682bda5c31ef07a9f57c1c4986e3781.jpg" + + expected_path = Pleroma.Upload.Filter.Dedupe.shard_path(expected_filename) file = %Plug.Upload{ content_type: "image/jpeg", @@ -159,8 +162,7 @@ defmodule Pleroma.UploadTest do {:ok, data} = Upload.store(file, filters: [Pleroma.Upload.Filter.Dedupe]) assert List.first(data["url"])["href"] == - Pleroma.Upload.base_url() <> - "e30397b58d226d6583ab5b8b3c5defb0c682bda5c31ef07a9f57c1c4986e3781.jpg" + Path.join([Pleroma.Upload.base_url(), expected_path]) end test "copies the file to the configured folder without deduping" do diff --git a/test/pleroma/user/backup_test.exs b/test/pleroma/user/backup_test.exs index 
24fe09f7e..f4b92adf8 100644 --- a/test/pleroma/user/backup_test.exs +++ b/test/pleroma/user/backup_test.exs @@ -185,13 +185,13 @@ defmodule Pleroma.User.BackupTest do %{"@language" => "und"} ], "bookmarks" => "bookmarks.json", - "followers" => "http://cofe.io/users/cofe/followers", - "following" => "http://cofe.io/users/cofe/following", + "followers" => "followers.json", + "following" => "following.json", "id" => "http://cofe.io/users/cofe", "inbox" => "http://cofe.io/users/cofe/inbox", "likes" => "likes.json", "name" => "Cofe", - "outbox" => "http://cofe.io/users/cofe/outbox", + "outbox" => "outbox.json", "preferredUsername" => "cofe", "publicKey" => %{ "id" => "http://cofe.io/users/cofe#main-key", diff --git a/test/pleroma/user_test.exs b/test/pleroma/user_test.exs index 06afc0709..4a3d6bacc 100644 --- a/test/pleroma/user_test.exs +++ b/test/pleroma/user_test.exs @@ -2919,4 +2919,74 @@ defmodule Pleroma.UserTest do assert [%{"verified_at" => ^verified_at}] = user.fields end + + describe "follow_hashtag/2" do + test "should follow a hashtag" do + user = insert(:user) + hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 1 + assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end) + end + + test "should not follow a hashtag twice" do + user = insert(:user) + hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 1 + assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end) + end + + test "can follow multiple hashtags" do + user = insert(:user) + hashtag = insert(:hashtag) + other_hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + assert {:ok, _} = user |> User.follow_hashtag(other_hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 2 + assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end) + assert other_hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end) + end + end + + describe "unfollow_hashtag/2" do + test "should unfollow a hashtag" do + user = insert(:user) + hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + assert {:ok, _} = user |> User.unfollow_hashtag(hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 0 + end + + test "should not error when trying to unfollow a hashtag twice" do + user = insert(:user) + hashtag = insert(:hashtag) + + assert {:ok, _} = user |> User.follow_hashtag(hashtag) + assert {:ok, _} = user |> User.unfollow_hashtag(hashtag) + assert {:ok, _} = user |> User.unfollow_hashtag(hashtag) + + user = User.get_cached_by_ap_id(user.ap_id) + + assert user.followed_hashtags |> Enum.count() == 0 + end + end end diff --git a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs index fffd8f744..4edca14d8 100644 --- a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs @@ -1320,6 +1320,27 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do html_body: ~r/#{note.data["object"]}/i ) end + + 
test "it accepts an incoming Block", %{conn: conn, data: data} do + user = insert(:user) + + data = + data + |> Map.put("type", "Block") + |> Map.put("to", [user.ap_id]) + |> Map.put("cc", []) + |> Map.put("object", user.ap_id) + + conn = + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/users/#{user.nickname}/inbox", data) + + assert "ok" == json_response(conn, 200) + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + assert Activity.get_by_ap_id(data["id"]) + end end describe "GET /users/:nickname/outbox" do @@ -1673,6 +1694,28 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do assert json_response(conn, 403) end + test "it rejects update activity of object from other actor", %{conn: conn} do + note_activity = insert(:note_activity) + note_object = Object.normalize(note_activity, fetch: false) + user = insert(:user) + + data = %{ + type: "Update", + object: %{ + id: note_object.data["id"] + } + } + + conn = + conn + |> assign(:user, user) + |> put_req_header("content-type", "application/activity+json") + |> post("/users/#{user.nickname}/outbox", data) + + assert json_response(conn, 400) + assert note_object == Object.normalize(note_activity, fetch: false) + end + test "it increases like count when receiving a like action", %{conn: conn} do note_activity = insert(:note_activity) note_object = Object.normalize(note_activity, fetch: false) diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index b4f6fb68a..c7adf6bba 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -232,12 +232,14 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do assert user.avatar == %{ "type" => "Image", - "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}] + "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}], + "name" => "profile picture" } assert user.banner == %{ "type" => "Image", - "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}] + "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}], + "name" => "profile picture" } end @@ -432,6 +434,35 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do assert user.birthday == ~D[2001-02-12] end + + test "fetches avatar description" do + user_id = "https://example.com/users/marcin" + + user_data = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", "marcin") + |> Jason.decode!() + |> Map.delete("featured") + |> Map.update("icon", %{}, fn image -> Map.put(image, "name", "image description") end) + |> Jason.encode!() + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^user_id + } -> + %Tesla.Env{ + status: 200, + body: user_data, + headers: [{"content-type", "application/activity+json"}] + } + end) + + {:ok, user} = ActivityPub.make_user_from_ap_id(user_id) + + assert user.avatar["name"] == "image description" + end end test "it fetches the appropriate tag-restricted posts" do @@ -836,6 +867,33 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do end end + describe "fetch activities for followed hashtags" do + test "it should return public activities that reference a given hashtag" do + hashtag = insert(:hashtag, name: "tenshi") + user = insert(:user) + other_user = insert(:user) + + {:ok, normally_visible} = + CommonAPI.post(other_user, %{status: "hello :)", visibility: "public"}) + + {:ok, public} = 
CommonAPI.post(user, %{status: "maji #tenshi", visibility: "public"}) + {:ok, _unrelated} = CommonAPI.post(user, %{status: "dai #tensh", visibility: "public"}) + {:ok, unlisted} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "unlisted"}) + {:ok, _private} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "private"}) + + activities = + ActivityPub.fetch_activities([other_user.follower_address], %{ + followed_hashtags: [hashtag.id] + }) + + assert length(activities) == 3 + normal_id = normally_visible.id + public_id = public.id + unlisted_id = unlisted.id + assert [%{id: ^normal_id}, %{id: ^public_id}, %{id: ^unlisted_id}] = activities + end + end + describe "fetch activities in context" do test "retrieves activities that have a given context" do {:ok, activity} = ActivityBuilder.insert(%{"type" => "Create", "context" => "2hu"}) diff --git a/test/pleroma/web/activity_pub/mrf/remote_report_policy_test.exs b/test/pleroma/web/activity_pub/mrf/remote_report_policy_test.exs new file mode 100644 index 000000000..8d2a6b4fa --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/remote_report_policy_test.exs @@ -0,0 +1,155 @@ +defmodule Pleroma.Web.ActivityPub.MRF.RemoteReportPolicyTest do + use Pleroma.DataCase, async: true + + alias Pleroma.Web.ActivityPub.MRF.RemoteReportPolicy + + setup do + clear_config([:mrf_remote_report, :reject_all], false) + end + + test "doesn't impact local report" do + clear_config([:mrf_remote_report, :reject_anonymous], true) + clear_config([:mrf_remote_report, :reject_empty_message], true) + + activity = %{ + "type" => "Flag", + "actor" => "http://localhost:4001/actor", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:ok, _} = RemoteReportPolicy.filter(activity) + end + + test "rejects anonymous report if `reject_anonymous: true`" do + clear_config([:mrf_remote_report, :reject_anonymous], true) + clear_config([:mrf_remote_report, :reject_empty_message], true) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/actor", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:reject, _} = RemoteReportPolicy.filter(activity) + end + + test "preserves anonymous report if `reject_anonymous: false`" do + clear_config([:mrf_remote_report, :reject_anonymous], false) + clear_config([:mrf_remote_report, :reject_empty_message], false) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/actor", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:ok, _} = RemoteReportPolicy.filter(activity) + end + + test "rejects report on third party if `reject_third_party: true`" do + clear_config([:mrf_remote_report, :reject_third_party], true) + clear_config([:mrf_remote_report, :reject_empty_message], false) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/users/Gargron", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:reject, _} = RemoteReportPolicy.filter(activity) + end + + test "preserves report on first party if `reject_third_party: true`" do + clear_config([:mrf_remote_report, :reject_third_party], true) + clear_config([:mrf_remote_report, :reject_empty_message], false) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/users/Gargron", + "object" => ["http://localhost:4001/actor"] + } + + assert {:ok, _} = RemoteReportPolicy.filter(activity) + end + + test "preserves report on third party if `reject_third_party: false`" do + clear_config([:mrf_remote_report, :reject_third_party], 
false) + clear_config([:mrf_remote_report, :reject_empty_message], false) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/users/Gargron", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:ok, _} = RemoteReportPolicy.filter(activity) + end + + test "rejects empty message report if `reject_empty_message: true`" do + clear_config([:mrf_remote_report, :reject_anonymous], false) + clear_config([:mrf_remote_report, :reject_empty_message], true) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/users/Gargron", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:reject, _} = RemoteReportPolicy.filter(activity) + end + + test "rejects empty message report (\"\") if `reject_empty_message: true`" do + clear_config([:mrf_remote_report, :reject_anonymous], false) + clear_config([:mrf_remote_report, :reject_empty_message], true) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/users/Gargron", + "object" => ["https://mastodon.online/users/Gargron"], + "content" => "" + } + + assert {:reject, _} = RemoteReportPolicy.filter(activity) + end + + test "preserves empty message report if `reject_empty_message: false`" do + clear_config([:mrf_remote_report, :reject_anonymous], false) + clear_config([:mrf_remote_report, :reject_empty_message], false) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/users/Gargron", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:ok, _} = RemoteReportPolicy.filter(activity) + end + + test "preserves anonymous, empty message report with all settings disabled" do + clear_config([:mrf_remote_report, :reject_anonymous], false) + clear_config([:mrf_remote_report, :reject_empty_message], false) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/actor", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:ok, _} = RemoteReportPolicy.filter(activity) + end + + test "reject remote report if `reject_all: true`" do + clear_config([:mrf_remote_report, :reject_all], true) + clear_config([:mrf_remote_report, :reject_anonymous], false) + clear_config([:mrf_remote_report, :reject_empty_message], false) + + activity = %{ + "type" => "Flag", + "actor" => "https://mastodon.social/users/Gargron", + "content" => "Transphobia", + "object" => ["https://mastodon.online/users/Gargron"] + } + + assert {:reject, _} = RemoteReportPolicy.filter(activity) + end +end diff --git a/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs b/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs index 1a51b7d30..f49a7b8ff 100644 --- a/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs @@ -252,6 +252,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do remote_message = build_remote_message() assert SimplePolicy.filter(remote_message) == {:ok, remote_message} + assert SimplePolicy.id_filter(remote_message["actor"]) end test "activity has a matching host" do @@ -260,6 +261,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do remote_message = build_remote_message() assert {:reject, _} = SimplePolicy.filter(remote_message) + refute SimplePolicy.id_filter(remote_message["actor"]) end test "activity matches with wildcard domain" do @@ -268,6 +270,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do remote_message = build_remote_message() assert {:reject, _} = SimplePolicy.filter(remote_message) + refute 
SimplePolicy.id_filter(remote_message["actor"]) end test "actor has a matching host" do @@ -276,6 +279,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do remote_user = build_remote_user() assert {:reject, _} = SimplePolicy.filter(remote_user) + refute SimplePolicy.id_filter(remote_user["id"]) end test "reject Announce when object would be rejected" do @@ -288,6 +292,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do } assert {:reject, _} = SimplePolicy.filter(announce) + # Note: Non-Applicable for id_filter/1 end test "reject by URI object" do @@ -300,6 +305,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do } assert {:reject, _} = SimplePolicy.filter(announce) + # Note: Non-Applicable for id_filter/1 end end @@ -370,6 +376,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do assert SimplePolicy.filter(local_message) == {:ok, local_message} assert SimplePolicy.filter(remote_message) == {:ok, remote_message} + assert SimplePolicy.id_filter(local_message["actor"]) + assert SimplePolicy.id_filter(remote_message["actor"]) end test "is not empty but activity doesn't have a matching host" do @@ -380,6 +388,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do assert SimplePolicy.filter(local_message) == {:ok, local_message} assert {:reject, _} = SimplePolicy.filter(remote_message) + assert SimplePolicy.id_filter(local_message["actor"]) + refute SimplePolicy.id_filter(remote_message["actor"]) end test "activity has a matching host" do @@ -390,6 +400,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do assert SimplePolicy.filter(local_message) == {:ok, local_message} assert SimplePolicy.filter(remote_message) == {:ok, remote_message} + assert SimplePolicy.id_filter(local_message["actor"]) + assert SimplePolicy.id_filter(remote_message["actor"]) end test "activity matches with wildcard domain" do @@ -400,6 +412,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do assert SimplePolicy.filter(local_message) == {:ok, local_message} assert SimplePolicy.filter(remote_message) == {:ok, remote_message} + assert SimplePolicy.id_filter(local_message["actor"]) + assert SimplePolicy.id_filter(remote_message["actor"]) end test "actor has a matching host" do @@ -408,6 +422,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do remote_user = build_remote_user() assert SimplePolicy.filter(remote_user) == {:ok, remote_user} + assert SimplePolicy.id_filter(remote_user["id"]) end end diff --git a/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs b/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs index e1dbb20c3..829598246 100644 --- a/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs +++ b/test/pleroma/web/activity_pub/object_validators/article_note_page_validator_test.exs @@ -128,6 +128,17 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidatorTest %{valid?: true} = ArticleNotePageValidator.cast_and_validate(note) end + test "a Note with validated likes collection validates" do + insert(:user, ap_id: "https://pol.social/users/mkljczk") + + %{"object" => note} = + "test/fixtures/mastodon-update-with-likes.json" + |> File.read!() + |> Jason.decode!() + + %{valid?: true} = ArticleNotePageValidator.cast_and_validate(note) + end + test "Fedibird quote post" do insert(:user, ap_id: "https://fedibird.com/users/noellabo") @@ -176,4 +187,71 @@ defmodule 
Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidatorTest name: "RE: https://server.example/objects/123" } end + + describe "Note language" do + test "it detects language from JSON-LD context" do + user = insert(:user) + + note_activity = %{ + "@context" => ["https://www.w3.org/ns/activitystreams", %{"@language" => "pl"}], + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "type" => "Create", + "object" => %{ + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "id" => Utils.generate_object_id(), + "type" => "Note", + "content" => "Szczęść Boże", + "attributedTo" => user.ap_id + }, + "actor" => user.ap_id + } + + {:ok, _create_activity, meta} = ObjectValidator.validate(note_activity, []) + + assert meta[:object_data]["language"] == "pl" + end + + test "it detects language from contentMap" do + user = insert(:user) + + note = %{ + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "id" => Utils.generate_object_id(), + "type" => "Note", + "content" => "Szczęść Boże", + "contentMap" => %{ + "de" => "Gott segne", + "pl" => "Szczęść Boże" + }, + "attributedTo" => user.ap_id + } + + {:ok, object} = ArticleNotePageValidator.cast_and_apply(note) + + assert object.language == "pl" + end + + test "it adds contentMap if language is specified" do + user = insert(:user) + + note = %{ + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "id" => Utils.generate_object_id(), + "type" => "Note", + "content" => "тест", + "language" => "uk", + "attributedTo" => user.ap_id + } + + {:ok, object} = ArticleNotePageValidator.cast_and_apply(note) + + assert object.contentMap == %{ + "uk" => "тест" + } + end + end end diff --git a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs index ed71dcb90..fd7a3c772 100644 --- a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs @@ -219,6 +219,36 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier.NoteHandlingTest do "<p><span class=\"h-card\"><a href=\"http://localtesting.pleroma.lol/users/lain\" class=\"u-url mention\">@<span>lain</span></a></span></p>" end + test "it only uses contentMap if content is not present" do + user = insert(:user) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "type" => "Create", + "object" => %{ + "to" => ["https://www.w3.org/ns/activitystreams#Public"], + "cc" => [], + "id" => Utils.generate_object_id(), + "type" => "Note", + "content" => "Hi", + "contentMap" => %{ + "de" => "Hallo", + "uk" => "Привіт" + }, + "inReplyTo" => nil, + "attributedTo" => user.ap_id + }, + "actor" => user.ap_id + } + + {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(message) + object = Object.normalize(data["object"], fetch: false) + + assert object.data["content"] == "Hi" + end + test "it works for incoming notices with a nil contentMap (firefish)" do data = File.read!("test/fixtures/mastodon-post-activity-contentmap.json") diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index 6da7e4a89..fcb8d65d1 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -384,6 +384,24 @@ defmodule 
Pleroma.Web.ActivityPub.TransmogrifierTest do assert modified["object"]["quoteUrl"] == quote_id assert modified["object"]["quoteUri"] == quote_id end + + test "it adds language of the object to its json-ld context" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.object.data) + + assert [_, _, %{"@language" => "pl"}] = modified["@context"] + end + + test "it adds language of the object to Create activity json-ld context" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) + + assert [_, _, %{"@language" => "pl"}] = modified["@context"] + end end describe "actor rewriting" do @@ -621,5 +639,14 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do processed = Transmogrifier.prepare_object(original) assert processed["formerRepresentations"] == original["formerRepresentations"] end + + test "it uses contentMap to specify post language" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "Cześć", language: "pl"}) + object = Transmogrifier.prepare_object(activity.object.data) + + assert %{"contentMap" => %{"pl" => "Cześć"}} = object + end end end diff --git a/test/pleroma/web/activity_pub/utils_test.exs b/test/pleroma/web/activity_pub/utils_test.exs index 872a440cb..45fef154e 100644 --- a/test/pleroma/web/activity_pub/utils_test.exs +++ b/test/pleroma/web/activity_pub/utils_test.exs @@ -173,16 +173,30 @@ defmodule Pleroma.Web.ActivityPub.UtilsTest do end end - test "make_json_ld_header/0" do - assert Utils.make_json_ld_header() == %{ - "@context" => [ - "https://www.w3.org/ns/activitystreams", - "http://localhost:4001/schemas/litepub-0.1.jsonld", - %{ - "@language" => "und" - } - ] - } + describe "make_json_ld_header/1" do + test "makes jsonld header" do + assert Utils.make_json_ld_header() == %{ + "@context" => [ + "https://www.w3.org/ns/activitystreams", + "http://localhost:4001/schemas/litepub-0.1.jsonld", + %{ + "@language" => "und" + } + ] + } + end + + test "includes language if specified" do + assert Utils.make_json_ld_header(%{"language" => "pl"}) == %{ + "@context" => [ + "https://www.w3.org/ns/activitystreams", + "http://localhost:4001/schemas/litepub-0.1.jsonld", + %{ + "@language" => "pl" + } + ] + } + end end describe "get_existing_votes" do diff --git a/test/pleroma/web/activity_pub/views/user_view_test.exs b/test/pleroma/web/activity_pub/views/user_view_test.exs index 651e535ac..a32e72829 100644 --- a/test/pleroma/web/activity_pub/views/user_view_test.exs +++ b/test/pleroma/web/activity_pub/views/user_view_test.exs @@ -68,6 +68,23 @@ defmodule Pleroma.Web.ActivityPub.UserViewTest do result = UserView.render("user.json", %{user: user}) assert result["icon"]["url"] == "https://someurl" assert result["image"]["url"] == "https://somebanner" + + refute result["icon"]["name"] + refute result["image"]["name"] + end + + test "Avatar has a description if the user set one" do + user = + insert(:user, + avatar: %{ + "url" => [%{"href" => "https://someurl"}], + "name" => "a drawing of pleroma-tan using pleroma groups" + } + ) + + result = UserView.render("user.json", %{user: user}) + + assert result["icon"]["name"] == "a drawing of pleroma-tan using pleroma groups" end test "renders an invisible user with the invisible property set to true" do diff --git a/test/pleroma/web/fallback_test.exs b/test/pleroma/web/fallback_test.exs 
index ed34d6490..9184cf8f1 100644 --- a/test/pleroma/web/fallback_test.exs +++ b/test/pleroma/web/fallback_test.exs @@ -32,7 +32,7 @@ defmodule Pleroma.Web.FallbackTest do resp = get(conn, "/foo") assert html_response(resp, 200) =~ "<title>a cool title</title>" - refute html_response(resp, 200) =~ "initial-results" + assert html_response(resp, 200) =~ "<meta content=\"noindex, noarchive\" name=\"robots\">" end test "GET /*path", %{conn: conn} do diff --git a/test/pleroma/web/feed/tag_controller_test.exs b/test/pleroma/web/feed/tag_controller_test.exs index 7d196b228..662235f31 100644 --- a/test/pleroma/web/feed/tag_controller_test.exs +++ b/test/pleroma/web/feed/tag_controller_test.exs @@ -191,4 +191,60 @@ defmodule Pleroma.Web.Feed.TagControllerTest do |> response(404) end end + + describe "restricted for unauthenticated" do + test "returns 404 when local timeline is disabled", %{conn: conn} do + clear_config([:restrict_unauthenticated, :timelines], %{local: true, federated: false}) + + conn + |> put_req_header("accept", "application/rss+xml") + |> get(tag_feed_path(conn, :feed, "pleromaart.rss")) + |> response(404) + end + + test "returns local posts only when federated timeline is disabled", %{conn: conn} do + clear_config([:restrict_unauthenticated, :timelines], %{local: false, federated: true}) + + local_user = insert(:user) + remote_user = insert(:user, local: false) + + local_note = + insert(:note, + user: local_user, + data: %{ + "content" => "local post #PleromaArt", + "summary" => "", + "tag" => ["pleromaart"] + } + ) + + remote_note = + insert(:note, + user: remote_user, + data: %{ + "content" => "remote post #PleromaArt", + "summary" => "", + "tag" => ["pleromaart"] + }, + local: false + ) + + insert(:note_activity, user: local_user, note: local_note) + insert(:note_activity, user: remote_user, note: remote_note, local: false) + + response = + conn + |> put_req_header("accept", "application/rss+xml") + |> get(tag_feed_path(conn, :feed, "pleromaart.rss")) + |> response(200) + + xml = parse(response) + + assert xpath(xml, ~x"//channel/title/text()") == ~c"#pleromaart" + + assert xpath(xml, ~x"//channel/item/title/text()"l) == [ + ~c"local post #PleromaArt" + ] + end + end end diff --git a/test/pleroma/web/feed/user_controller_test.exs b/test/pleroma/web/feed/user_controller_test.exs index 1c17d47b4..0a3aaff5c 100644 --- a/test/pleroma/web/feed/user_controller_test.exs +++ b/test/pleroma/web/feed/user_controller_test.exs @@ -147,6 +147,15 @@ defmodule Pleroma.Web.Feed.UserControllerTest do assert response(conn, 404) end + test "returns noindex meta for missing user", %{conn: conn} do + conn = + conn + |> put_req_header("accept", "text/html") + |> get("/users/nonexisting") + + assert html_response(conn, 200) =~ "<meta content=\"noindex, noarchive\" name=\"robots\">" + end + test "returns feed with public and unlisted activities", %{conn: conn} do user = insert(:user) diff --git a/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs index 4adbaa640..3f696d94d 100644 --- a/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs @@ -56,7 +56,7 @@ defmodule Pleroma.Web.MastodonAPI.MediaControllerTest do conn |> put_req_header("content-type", "multipart/form-data") |> post("/api/v2/media", %{"file" => image, "description" => desc}) - |> json_response_and_validate_schema(202) + |> json_response_and_validate_schema(200) 
 
       assert media_id = response["id"]
 
@@ -111,7 +111,7 @@ defmodule Pleroma.Web.MastodonAPI.MediaControllerTest do
           "file" => large_binary,
           "description" => desc
         })
-        |> json_response_and_validate_schema(202)
+        |> json_response_and_validate_schema(200)
 
       assert media_id = response["id"]
 
diff --git a/test/pleroma/web/mastodon_api/controllers/poll_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/poll_controller_test.exs
index 7912b1d5f..51af87742 100644
--- a/test/pleroma/web/mastodon_api/controllers/poll_controller_test.exs
+++ b/test/pleroma/web/mastodon_api/controllers/poll_controller_test.exs
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.MastodonAPI.PollControllerTest do
+  use Oban.Testing, repo: Pleroma.Repo
   use Pleroma.Web.ConnCase, async: true
 
   alias Pleroma.Object
@@ -27,6 +28,33 @@ defmodule Pleroma.Web.MastodonAPI.PollControllerTest do
       response = json_response_and_validate_schema(conn, 200)
       id = to_string(object.id)
       assert %{"id" => ^id, "expired" => false, "multiple" => false} = response
+
+      # Local activities should not generate an Oban job to refresh
+      assert activity.local
+
+      refute_enqueued(
+        worker: Pleroma.Workers.PollWorker,
+        args: %{"op" => "refresh", "activity_id" => activity.id}
+      )
+    end
+
+    test "creates an oban job to refresh poll if activity is remote", %{conn: conn} do
+      user = insert(:user, local: false)
+      question = insert(:question, user: user)
+      activity = insert(:question_activity, question: question, local: false)
+
+      # Ensure this is not represented as a local activity
+      refute activity.local
+
+      object = Object.normalize(activity, fetch: false)
+
+      get(conn, "/api/v1/polls/#{object.id}")
+      |> json_response_and_validate_schema(200)
+
+      assert_enqueued(
+        worker: Pleroma.Workers.PollWorker,
+        args: %{"op" => "refresh", "activity_id" => activity.id}
+      )
     end
 
     test "does not expose polls for private statuses", %{conn: conn} do
diff --git a/test/pleroma/web/mastodon_api/controllers/tag_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/tag_controller_test.exs
new file mode 100644
index 000000000..71c8e7fc0
--- /dev/null
+++ b/test/pleroma/web/mastodon_api/controllers/tag_controller_test.exs
@@ -0,0 +1,159 @@
+defmodule Pleroma.Web.MastodonAPI.TagControllerTest do
+  use Pleroma.Web.ConnCase
+
+  import Pleroma.Factory
+  import Tesla.Mock
+
+  alias Pleroma.User
+
+  setup do
+    mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+    :ok
+  end
+
+  describe "GET /api/v1/tags/:id" do
+    test "returns 200 with tag" do
+      %{user: user, conn: conn} = oauth_access(["read"])
+
+      tag = insert(:hashtag, name: "jubjub")
+      {:ok, _user} = User.follow_hashtag(user, tag)
+
+      response =
+        conn
+        |> get("/api/v1/tags/jubjub")
+        |> json_response_and_validate_schema(200)
+
+      assert %{
+               "name" => "jubjub",
+               "url" => "http://localhost:4001/tags/jubjub",
+               "history" => [],
+               "following" => true
+             } = response
+    end
+
+    test "returns 404 with unknown tag" do
+      %{conn: conn} = oauth_access(["read"])
+
+      conn
+      |> get("/api/v1/tags/jubjub")
+      |> json_response_and_validate_schema(404)
+    end
+  end
+
+  describe "POST /api/v1/tags/:id/follow" do
+    test "should follow a hashtag" do
+      %{user: user, conn: conn} = oauth_access(["write:follows"])
+      hashtag = insert(:hashtag, name: "jubjub")
+
+      response =
+        conn
+        |> post("/api/v1/tags/jubjub/follow")
+        |> json_response_and_validate_schema(200)
+
+      assert response["following"] == true
+      user = User.get_cached_by_ap_id(user.ap_id)
+      assert User.following_hashtag?(user, hashtag)
+    end
+
"should 404 if hashtag doesn't exist" do + %{conn: conn} = oauth_access(["write:follows"]) + + response = + conn + |> post("/api/v1/tags/rubrub/follow") + |> json_response_and_validate_schema(404) + + assert response["error"] == "Hashtag not found" + end + end + + describe "POST /api/v1/tags/:id/unfollow" do + test "should unfollow a hashtag" do + %{user: user, conn: conn} = oauth_access(["write:follows"]) + hashtag = insert(:hashtag, name: "jubjub") + {:ok, user} = User.follow_hashtag(user, hashtag) + + response = + conn + |> post("/api/v1/tags/jubjub/unfollow") + |> json_response_and_validate_schema(200) + + assert response["following"] == false + user = User.get_cached_by_ap_id(user.ap_id) + refute User.following_hashtag?(user, hashtag) + end + + test "should 404 if hashtag doesn't exist" do + %{conn: conn} = oauth_access(["write:follows"]) + + response = + conn + |> post("/api/v1/tags/rubrub/unfollow") + |> json_response_and_validate_schema(404) + + assert response["error"] == "Hashtag not found" + end + end + + describe "GET /api/v1/followed_tags" do + test "should list followed tags" do + %{user: user, conn: conn} = oauth_access(["read:follows"]) + + response = + conn + |> get("/api/v1/followed_tags") + |> json_response_and_validate_schema(200) + + assert Enum.empty?(response) + + hashtag = insert(:hashtag, name: "jubjub") + {:ok, _user} = User.follow_hashtag(user, hashtag) + + response = + conn + |> get("/api/v1/followed_tags") + |> json_response_and_validate_schema(200) + + assert [%{"name" => "jubjub"}] = response + end + + test "should include a link header to paginate" do + %{user: user, conn: conn} = oauth_access(["read:follows"]) + + for i <- 1..21 do + hashtag = insert(:hashtag, name: "jubjub#{i}}") + {:ok, _user} = User.follow_hashtag(user, hashtag) + end + + response = + conn + |> get("/api/v1/followed_tags") + + json = json_response_and_validate_schema(response, 200) + assert Enum.count(json) == 20 + assert [link_header] = get_resp_header(response, "link") + assert link_header =~ "rel=\"next\"" + next_link = extract_next_link_header(link_header) + + response = + conn + |> get(next_link) + |> json_response_and_validate_schema(200) + + assert Enum.count(response) == 1 + end + + test "should refuse access without read:follows scope" do + %{conn: conn} = oauth_access(["write"]) + + conn + |> get("/api/v1/followed_tags") + |> json_response_and_validate_schema(403) + end + end + + defp extract_next_link_header(header) do + [_, next_link] = Regex.run(~r{<(?<next_link>.*)>; rel="next"}, header) + next_link + end +end diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs index bc6dec32a..e6a164d72 100644 --- a/test/pleroma/web/mastodon_api/views/status_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs @@ -951,6 +951,26 @@ defmodule Pleroma.Web.MastodonAPI.StatusViewTest do assert status.edited_at end + test "it shows post language" do + user = insert(:user) + + {:ok, post} = CommonAPI.post(user, %{status: "Szczęść Boże", language: "pl"}) + + status = StatusView.render("show.json", activity: post) + + assert status.language == "pl" + end + + test "doesn't show post language if it's 'und'" do + user = insert(:user) + + {:ok, post} = CommonAPI.post(user, %{status: "sdifjogijodfg", language: "und"}) + + status = StatusView.render("show.json", activity: post) + + assert status.language == nil + end + test "with a source object" do note = insert(:note, diff --git 
diff --git a/test/pleroma/web/media_proxy/media_proxy_controller_test.exs b/test/pleroma/web/media_proxy/media_proxy_controller_test.exs
index f0c1dd640..f7e52483c 100644
--- a/test/pleroma/web/media_proxy/media_proxy_controller_test.exs
+++ b/test/pleroma/web/media_proxy/media_proxy_controller_test.exs
@@ -248,8 +248,8 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
 
       response = get(conn, url)
 
-      assert response.status == 302
-      assert redirected_to(response) == media_proxy_url
+      assert response.status == 301
+      assert redirected_to(response, 301) == media_proxy_url
     end
 
     test "with `static` param and non-GIF image preview requested, " <>
@@ -290,8 +290,8 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
 
      response = get(conn, url)
 
-      assert response.status == 302
-      assert redirected_to(response) == media_proxy_url
+      assert response.status == 301
+      assert redirected_to(response, 301) == media_proxy_url
     end
 
     test "thumbnails PNG images into PNG", %{
@@ -356,5 +356,32 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
       assert response.status == 302
       assert redirected_to(response) == media_proxy_url
     end
+
+    test "redirects to media proxy URI with 301 when image is too small for preview", %{
+      conn: conn,
+      url: url,
+      media_proxy_url: media_proxy_url
+    } do
+      clear_config([:media_preview_proxy],
+        enabled: true,
+        min_content_length: 1000,
+        image_quality: 85,
+        thumbnail_max_width: 100,
+        thumbnail_max_height: 100
+      )
+
+      Tesla.Mock.mock(fn
+        %{method: :head, url: ^media_proxy_url} ->
+          %Tesla.Env{
+            status: 200,
+            body: "",
+            headers: [{"content-type", "image/png"}, {"content-length", "500"}]
+          }
+      end)
+
+      response = get(conn, url)
+      assert response.status == 301
+      assert redirected_to(response, 301) == media_proxy_url
+    end
   end
 end
diff --git a/test/pleroma/web/metadata/providers/activity_pub_test.exs b/test/pleroma/web/metadata/providers/activity_pub_test.exs
new file mode 100644
index 000000000..c5cf78a60
--- /dev/null
+++ b/test/pleroma/web/metadata/providers/activity_pub_test.exs
@@ -0,0 +1,40 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.Metadata.Providers.ActivityPubTest do
+  use Pleroma.DataCase
+  import Pleroma.Factory
+
+  alias Pleroma.Web.CommonAPI
+  alias Pleroma.Web.Metadata.Providers.ActivityPub
+
+  setup do: clear_config([Pleroma.Web.Metadata, :unfurl_nsfw])
+
+  test "it renders a link for user info" do
+    user = insert(:user)
+    res = ActivityPub.build_tags(%{user: user})
+
+    assert res == [
+             {:link, [rel: "alternate", type: "application/activity+json", href: user.ap_id], []}
+           ]
+  end
+
+  test "it renders a link for a post" do
+    user = insert(:user)
+    {:ok, %{id: activity_id, object: object}} = CommonAPI.post(user, %{status: "hi"})
+
+    result = ActivityPub.build_tags(%{object: object, user: user, activity_id: activity_id})
+
+    assert [
+             {:link,
+              [rel: "alternate", type: "application/activity+json", href: object.data["id"]], []}
+           ] == result
+  end
+
+  test "it returns an empty array for anything else" do
+    result = ActivityPub.build_tags(%{})
+
+    assert result == []
+  end
+end
diff --git a/test/pleroma/web/metadata/providers/feed_test.exs b/test/pleroma/web/metadata/providers/feed_test.exs
index e593453da..40d9d0909 100644
--- a/test/pleroma/web/metadata/providers/feed_test.exs
+++ b/test/pleroma/web/metadata/providers/feed_test.exs
@@ -15,4 +15,10 @@ defmodule Pleroma.Web.Metadata.Providers.FeedTest do
"alternate", type: "application/atom+xml", href: "/users/lain/feed.atom"], []} ] end + + test "it doesn't render a link to remote user's feed" do + user = insert(:user, nickname: "lain@lain.com", local: false) + + assert Feed.build_tags(%{user: user}) == [] + end end diff --git a/test/pleroma/web/o_auth/ldap_authorization_test.exs b/test/pleroma/web/o_auth/ldap_authorization_test.exs index 07ce2eed8..35b947fd0 100644 --- a/test/pleroma/web/o_auth/ldap_authorization_test.exs +++ b/test/pleroma/web/o_auth/ldap_authorization_test.exs @@ -28,11 +28,7 @@ defmodule Pleroma.Web.OAuth.LDAPAuthorizationTest do {:eldap, [], [ open: fn [^host], [{:port, ^port}, {:ssl, false} | _] -> {:ok, self()} end, - simple_bind: fn _connection, _dn, ^password -> :ok end, - close: fn _connection -> - send(self(), :close_connection) - :ok - end + simple_bind: fn _connection, _dn, ^password -> :ok end ]} ] do conn = @@ -50,7 +46,6 @@ defmodule Pleroma.Web.OAuth.LDAPAuthorizationTest do token = Repo.get_by(Token, token: token) assert token.user_id == user.id - assert_received :close_connection end end @@ -72,10 +67,6 @@ defmodule Pleroma.Web.OAuth.LDAPAuthorizationTest do wholeSubtree: fn -> :ok end, search: fn _connection, _options -> {:ok, {:eldap_search_result, [{:eldap_entry, ~c"", []}], []}} - end, - close: fn _connection -> - send(self(), :close_connection) - :ok end ]} ] do @@ -94,7 +85,6 @@ defmodule Pleroma.Web.OAuth.LDAPAuthorizationTest do token = Repo.get_by(Token, token: token) |> Repo.preload(:user) assert token.user.nickname == user.nickname - assert_received :close_connection end end @@ -111,11 +101,7 @@ defmodule Pleroma.Web.OAuth.LDAPAuthorizationTest do {:eldap, [], [ open: fn [^host], [{:port, ^port}, {:ssl, false} | _] -> {:ok, self()} end, - simple_bind: fn _connection, _dn, ^password -> {:error, :invalidCredentials} end, - close: fn _connection -> - send(self(), :close_connection) - :ok - end + simple_bind: fn _connection, _dn, ^password -> {:error, :invalidCredentials} end ]} ] do conn = @@ -129,7 +115,6 @@ defmodule Pleroma.Web.OAuth.LDAPAuthorizationTest do }) assert %{"error" => "Invalid credentials"} = json_response(conn, 400) - assert_received :close_connection end end end diff --git a/test/pleroma/web/rich_media/card_test.exs b/test/pleroma/web/rich_media/card_test.exs index 387defc8c..c69f85323 100644 --- a/test/pleroma/web/rich_media/card_test.exs +++ b/test/pleroma/web/rich_media/card_test.exs @@ -83,4 +83,23 @@ defmodule Pleroma.Web.RichMedia.CardTest do Card.get_by_activity(activity) ) end + + test "refuses to crawl URL in activity from ignored host/domain" do + clear_config([:rich_media, :ignore_hosts], ["example.com"]) + + user = insert(:user) + + url = "https://example.com/ogp" + + {:ok, activity} = + CommonAPI.post(user, %{ + status: "[test](#{url})", + content_type: "text/markdown" + }) + + refute_enqueued( + worker: RichMediaWorker, + args: %{"url" => url, "activity_id" => activity.id} + ) + end end diff --git a/test/pleroma/web/streamer_test.exs b/test/pleroma/web/streamer_test.exs index 262ff11d2..85978e824 100644 --- a/test/pleroma/web/streamer_test.exs +++ b/test/pleroma/web/streamer_test.exs @@ -558,6 +558,36 @@ defmodule Pleroma.Web.StreamerTest do assert_receive {:render_with_user, _, "status_update.json", ^create, _} refute Streamer.filtered_by_user?(user, edited) end + + test "it streams posts containing followed hashtags on the 'user' stream", %{ + user: user, + token: oauth_token + } do + hashtag = insert(:hashtag, %{name: "tenshi"}) + other_user = insert(:user) + 
+      {:ok, user} = User.follow_hashtag(user, hashtag)
+
+      Streamer.get_topic_and_add_socket("user", user, oauth_token)
+      {:ok, activity} = CommonAPI.post(other_user, %{status: "hey #tenshi"})
+
+      assert_receive {:render_with_user, _, "update.json", ^activity, _}
+    end
+
+    test "should not stream private posts containing followed hashtags on the 'user' stream", %{
+      user: user,
+      token: oauth_token
+    } do
+      hashtag = insert(:hashtag, %{name: "tenshi"})
+      other_user = insert(:user)
+      {:ok, user} = User.follow_hashtag(user, hashtag)
+
+      Streamer.get_topic_and_add_socket("user", user, oauth_token)
+
+      {:ok, activity} =
+        CommonAPI.post(other_user, %{status: "hey #tenshi", visibility: "private"})
+
+      refute_receive {:render_with_user, _, "update.json", ^activity, _}
+    end
   end
 
   describe "public streams" do
diff --git a/test/pleroma/web/twitter_api/controller_test.exs b/test/pleroma/web/twitter_api/controller_test.exs
index 495d371d2..494be9ec7 100644
--- a/test/pleroma/web/twitter_api/controller_test.exs
+++ b/test/pleroma/web/twitter_api/controller_test.exs
@@ -69,7 +69,7 @@ defmodule Pleroma.Web.TwitterAPI.ControllerTest do
         |> hd()
         |> Map.keys()
 
-      assert keys -- ["id", "app_name", "valid_until"] == []
+      assert Enum.sort(keys) == Enum.sort(["id", "app_name", "valid_until", "scopes"])
     end
 
     test "revoke token", %{token: token} do
diff --git a/test/pleroma/workers/poll_worker_test.exs b/test/pleroma/workers/poll_worker_test.exs
index 749df8aff..a7cbbdb83 100644
--- a/test/pleroma/workers/poll_worker_test.exs
+++ b/test/pleroma/workers/poll_worker_test.exs
@@ -11,10 +11,10 @@ defmodule Pleroma.Workers.PollWorkerTest do
 
   alias Pleroma.Workers.PollWorker
 
-  test "poll notification job" do
+  test "local poll ending notification job" do
     user = insert(:user)
     question = insert(:question, user: user)
-    activity = insert(:question_activity, question: question)
+    activity = insert(:question_activity, question: question, user: user)
 
     PollWorker.schedule_poll_end(activity)
 
@@ -44,6 +44,65 @@ defmodule Pleroma.Workers.PollWorkerTest do
       # Ensure notifications were streamed out when job executes
       assert called(Pleroma.Web.Streamer.stream(["user", "user:notification"], :_))
       assert called(Pleroma.Web.Push.send(:_))
+
+      # Skip refreshing polls for local activities
+      assert activity.local
+
+      refute_enqueued(
+        worker: PollWorker,
+        args: %{"op" => "refresh", "activity_id" => activity.id}
+      )
+    end
+  end
+
+  test "remote poll ending notification job schedules refresh" do
+    user = insert(:user, local: false)
+    question = insert(:question, user: user)
+    activity = insert(:question_activity, question: question, user: user)
+
+    PollWorker.schedule_poll_end(activity)
+
+    expected_job_args = %{"activity_id" => activity.id, "op" => "poll_end"}
+
+    assert_enqueued(args: expected_job_args)
+
+    [job] = all_enqueued(worker: PollWorker)
+    PollWorker.perform(job)
+
+    refute activity.local
+
+    assert_enqueued(
+      worker: PollWorker,
+      args: %{"op" => "refresh", "activity_id" => activity.id}
+    )
+  end
+
+  test "poll refresh" do
+    user = insert(:user, local: false)
+    question = insert(:question, user: user)
+    activity = insert(:question_activity, question: question)
+
+    PollWorker.new(%{"op" => "refresh", "activity_id" => activity.id})
+    |> Oban.insert()
+
+    expected_job_args = %{"activity_id" => activity.id, "op" => "refresh"}
+
+    assert_enqueued(args: expected_job_args)
+
+    with_mocks([
+      {
+        Pleroma.Web.Streamer,
+        [],
+        [
+          stream: fn _, _ -> nil end
+        ]
+      }
+    ]) do
+      [job] = all_enqueued(worker: PollWorker)
+      PollWorker.perform(job)
+
+      # Ensure updates are streamed out
+      assert called(Pleroma.Web.Streamer.stream(["user", "list", "public", "public:local"], :_))
     end
   end
 end
diff --git a/test/support/factory.ex b/test/support/factory.ex
index 8f1c6faf9..88c4ed8e5 100644
--- a/test/support/factory.ex
+++ b/test/support/factory.ex
@@ -241,6 +241,7 @@ defmodule Pleroma.Factory do
 
   def question_factory(attrs \\ %{}) do
     user = attrs[:user] || insert(:user)
+    closed = attrs[:closed] || DateTime.utc_now() |> DateTime.add(86_400) |> DateTime.to_iso8601()
 
     data = %{
       "id" => Pleroma.Web.ActivityPub.Utils.generate_object_id(),
@@ -251,7 +252,7 @@ defmodule Pleroma.Factory do
       "to" => ["https://www.w3.org/ns/activitystreams#Public"],
       "cc" => [user.follower_address],
       "context" => Pleroma.Web.ActivityPub.Utils.generate_context_id(),
-      "closed" => DateTime.utc_now() |> DateTime.add(86_400) |> DateTime.to_iso8601(),
+      "closed" => closed,
       "content" => "Which flavor of ice cream do you prefer?",
       "oneOf" => [
         %{
@@ -509,7 +510,8 @@ defmodule Pleroma.Factory do
     %Pleroma.Activity{
       data: data,
       actor: data["actor"],
-      recipients: data["to"]
+      recipients: data["to"],
+      local: user.local
     }
     |> Map.merge(attrs)
   end
@@ -666,4 +668,11 @@ defmodule Pleroma.Factory do
     |> Map.merge(params)
     |> Pleroma.Announcement.add_rendered_properties()
   end
+
+  def hashtag_factory(params \\ %{}) do
+    %Pleroma.Hashtag{
+      name: "test #{sequence(:hashtag_name, & &1)}"
+    }
+    |> Map.merge(params)
+  end
 end
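For reference, the factory changes above are what the new tests lean on: question_factory now honours an optional closed attribute, the activity factory touched here takes its local flag from its user, and a hashtag_factory is available. A usage sketch drawn from the tests earlier in this diff (the tag name is illustrative):

    # A remote user yields a remote (local: false) question activity.
    user = insert(:user, local: false)
    question = insert(:question, user: user)
    activity = insert(:question_activity, question: question, user: user)

    # Hashtags can be inserted directly and followed in tests.
    hashtag = insert(:hashtag, name: "tenshi")
    {:ok, user} = User.follow_hashtag(user, hashtag)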