Compare commits

..

64 commits

Author SHA1 Message Date
Sebastian Jambor
a54c0993c1 sign json ld fetches 2023-12-17 23:06:57 +01:00
Sebastian Jambor
fa829953a9 allow jrd+json in json_ld_controller to support strict webfinger endpoints 2023-08-08 19:51:30 +02:00
Sebastian Jambor
a19c462245 only remove accounts containing underscores 2023-08-02 10:38:33 +02:00
Sebastian Jambor
97bde32a9c replace default avatar 2023-07-23 09:27:37 +02:00
Sebastian Jambor
0c320d04de update visualization library 2023-07-17 09:34:59 +02:00
Sebastian Jambor
024d357dd0 update visualization library 2023-07-15 09:22:07 +02:00
Sebastian Jambor
fc262c22cd update visualization library; do not delete alice account 2023-07-12 13:49:17 +02:00
Sebastian Jambor
1498fe1102 update readme 2023-07-11 13:15:40 +02:00
Sebastian Jambor
a3c81fc929 Merge tag 'v4.1.4' into merge-security-fix 2023-07-11 13:11:33 +02:00
Sebastian Jambor
b45e686a89 Merge tag 'v4.1.3' into merge-security-fix 2023-07-11 13:07:56 +02:00
Sebastian Jambor
cd523cc8f3 fix opening explorer from log 2023-07-11 13:07:31 +02:00
Sebastian Jambor
4f67ad32c7 update visualization library 2023-07-11 09:37:54 +02:00
Claire
3f5af768c8 Bump version to v4.1.4 2023-07-07 19:37:21 +02:00
Claire
cb8ab46302 Update dependencies 2023-07-07 19:37:21 +02:00
Claire
53b979d5c7 Fix processing of media files with unusual names (#25788) 2023-07-07 19:37:21 +02:00
Claire
f2bbac3f9f Fix crash in admin interface when viewing a remote user with verified links (#25796) 2023-07-07 19:37:21 +02:00
Claire
015ed99612 Fix branding:generate_app_icons failing because of disallowed ICO coder (#25794) 2023-07-07 19:37:21 +02:00
nemobis
cf58535193 Fix typo in CHANGELOG.md (#25764) 2023-07-07 19:37:21 +02:00
Claire
0d5781ca76 Bump version to v4.1.3 2023-07-06 15:07:20 +02:00
Claire
32ebeed59b Merge pull request from GHSA-55j9-c3mp-6fcq 2023-07-06 15:06:50 +02:00
Claire
e75ad1de0f Merge pull request from GHSA-9pxv-6qvf-pjwc
* Fix timeout handling of outbound HTTP requests

* Use CLOCK_MONOTONIC instead of Time.now
2023-07-06 15:06:24 +02:00
Claire
0aa0b71f2c Merge pull request from GHSA-9928-3cp5-93fm
* Fix attachments getting processed despite failing content-type validation

* Add a restrictive ImageMagick security policy tailored for Mastodon

* Fix misdetection of MP3 files with large cover art

* Reject unprocessable audio/video files instead of keeping them unchanged
2023-07-06 15:05:05 +02:00
Claire
c4f2609f7a Merge pull request from GHSA-ccm4-vgcc-73hp
* Tighten allowed HTML in oEmbed-based preview cards

* Sanitize preview cards at render time

* Add `sandbox` attribute to preview card iframes
2023-07-06 15:03:33 +02:00
Claire
9b6c0cac7d Add hardened headers to user-uploaded files (#25756) 2023-07-06 14:32:26 +02:00
Claire
fac2c9eb7d Update rack, rails, nokogiri and doorkeeper gems 2023-07-06 13:45:40 +02:00
Claire
a3d69a2c5d Fix OAuth apps page crashing when listing apps with certain admin API scopes (#25713) 2023-07-06 13:45:40 +02:00
Renaud Chaput
8eb1bb8ba6 Allow carets in URL search params (#25216) 2023-07-06 13:45:40 +02:00
Vyr Cossont
652ff76462 Fix Redis client and type errors introduced in #24285 (#24342) 2023-07-06 13:45:40 +02:00
Vyr Cossont
6f484fbbd2 IndexingScheduler: fetch and import in batches (#24285)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
2023-07-06 13:45:40 +02:00
Claire
79f5b8f156 Fix ResolveURLService not resolving local URLs for remote content (#25637) 2023-07-06 13:45:40 +02:00
Claire
f8930a67a0 Change /api/v1/statuses/:id/history to always return at least one item (#25510) 2023-07-06 13:45:40 +02:00
Claire
e65e3a6d14 Add finer permission requirements for managing webhooks (#25463) 2023-07-06 13:45:40 +02:00
Claire
8acbfc6ab1 Fix wrong view being displayed when a webhook fails validation (#25464) 2023-07-06 13:45:40 +02:00
Emelia Smith
3ef53958b2 Prevent UserCleanupScheduler from overwhelming streaming (#25519) 2023-07-06 13:45:40 +02:00
Daniel M Brasil
fd1ffd72eb Fix incorrect pagination headers in /api/v2/admin/accounts (#25477) 2023-07-06 13:45:40 +02:00
Claire
7bd34f8b23 Fix infinite loop in AccountsStatusesCleanupScheduler (#24840) 2023-07-06 13:45:40 +02:00
Claire
7012bf6ed3 Improve automatic post cleanup worker performances (#24785) 2023-07-06 13:45:40 +02:00
Claire
d9e45f2fa9 Fix AccountsStatusesCleanupScheduler not spreading deletes across accounts correctly (#24607) 2023-07-06 13:45:40 +02:00
Claire
0e139e3c4d Change automatic post deletion thresholds and load detection (#24614) 2023-07-06 13:45:40 +02:00
Emelia Smith
23e7b4d28d Fix logging of messages that are binary before closing their connection (#25361) 2023-07-06 13:45:40 +02:00
Emelia Smith
e78ee582f7 Fix performance of streaming by parsing message JSON once (#25278) 2023-07-06 13:45:40 +02:00
Claire
a197fc094f Fix CSP headers when S3_ALIAS_HOST includes a path component (#25273) 2023-07-06 13:45:40 +02:00
Daniel M Brasil
bd7cbeeadf Fix tootctl accounts approve --number N not approving N earliest registrations (#24605) 2023-07-06 13:45:40 +02:00
Claire
2779bce9a2 Add fallback redirection when getting a webfinger query LOCAL_DOMAIN@LOCAL_DOMAIN (#23600)
Co-authored-by: Eugen Rochko <eugen@zeonfederated.com>
2023-07-06 13:45:40 +02:00
Claire
210ff36860 Change AccessTokensVacuum to also delete expired tokens (#24868) 2023-07-06 13:45:40 +02:00
Claire
99c2bbbec9 Change profile updates to be sent to recently-mentioned servers (#24852) 2023-07-06 13:45:40 +02:00
Claire
7e58779300 Fix reports not being closed when performing batch suspensions (#24988) 2023-07-06 13:45:40 +02:00
Claire
cca464bce3 Fix being able to vote on your own polls (#25015) 2023-07-06 13:45:40 +02:00
Claire
1301af60e0 Fix race condition when reblogging a status (#25016) 2023-07-06 13:45:40 +02:00
Claire
f962e83856 Change OpenGraph-based embeds to allow fullscreen (#25058) 2023-07-06 13:45:40 +02:00
Claire
b3cbcd7447 Fix “Authorized applications” inefficiently and incorrectly getting last use date (#25060) 2023-07-06 13:45:40 +02:00
Claire
72d96bf17a Remove invalid X-Frame-Options: ALLOWALL (#25070) 2023-07-06 13:45:40 +02:00
Claire
b1ac3562df Change Identity to not destroy associated User on destroy (#25098) 2023-07-06 13:45:40 +02:00
Claire
4c6c790f80 Fix /api/v1/conversations sometimes returning empty accounts (#25499) 2023-07-06 13:45:40 +02:00
Claire
036ac5b5c9 Fix ArgumentError when loading newer Private Mentions (#25399) 2023-07-06 13:45:40 +02:00
Claire
3e1724e972 Fix multiple N+1s in ConversationsController (#25134) 2023-07-06 13:45:40 +02:00
Claire
bc8592627b Fix user archive takeouts when using OpenStack Swift (#24431) 2023-07-06 13:45:40 +02:00
Sebastian Jambor
710d359bb2 extract component in activity log to avoid new render 2023-07-05 19:35:53 +02:00
Sebastian Jambor
13c83bd4ef allow application/json responses to circumvent a lemmy bug 2023-07-04 20:04:24 +02:00
Sebastian Jambor
010f9e30e3 update visualization library 2023-07-03 22:22:58 +02:00
Sebastian Jambor
3c51bc630e update visualization library 2023-07-03 22:00:27 +02:00
Sebastian Jambor
7ee124508e catch exceptions in activity subscriber 2023-07-03 19:56:10 +02:00
Sebastian Jambor
af095cb887 fix audience helper when passed array with null 2023-07-03 19:43:30 +02:00
Sebastian Jambor
9a1ca25b49 disable dependabot 2023-05-15 21:17:36 +02:00
82 changed files with 1155 additions and 339 deletions

View file

@ -3,28 +3,28 @@
# Please see the documentation for all configuration options: # Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2 # version: 2
updates: # updates:
- package-ecosystem: npm # - package-ecosystem: npm
directory: '/' # directory: '/'
schedule: # schedule:
interval: weekly # interval: weekly
open-pull-requests-limit: 99 # open-pull-requests-limit: 99
allow: # allow:
- dependency-type: direct # - dependency-type: direct
#
- package-ecosystem: bundler # - package-ecosystem: bundler
directory: '/' # directory: '/'
schedule: # schedule:
interval: weekly # interval: weekly
open-pull-requests-limit: 99 # open-pull-requests-limit: 99
allow: # allow:
- dependency-type: direct # - dependency-type: direct
#
- package-ecosystem: github-actions # - package-ecosystem: github-actions
directory: '/' # directory: '/'
schedule: # schedule:
interval: weekly # interval: weekly
open-pull-requests-limit: 99 # open-pull-requests-limit: 99
allow: # allow:
- dependency-type: direct # - dependency-type: direct

View file

@ -3,6 +3,62 @@ Changelog
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [4.1.4] - 2023-07-07
### Fixed
- Fix branding:generate_app_icons failing because of disallowed ICO coder ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25794))
- Fix crash in admin interface when viewing a remote user with verified links ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25796))
- Fix processing of media files with unusual names ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25788))
## [4.1.3] - 2023-07-06
### Added
- Add fallback redirection when getting a webfinger query `LOCAL_DOMAIN@LOCAL_DOMAIN` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/23600))
### Changed
- Change OpenGraph-based embeds to allow fullscreen ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25058))
- Change AccessTokensVacuum to also delete expired tokens ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24868))
- Change profile updates to be sent to recently-mentioned servers ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24852))
- Change automatic post deletion thresholds and load detection ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24614))
- Change `/api/v1/statuses/:id/history` to always return at least one item ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25510))
- Change auto-linking to allow carets in URL query params ([renchap](https://github.com/mastodon/mastodon/pull/25216))
### Removed
- Remove invalid `X-Frame-Options: ALLOWALL` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25070))
### Fixed
- Fix wrong view being displayed when a webhook fails validation ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25464))
- Fix soft-deleted post cleanup scheduler overwhelming the streaming server ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25519))
- Fix incorrect pagination headers in `/api/v2/admin/accounts` ([danielmbrasil](https://github.com/mastodon/mastodon/pull/25477))
- Fix multiple inefficiencies in automatic post cleanup worker ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24607), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/24785), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/24840))
- Fix performance of streaming by parsing message JSON once ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25278), [ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25361))
- Fix CSP headers when `S3_ALIAS_HOST` includes a path component ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25273))
- Fix `tootctl accounts approve --number N` not approving N earliest registrations ([danielmbrasil](https://github.com/mastodon/mastodon/pull/24605))
- Fix reports not being closed when performing batch suspensions ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24988))
- Fix being able to vote on your own polls ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25015))
- Fix race condition when reblogging a status ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25016))
- Fix “Authorized applications” inefficiently and incorrectly getting last use date ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25060))
- Fix “Authorized applications” crashing when listing apps with certain admin API scopes ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25713))
- Fix multiple N+1s in ConversationsController ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25134), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/25399), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/25499))
- Fix user archive takeouts when using OpenStack Swift ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/24431))
- Fix searching for remote content by URL not working under certain conditions ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25637))
- Fix inefficiencies in indexing content for search ([VyrCossont](https://github.com/mastodon/mastodon/pull/24285), [VyrCossont](https://github.com/mastodon/mastodon/pull/24342))
### Security
- Add finer permission requirements for managing webhooks ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25463))
- Update dependencies
- Add hardening headers for user-uploaded files ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/25756))
- Fix verified links possibly hiding important parts of the URL (CVE-2023-36462)
- Fix timeout handling of outbound HTTP requests (CVE-2023-36461)
- Fix arbitrary file creation through media processing (CVE-2023-36460)
- Fix possible XSS in preview cards (CVE-2023-36459)
## [4.1.2] - 2023-04-04 ## [4.1.2] - 2023-04-04
### Fixed ### Fixed

View file

@ -31,7 +31,7 @@ gem 'browser'
gem 'charlock_holmes', '~> 0.7.7' gem 'charlock_holmes', '~> 0.7.7'
gem 'chewy', '~> 7.2' gem 'chewy', '~> 7.2'
gem 'devise', '~> 4.8' gem 'devise', '~> 4.8'
gem 'devise-two-factor', '~> 4.1' gem 'devise-two-factor', '~> 4.0'
group :pam_authentication, optional: true do group :pam_authentication, optional: true do
gem 'devise_pam_authenticatable2', '~> 9.2' gem 'devise_pam_authenticatable2', '~> 9.2'

View file

@ -10,40 +10,40 @@ GIT
GEM GEM
remote: https://rubygems.org/ remote: https://rubygems.org/
specs: specs:
actioncable (6.1.7.2) actioncable (6.1.7.4)
actionpack (= 6.1.7.2) actionpack (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
nio4r (~> 2.0) nio4r (~> 2.0)
websocket-driver (>= 0.6.1) websocket-driver (>= 0.6.1)
actionmailbox (6.1.7.2) actionmailbox (6.1.7.4)
actionpack (= 6.1.7.2) actionpack (= 6.1.7.4)
activejob (= 6.1.7.2) activejob (= 6.1.7.4)
activerecord (= 6.1.7.2) activerecord (= 6.1.7.4)
activestorage (= 6.1.7.2) activestorage (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
mail (>= 2.7.1) mail (>= 2.7.1)
actionmailer (6.1.7.2) actionmailer (6.1.7.4)
actionpack (= 6.1.7.2) actionpack (= 6.1.7.4)
actionview (= 6.1.7.2) actionview (= 6.1.7.4)
activejob (= 6.1.7.2) activejob (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
mail (~> 2.5, >= 2.5.4) mail (~> 2.5, >= 2.5.4)
rails-dom-testing (~> 2.0) rails-dom-testing (~> 2.0)
actionpack (6.1.7.2) actionpack (6.1.7.4)
actionview (= 6.1.7.2) actionview (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
rack (~> 2.0, >= 2.0.9) rack (~> 2.0, >= 2.0.9)
rack-test (>= 0.6.3) rack-test (>= 0.6.3)
rails-dom-testing (~> 2.0) rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.2.0) rails-html-sanitizer (~> 1.0, >= 1.2.0)
actiontext (6.1.7.2) actiontext (6.1.7.4)
actionpack (= 6.1.7.2) actionpack (= 6.1.7.4)
activerecord (= 6.1.7.2) activerecord (= 6.1.7.4)
activestorage (= 6.1.7.2) activestorage (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
nokogiri (>= 1.8.5) nokogiri (>= 1.8.5)
actionview (6.1.7.2) actionview (6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
builder (~> 3.1) builder (~> 3.1)
erubi (~> 1.4) erubi (~> 1.4)
rails-dom-testing (~> 2.0) rails-dom-testing (~> 2.0)
@ -54,22 +54,22 @@ GEM
case_transform (>= 0.2) case_transform (>= 0.2)
jsonapi-renderer (>= 0.1.1.beta1, < 0.3) jsonapi-renderer (>= 0.1.1.beta1, < 0.3)
active_record_query_trace (1.8) active_record_query_trace (1.8)
activejob (6.1.7.2) activejob (6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
globalid (>= 0.3.6) globalid (>= 0.3.6)
activemodel (6.1.7.2) activemodel (6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
activerecord (6.1.7.2) activerecord (6.1.7.4)
activemodel (= 6.1.7.2) activemodel (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
activestorage (6.1.7.2) activestorage (6.1.7.4)
actionpack (= 6.1.7.2) actionpack (= 6.1.7.4)
activejob (= 6.1.7.2) activejob (= 6.1.7.4)
activerecord (= 6.1.7.2) activerecord (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
marcel (~> 1.0) marcel (~> 1.0)
mini_mime (>= 1.1.0) mini_mime (>= 1.1.0)
activesupport (6.1.7.2) activesupport (6.1.7.4)
concurrent-ruby (~> 1.0, >= 1.0.2) concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2) i18n (>= 1.6, < 2)
minitest (>= 5.1) minitest (>= 5.1)
@ -85,7 +85,7 @@ GEM
activerecord (>= 3.2, < 8.0) activerecord (>= 3.2, < 8.0)
rake (>= 10.4, < 14.0) rake (>= 10.4, < 14.0)
ast (2.4.2) ast (2.4.2)
attr_encrypted (4.0.0) attr_encrypted (3.1.0)
encryptor (~> 3.0.0) encryptor (~> 3.0.0)
attr_required (1.0.1) attr_required (1.0.1)
awrence (1.2.1) awrence (1.2.1)
@ -105,7 +105,7 @@ GEM
aws-sigv4 (~> 1.4) aws-sigv4 (~> 1.4)
aws-sigv4 (1.5.2) aws-sigv4 (1.5.2)
aws-eventstream (~> 1, >= 1.0.2) aws-eventstream (~> 1, >= 1.0.2)
bcrypt (3.1.18) bcrypt (3.1.17)
better_errors (2.9.1) better_errors (2.9.1)
coderay (>= 1.0.0) coderay (>= 1.0.0)
erubi (>= 1.0.0) erubi (>= 1.0.0)
@ -173,7 +173,7 @@ GEM
cocoon (1.2.15) cocoon (1.2.15)
coderay (1.1.3) coderay (1.1.3)
color_diff (0.1) color_diff (0.1)
concurrent-ruby (1.2.0) concurrent-ruby (1.2.2)
connection_pool (2.3.0) connection_pool (2.3.0)
cose (1.2.1) cose (1.2.1)
cbor (~> 0.5.9) cbor (~> 0.5.9)
@ -191,9 +191,9 @@ GEM
railties (>= 4.1.0) railties (>= 4.1.0)
responders responders
warden (~> 1.2.3) warden (~> 1.2.3)
devise-two-factor (4.1.0) devise-two-factor (4.0.2)
activesupport (< 7.1) activesupport (< 7.1)
attr_encrypted (>= 1.3, < 5, != 2) attr_encrypted (>= 1.3, < 4, != 2)
devise (~> 4.0) devise (~> 4.0)
railties (< 7.1) railties (< 7.1)
rotp (~> 6.0) rotp (~> 6.0)
@ -206,7 +206,7 @@ GEM
docile (1.4.0) docile (1.4.0)
domain_name (0.5.20190701) domain_name (0.5.20190701)
unf (>= 0.0.5, < 1.0.0) unf (>= 0.0.5, < 1.0.0)
doorkeeper (5.6.4) doorkeeper (5.6.6)
railties (>= 5) railties (>= 5)
dotenv (2.8.1) dotenv (2.8.1)
dotenv-rails (2.8.1) dotenv-rails (2.8.1)
@ -312,7 +312,7 @@ GEM
httplog (1.6.2) httplog (1.6.2)
rack (>= 2.0) rack (>= 2.0)
rainbow (>= 2.0.0) rainbow (>= 2.0.0)
i18n (1.13.0) i18n (1.12.0)
concurrent-ruby (~> 1.0) concurrent-ruby (~> 1.0)
i18n-tasks (1.0.12) i18n-tasks (1.0.12)
activesupport (>= 4.0.2) activesupport (>= 4.0.2)
@ -385,10 +385,10 @@ GEM
activesupport (>= 4) activesupport (>= 4)
railties (>= 4) railties (>= 4)
request_store (~> 1.0) request_store (~> 1.0)
loofah (2.20.0) loofah (2.19.1)
crass (~> 1.0.2) crass (~> 1.0.2)
nokogiri (>= 1.5.9) nokogiri (>= 1.5.9)
mail (2.8.0.1) mail (2.8.1)
mini_mime (>= 0.1.1) mini_mime (>= 0.1.1)
net-imap net-imap
net-pop net-pop
@ -406,11 +406,11 @@ GEM
mime-types-data (3.2022.0105) mime-types-data (3.2022.0105)
mini_mime (1.1.2) mini_mime (1.1.2)
mini_portile2 (2.8.2) mini_portile2 (2.8.2)
minitest (5.18.0) minitest (5.17.0)
msgpack (1.6.0) msgpack (1.6.0)
multi_json (1.15.0) multi_json (1.15.0)
multipart-post (2.1.1) multipart-post (2.1.1)
net-imap (0.3.4) net-imap (0.3.6)
date date
net-protocol net-protocol
net-ldap (0.17.1) net-ldap (0.17.1)
@ -423,8 +423,8 @@ GEM
net-smtp (0.3.3) net-smtp (0.3.3)
net-protocol net-protocol
net-ssh (7.0.1) net-ssh (7.0.1)
nio4r (2.5.8) nio4r (2.5.9)
nokogiri (1.14.1) nokogiri (1.14.5)
mini_portile2 (~> 2.8.0) mini_portile2 (~> 2.8.0)
racc (~> 1.4) racc (~> 1.4)
nsa (0.2.8) nsa (0.2.8)
@ -497,7 +497,7 @@ GEM
activesupport (>= 3.0.0) activesupport (>= 3.0.0)
raabro (1.4.0) raabro (1.4.0)
racc (1.6.2) racc (1.6.2)
rack (2.2.6.2) rack (2.2.7)
rack-attack (6.6.1) rack-attack (6.6.1)
rack (>= 1.0, < 3) rack (>= 1.0, < 3)
rack-cors (1.1.1) rack-cors (1.1.1)
@ -512,20 +512,20 @@ GEM
rack rack
rack-test (2.0.2) rack-test (2.0.2)
rack (>= 1.3) rack (>= 1.3)
rails (6.1.7.2) rails (6.1.7.4)
actioncable (= 6.1.7.2) actioncable (= 6.1.7.4)
actionmailbox (= 6.1.7.2) actionmailbox (= 6.1.7.4)
actionmailer (= 6.1.7.2) actionmailer (= 6.1.7.4)
actionpack (= 6.1.7.2) actionpack (= 6.1.7.4)
actiontext (= 6.1.7.2) actiontext (= 6.1.7.4)
actionview (= 6.1.7.2) actionview (= 6.1.7.4)
activejob (= 6.1.7.2) activejob (= 6.1.7.4)
activemodel (= 6.1.7.2) activemodel (= 6.1.7.4)
activerecord (= 6.1.7.2) activerecord (= 6.1.7.4)
activestorage (= 6.1.7.2) activestorage (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
bundler (>= 1.15.0) bundler (>= 1.15.0)
railties (= 6.1.7.2) railties (= 6.1.7.4)
sprockets-rails (>= 2.0.0) sprockets-rails (>= 2.0.0)
rails-controller-testing (1.0.5) rails-controller-testing (1.0.5)
actionpack (>= 5.0.1.rc1) actionpack (>= 5.0.1.rc1)
@ -541,9 +541,9 @@ GEM
railties (>= 6.0.0, < 7) railties (>= 6.0.0, < 7)
rails-settings-cached (0.6.6) rails-settings-cached (0.6.6)
rails (>= 4.2.0) rails (>= 4.2.0)
railties (6.1.7.2) railties (6.1.7.4)
actionpack (= 6.1.7.2) actionpack (= 6.1.7.4)
activesupport (= 6.1.7.2) activesupport (= 6.1.7.4)
method_source method_source
rake (>= 12.2) rake (>= 12.2)
thor (~> 1.0) thor (~> 1.0)
@ -563,11 +563,11 @@ GEM
regexp_parser (2.6.2) regexp_parser (2.6.2)
request_store (1.5.1) request_store (1.5.1)
rack (>= 1.4) rack (>= 1.4)
responders (3.1.0) responders (3.0.1)
actionpack (>= 5.2) actionpack (>= 5.0)
railties (>= 5.2) railties (>= 5.0)
rexml (3.2.5) rexml (3.2.5)
rotp (6.2.2) rotp (6.2.0)
rpam2 (4.0.2) rpam2 (4.0.2)
rqrcode (2.1.2) rqrcode (2.1.2)
chunky_png (~> 1.0) chunky_png (~> 1.0)
@ -628,7 +628,7 @@ GEM
fugit (~> 1.1, >= 1.1.6) fugit (~> 1.1, >= 1.1.6)
safety_net_attestation (0.4.0) safety_net_attestation (0.4.0)
jwt (~> 2.0) jwt (~> 2.0)
sanitize (6.0.1) sanitize (6.0.2)
crass (~> 1.0.2) crass (~> 1.0.2)
nokogiri (>= 1.12.0) nokogiri (>= 1.12.0)
scenic (1.7.0) scenic (1.7.0)
@ -689,9 +689,9 @@ GEM
unicode-display_width (>= 1.1.1, < 3) unicode-display_width (>= 1.1.1, < 3)
terrapin (0.6.0) terrapin (0.6.0)
climate_control (>= 0.0.3, < 1.0) climate_control (>= 0.0.3, < 1.0)
thor (1.2.1) thor (1.2.2)
tilt (2.0.11) tilt (2.0.11)
timeout (0.3.1) timeout (0.3.2)
tpm-key_attestation (0.11.0) tpm-key_attestation (0.11.0)
bindata (~> 2.4) bindata (~> 2.4)
openssl (> 2.0, < 3.1) openssl (> 2.0, < 3.1)
@ -786,7 +786,7 @@ DEPENDENCIES
concurrent-ruby concurrent-ruby
connection_pool connection_pool
devise (~> 4.8) devise (~> 4.8)
devise-two-factor (~> 4.1) devise-two-factor (~> 4.0)
devise_pam_authenticatable2 (~> 9.2) devise_pam_authenticatable2 (~> 9.2)
discard (~> 1.2) discard (~> 1.2)
doorkeeper (~> 5.6) doorkeeper (~> 5.6)

View file

@ -1,4 +1,4 @@
This is a fork of [Mastodon](https://github.com/mastodon/mastodon) that has been adapted to visualize the ActivityPub protocol exchanges between different Mastodon instances. The changes are based on v4.1.2 of the main repository. This is a fork of [Mastodon](https://github.com/mastodon/mastodon) that has been adapted to visualize the ActivityPub protocol exchanges between different Mastodon instances. The changes are based on v4.1.4 of the main repository.
See it in action on [ActivityPub.Academy](https://activitypub.academy). See it in action on [ActivityPub.Academy](https://activitypub.academy).

View file

@ -20,6 +20,7 @@ module Admin
authorize :webhook, :create? authorize :webhook, :create?
@webhook = Webhook.new(resource_params) @webhook = Webhook.new(resource_params)
@webhook.current_account = current_account
if @webhook.save if @webhook.save
redirect_to admin_webhook_path(@webhook) redirect_to admin_webhook_path(@webhook)
@ -39,10 +40,12 @@ module Admin
def update def update
authorize @webhook, :update? authorize @webhook, :update?
@webhook.current_account = current_account
if @webhook.update(resource_params) if @webhook.update(resource_params)
redirect_to admin_webhook_path(@webhook) redirect_to admin_webhook_path(@webhook)
else else
render :show render :edit
end end
end end

View file

@ -11,7 +11,7 @@ class Api::V1::ConversationsController < Api::BaseController
def index def index
@conversations = paginated_conversations @conversations = paginated_conversations
render json: @conversations, each_serializer: REST::ConversationSerializer render json: @conversations, each_serializer: REST::ConversationSerializer, relationships: StatusRelationshipsPresenter.new(@conversations.map(&:last_status), current_user&.account_id)
end end
def read def read
@ -32,6 +32,19 @@ class Api::V1::ConversationsController < Api::BaseController
def paginated_conversations def paginated_conversations
AccountConversation.where(account: current_account) AccountConversation.where(account: current_account)
.includes(
account: :account_stat,
last_status: [
:media_attachments,
:preview_cards,
:status_stat,
:tags,
{
active_mentions: [account: :account_stat],
account: :account_stat,
},
]
)
.to_a_paginated_by_id(limit_param(LIMIT), params_slice(:max_id, :since_id, :min_id)) .to_a_paginated_by_id(limit_param(LIMIT), params_slice(:max_id, :since_id, :min_id))
end end

View file

@ -1,6 +1,7 @@
# frozen_string_literal: true # frozen_string_literal: true
require "faraday" require "faraday"
require "uri"
class Api::V1::JsonLdController < Api::BaseController class Api::V1::JsonLdController < Api::BaseController
include ActionController::Live include ActionController::Live
@ -9,6 +10,40 @@ class Api::V1::JsonLdController < Api::BaseController
render json: { error: e.to_s }, status: 422 render json: { error: e.to_s }, status: 422
end end
before_action :require_user!
REQUEST_TARGET = '(request-target)'
def signature(headers)
account = Account.representative
key_id = ActivityPub::TagManager.instance.key_uri_for(account)
algorithm = 'rsa-sha256'
signed_string = headers.map { |key, value| "#{key.downcase}: #{value}" }.join("\n")
signature = Base64.strict_encode64(account.keypair.sign(OpenSSL::Digest.new('SHA256'), signed_string))
"keyId=\"#{key_id}\",algorithm=\"#{algorithm}\",headers=\"#{headers.keys.join(' ').downcase}\",signature=\"#{signature}\""
end
def signed_headers(url_string)
if url_string.include?(".well-known")
return {'Accept': 'application/jrd+json'}
end
url = URI.parse(url_string)
tmp_headers = {
'Date': Time.now.utc.httpdate,
'Host': url.host,
'Accept': 'application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
}
tmp_headers[REQUEST_TARGET] = "get #{url_string.delete_prefix("#{url.scheme}://#{url.host}")}"
additional_headers = {
'Signature': signature(tmp_headers),
'User-Agent': Mastodon::Version.user_agent,
}
tmp_headers.merge(additional_headers).except(REQUEST_TARGET)
end
def show def show
url = params[:url] url = params[:url]
@ -17,13 +52,12 @@ class Api::V1::JsonLdController < Api::BaseController
Thread.new { Thread.new {
begin begin
conn = Faraday::Connection.new conn = Faraday::Connection.new
conn.options.timeout = 5
api_response = conn.get(url, nil, {'Accept' => 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"'}) api_response = conn.get(url, nil, signed_headers(url))
max_redirects = 5 max_redirects = 5
while api_response.status == 301 || api_response.status == 302 and max_redirects > 0 do while api_response.status == 301 || api_response.status == 302 and max_redirects > 0 do
api_response = conn.get(api_response.headers['Location'], nil, {'Accept' => 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"'}) api_response = conn.get(api_response.headers['Location'], nil, signed_headers(api_response.headers['Location']))
max_redirects -= 1 max_redirects -= 1
end end

View file

@ -7,11 +7,15 @@ class Api::V1::Statuses::HistoriesController < Api::BaseController
before_action :set_status before_action :set_status
def show def show
render json: @status.edits.includes(:account, status: [:account]), each_serializer: REST::StatusEditSerializer render json: status_edits, each_serializer: REST::StatusEditSerializer
end end
private private
def status_edits
@status.edits.includes(:account, status: [:account]).to_a.presence || [@status.build_snapshot(at_time: @status.edited_at || @status.created_at)]
end
def set_status def set_status
@status = Status.find(params[:status_id]) @status = Status.find(params[:status_id])
authorize @status, :show? authorize @status, :show?

View file

@ -2,6 +2,8 @@
class Api::V1::Statuses::ReblogsController < Api::BaseController class Api::V1::Statuses::ReblogsController < Api::BaseController
include Authorization include Authorization
include Redisable
include Lockable
before_action -> { doorkeeper_authorize! :write, :'write:statuses' } before_action -> { doorkeeper_authorize! :write, :'write:statuses' }
before_action :require_user! before_action :require_user!
@ -10,7 +12,9 @@ class Api::V1::Statuses::ReblogsController < Api::BaseController
override_rate_limit_headers :create, family: :statuses override_rate_limit_headers :create, family: :statuses
def create def create
@status = ReblogService.new.call(current_account, @reblog, reblog_params) with_lock("reblog:#{current_account.id}:#{@reblog.id}") do
@status = ReblogService.new.call(current_account, @reblog, reblog_params)
end
render json: @status, serializer: REST::StatusSerializer render json: @status, serializer: REST::StatusSerializer
end end

View file

@ -18,6 +18,14 @@ class Api::V2::Admin::AccountsController < Api::V1::Admin::AccountsController
private private
def next_path
api_v2_admin_accounts_url(pagination_params(max_id: pagination_max_id)) if records_continue?
end
def prev_path
api_v2_admin_accounts_url(pagination_params(min_id: pagination_since_id)) unless @accounts.empty?
end
def filtered_accounts def filtered_accounts
AccountFilter.new(translated_filter_params).results AccountFilter.new(translated_filter_params).results
end end

View file

@ -13,7 +13,7 @@ class BackupsController < ApplicationController
when :s3 when :s3
redirect_to @backup.dump.expiring_url(10) redirect_to @backup.dump.expiring_url(10)
when :fog when :fog
if Paperclip::Attachment.default_options.dig(:storage, :fog_credentials, :openstack_temp_url_key).present? if Paperclip::Attachment.default_options.dig(:fog_credentials, :openstack_temp_url_key).present?
redirect_to @backup.dump.expiring_url(Time.now.utc + 10) redirect_to @backup.dump.expiring_url(Time.now.utc + 10)
else else
redirect_to full_asset_url(@backup.dump.url) redirect_to full_asset_url(@backup.dump.url)

View file

@ -46,6 +46,6 @@ class MediaController < ApplicationController
end end
def allow_iframing def allow_iframing
response.headers['X-Frame-Options'] = 'ALLOWALL' response.headers.delete('X-Frame-Options')
end end
end end

View file

@ -8,6 +8,8 @@ class Oauth::AuthorizedApplicationsController < Doorkeeper::AuthorizedApplicatio
before_action :require_not_suspended!, only: :destroy before_action :require_not_suspended!, only: :destroy
before_action :set_body_classes before_action :set_body_classes
before_action :set_last_used_at_by_app, only: :index, unless: -> { request.format == :json }
skip_before_action :require_functional! skip_before_action :require_functional!
include Localized include Localized
@ -30,4 +32,14 @@ class Oauth::AuthorizedApplicationsController < Doorkeeper::AuthorizedApplicatio
def require_not_suspended! def require_not_suspended!
forbidden if current_account.suspended? forbidden if current_account.suspended?
end end
def set_last_used_at_by_app
@last_used_at_by_app = Doorkeeper::AccessToken
.select('DISTINCT ON (application_id) application_id, last_used_at')
.where(resource_owner_id: current_resource_owner.id)
.where.not(last_used_at: nil)
.order(application_id: :desc, last_used_at: :desc)
.pluck(:application_id, :last_used_at)
.to_h
end
end end

View file

@ -43,7 +43,7 @@ class StatusesController < ApplicationController
return not_found if @status.hidden? || @status.reblog? return not_found if @status.hidden? || @status.reblog?
expires_in 180, public: true expires_in 180, public: true
response.headers['X-Frame-Options'] = 'ALLOWALL' response.headers.delete('X-Frame-Options')
render layout: 'embedded' render layout: 'embedded'
end end

View file

@ -18,7 +18,14 @@ module WellKnown
private private
def set_account def set_account
@account = Account.find_local!(username_from_resource) username = username_from_resource
@account = begin
if username == Rails.configuration.x.local_domain
Account.representative
else
Account.find_local!(username)
end
end
end end
def username_from_resource def username_from_resource

View file

@ -58,6 +58,10 @@ module FormattingHelper
end end
def account_field_value_format(field, with_rel_me: true) def account_field_value_format(field, with_rel_me: true)
html_aware_format(field.value, field.account.local?, with_rel_me: with_rel_me, with_domains: true, multiline: false) if field.verified? && !field.account.local?
TextFormatter.shortened_link(field.value_for_verification)
else
html_aware_format(field.value, field.account.local?, with_rel_me: with_rel_me, with_domains: true, multiline: false)
end
end end
end end

View file

@ -17,6 +17,54 @@ const mapStateToProps = (state) => {
}; };
}; };
function Content({ logs, dispatch, router }) {
const darkMode = !(document.body && document.body.classList.contains('theme-mastodon-light'));
// hijack the toggleHidden shortcut to copy the logs to clipboard
const handlers = {
toggleHidden: () => navigator.clipboard.writeText(JSON.stringify(logs, null, 2)),
};
if (logs.length > 0) {
return ( <HotKeys handlers={handlers}>
<div className={`${darkMode ? 'dark' : ''}`} style={{ height: '100%' }}>
<ActivityPubVisualization
logs={logs}
clickableLinks
onLinkClick={(url) => {
dispatch(setExplorerUrl(url));
router.history.push('/activitypub_explorer');
}}
showExplorerLink
onExplorerLinkClick={(data) => {
dispatch(setExplorerData(data));
router.history.push('/activitypub_explorer');
}}
/>
</div>
</HotKeys>);
} else {
return (<div className='empty-column-indicator'>
<FormattedMessage
id='empty_column.activity_log'
defaultMessage='The Activity Log is empty. Interact with accounts on other instances to trigger activities. You can find more information on my {blog}.'
values={{
blog: <a className='blog-link' href='https://seb.jambor.dev/posts/activitypub-academy/'>blog</a>,
}}
/>
</div>);
}
}
Content.propTypes = {
dispatch: PropTypes.func.isRequired,
logs: PropTypes.array,
router: PropTypes.object,
};
export default @connect(mapStateToProps) export default @connect(mapStateToProps)
class ActivityLog extends ImmutablePureComponent { class ActivityLog extends ImmutablePureComponent {
@ -31,52 +79,15 @@ class ActivityLog extends ImmutablePureComponent {
handleHeaderClick = () => { handleHeaderClick = () => {
this.column.scrollTop(); this.column.scrollTop();
} };
setRef = c => { setRef = c => {
this.column = c; this.column = c;
} };
render() { render() {
const { dispatch, logs, multiColumn } = this.props; const { dispatch, logs, multiColumn } = this.props;
const darkMode = !(document.body && document.body.classList.contains('theme-mastodon-light'));
// hijack the toggleHidden shortcut to copy the logs to clipboard
const handlers = {
toggleHidden: () => navigator.clipboard.writeText(JSON.stringify(logs, null, 2)),
};
const Content = () => {
if (logs.length > 0) {
return ( <HotKeys handlers={handlers}>
<div className={`${darkMode ? 'dark' : ''}`} style={{height: '100%'}}>
<ActivityPubVisualization
logs={logs}
clickableLinks
onLinkClick={(url) => {
dispatch(setExplorerUrl(url));
this.context.router.history.push('/activitypub_explorer');
}}
showExplorerLink
onExplorerLinkClick={(data) => {
dispatch(setExplorerData(data));
this.context.router.history.push('/activitypub_explorer');
}}
/>
</div>
</HotKeys>) } else {
return (<div className='empty-column-indicator'>
<FormattedMessage id='empty_column.activity_log' defaultMessage='The Activity Log is empty. Interact with accounts on other instances to trigger activities. You can find more information on my {blog}.'
values={{
blog: <a className='blog-link' href='https://seb.jambor.dev/posts/activitypub-academy/'>blog</a>,
}}
/>
</div>)
}
}
return ( return (
<Column bindToDocument={!multiColumn} ref={this.setRef} label='Activity Log'> <Column bindToDocument={!multiColumn} ref={this.setRef} label='Activity Log'>
<ColumnHeader <ColumnHeader
@ -104,7 +115,11 @@ class ActivityLog extends ImmutablePureComponent {
</p> </p>
</DismissableBanner> </DismissableBanner>
<Content /> <Content
logs={logs}
dispatch={dispatch}
router={this.context.router}
/>
</Column> </Column>
); );
} }

View file

@ -6,7 +6,7 @@ class AccountReachFinder
end end
def inboxes def inboxes
(followers_inboxes + reporters_inboxes + relay_inboxes).uniq (followers_inboxes + reporters_inboxes + recently_mentioned_inboxes + relay_inboxes).uniq
end end
private private
@ -19,6 +19,13 @@ class AccountReachFinder
Account.where(id: @account.targeted_reports.select(:account_id)).inboxes Account.where(id: @account.targeted_reports.select(:account_id)).inboxes
end end
def recently_mentioned_inboxes
cutoff_id = Mastodon::Snowflake.id_at(2.days.ago, with_random: false)
recent_statuses = @account.statuses.recent.where(id: cutoff_id...).limit(200)
Account.joins(:mentions).where(mentions: { status: recent_statuses }).inboxes.take(2000)
end
def relay_inboxes def relay_inboxes
Relay.enabled.pluck(:inbox_url) Relay.enabled.pluck(:inbox_url)
end end

View file

@@ -37,22 +37,20 @@ class ActivityLogAudienceHelper
    if string_or_array.nil?
      []
    elsif string_or_array.is_a?(String)
-      self.actors([string_or_array])
-    else
-      string_or_array.map do |string|
-        if match = string.match(Regexp.new("https://#{domain}/users/([^/]*)"))
-          match.captures[0]
-        elsif string.ends_with?("/followers")
-          Account
-            .joins(
-              "JOIN follows ON follows.account_id = accounts.id
+      if match = string_or_array.match(Regexp.new("https://#{domain}/users/([^/]*)"))
+        [match.captures[0]]
+      elsif string_or_array.ends_with?("/followers")
+        Account
+          .joins(
+            "JOIN follows ON follows.account_id = accounts.id
             JOIN accounts AS followed ON follows.target_account_id = followed.id
-            WHERE followed.followers_url = '#{string}'")
+            WHERE followed.followers_url = '#{string_or_array}'")
          .map { |account| account.username }
      else
-          nil
+        []
      end
-      end.flatten.compact
+    else
+      string_or_array.flat_map { |inner| self.actors(inner) }
    end
  end
end

View file

@ -9,10 +9,6 @@ module ApplicationExtension
validates :redirect_uri, length: { maximum: 2_000 } validates :redirect_uri, length: { maximum: 2_000 }
end end
def most_recently_used_access_token
@most_recently_used_access_token ||= access_tokens.where.not(last_used_at: nil).order(last_used_at: :desc).first
end
def confirmation_redirect_uri def confirmation_redirect_uri
redirect_uri.lines.first.strip redirect_uri.lines.first.strip
end end

View file

@ -140,7 +140,7 @@ class LinkDetailsExtractor
end end
def html def html
player_url.present? ? content_tag(:iframe, nil, src: player_url, width: width, height: height, allowtransparency: 'true', scrolling: 'no', frameborder: '0') : nil player_url.present? ? content_tag(:iframe, nil, src: player_url, width: width, height: height, allowfullscreen: 'true', allowtransparency: 'true', scrolling: 'no', frameborder: '0') : nil
end end
def width def width

View file

@ -7,11 +7,48 @@ require 'resolv'
# Monkey-patch the HTTP.rb timeout class to avoid using a timeout block # Monkey-patch the HTTP.rb timeout class to avoid using a timeout block
# around the Socket#open method, since we use our own timeout blocks inside # around the Socket#open method, since we use our own timeout blocks inside
# that method # that method
#
# Also changes how the read timeout behaves so that it is cumulative (closer
# to HTTP::Timeout::Global, but still having distinct timeouts for other
# operation types)
class HTTP::Timeout::PerOperation class HTTP::Timeout::PerOperation
def connect(socket_class, host, port, nodelay = false) def connect(socket_class, host, port, nodelay = false)
@socket = socket_class.open(host, port) @socket = socket_class.open(host, port)
@socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay @socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay
end end
# Reset deadline when the connection is re-used for different requests
def reset_counter
@deadline = nil
end
# Read data from the socket
def readpartial(size, buffer = nil)
@deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_timeout
timeout = false
loop do
result = @socket.read_nonblock(size, buffer, exception: false)
return :eof if result.nil?
remaining_time = @deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout || remaining_time <= 0
return result if result != :wait_readable
# marking the socket for timeout. Why is this not being raised immediately?
# it seems there is some race-condition on the network level between calling
# #read_nonblock and #wait_readable, in which #read_nonblock signalizes waiting
# for reads, and when waiting for x seconds, it returns nil suddenly without completing
# the x seconds. In a normal case this would be a timeout on wait/read, but it can
# also mean that the socket has been closed by the server. Therefore we "mark" the
# socket for timeout and try to read more bytes. If it returns :eof, it's all good, no
# timeout. Else, the first timeout was a proper timeout.
# This hack has to be done because io/wait#wait_readable doesn't provide a value for when
# the socket is closed by the server, and HTTP::Parser doesn't provide the limit for the chunks.
timeout = true unless @socket.to_io.wait_readable(remaining_time)
end
end
end end
class Request class Request

View file

@ -1,7 +1,7 @@
# frozen_string_literal: true # frozen_string_literal: true
class ScopeParser < Parslet::Parser class ScopeParser < Parslet::Parser
rule(:term) { match('[a-z]').repeat(1).as(:term) } rule(:term) { match('[a-z_]').repeat(1).as(:term) }
rule(:colon) { str(':') } rule(:colon) { str(':') }
rule(:access) { (str('write') | str('read')).as(:access) } rule(:access) { (str('write') | str('read')).as(:access) }
rule(:namespace) { str('admin').as(:namespace) } rule(:namespace) { str('admin').as(:namespace) }

View file

@ -48,6 +48,26 @@ class TextFormatter
html.html_safe # rubocop:disable Rails/OutputSafety html.html_safe # rubocop:disable Rails/OutputSafety
end end
class << self
include ERB::Util
def shortened_link(url, rel_me: false)
url = Addressable::URI.parse(url).to_s
rel = rel_me ? (DEFAULT_REL + %w(me)) : DEFAULT_REL
prefix = url.match(URL_PREFIX_REGEX).to_s
display_url = url[prefix.length, 30]
suffix = url[prefix.length + 30..-1]
cutoff = url[prefix.length..-1].length > 30
<<~HTML.squish.html_safe # rubocop:disable Rails/OutputSafety
<a href="#{h(url)}" target="_blank" rel="#{rel.join(' ')}"><span class="invisible">#{h(prefix)}</span><span class="#{cutoff ? 'ellipsis' : ''}">#{h(display_url)}</span><span class="invisible">#{h(suffix)}</span></a>
HTML
rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
h(url)
end
end
private private
def rewrite def rewrite
@ -70,19 +90,7 @@ class TextFormatter
end end
def link_to_url(entity) def link_to_url(entity)
url = Addressable::URI.parse(entity[:url]).to_s TextFormatter.shortened_link(entity[:url], rel_me: with_rel_me?)
rel = with_rel_me? ? (DEFAULT_REL + %w(me)) : DEFAULT_REL
prefix = url.match(URL_PREFIX_REGEX).to_s
display_url = url[prefix.length, 30]
suffix = url[prefix.length + 30..-1]
cutoff = url[prefix.length..-1].length > 30
<<~HTML.squish
<a href="#{h(url)}" target="_blank" rel="#{rel.join(' ')}"><span class="invisible">#{h(prefix)}</span><span class="#{cutoff ? 'ellipsis' : ''}">#{h(display_url)}</span><span class="invisible">#{h(suffix)}</span></a>
HTML
rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
h(entity[:url])
end end
def link_to_hashtag(entity) def link_to_hashtag(entity)

View file

@ -9,10 +9,12 @@ class Vacuum::AccessTokensVacuum
private private
def vacuum_revoked_access_tokens! def vacuum_revoked_access_tokens!
Doorkeeper::AccessToken.where.not(revoked_at: nil).where('revoked_at < NOW()').delete_all Doorkeeper::AccessToken.where.not(expires_in: nil).where('created_at + make_interval(secs => expires_in) < NOW()').in_batches.delete_all
Doorkeeper::AccessToken.where.not(revoked_at: nil).where('revoked_at < NOW()').in_batches.delete_all
end end
def vacuum_revoked_access_grants! def vacuum_revoked_access_grants!
Doorkeeper::AccessGrant.where.not(revoked_at: nil).where('revoked_at < NOW()').delete_all Doorkeeper::AccessGrant.where.not(expires_in: nil).where('created_at + make_interval(secs => expires_in) < NOW()').in_batches.delete_all
Doorkeeper::AccessGrant.where.not(revoked_at: nil).where('revoked_at < NOW()').in_batches.delete_all
end end
end end

View file

@ -16,34 +16,44 @@
class AccountConversation < ApplicationRecord class AccountConversation < ApplicationRecord
include Redisable include Redisable
attr_writer :participant_accounts
before_validation :set_last_status
after_commit :push_to_streaming_api after_commit :push_to_streaming_api
belongs_to :account belongs_to :account
belongs_to :conversation belongs_to :conversation
belongs_to :last_status, class_name: 'Status' belongs_to :last_status, class_name: 'Status'
before_validation :set_last_status
def participant_account_ids=(arr) def participant_account_ids=(arr)
self[:participant_account_ids] = arr.sort self[:participant_account_ids] = arr.sort
@participant_accounts = nil
end end
def participant_accounts def participant_accounts
if participant_account_ids.empty? @participant_accounts ||= Account.where(id: participant_account_ids).to_a
[account] @participant_accounts.presence || [account]
else
participants = Account.where(id: participant_account_ids)
participants.empty? ? [account] : participants
end
end end
class << self class << self
def to_a_paginated_by_id(limit, options = {}) def to_a_paginated_by_id(limit, options = {})
if options[:min_id] array = begin
paginate_by_min_id(limit, options[:min_id], options[:max_id]).reverse if options[:min_id]
else paginate_by_min_id(limit, options[:min_id], options[:max_id]).reverse
paginate_by_max_id(limit, options[:max_id], options[:since_id]).to_a else
paginate_by_max_id(limit, options[:max_id], options[:since_id]).to_a
end
end end
# Preload participants
participant_ids = array.flat_map(&:participant_account_ids)
accounts_by_id = Account.where(id: participant_ids).index_by(&:id)
array.each do |conversation|
conversation.participant_accounts = conversation.participant_account_ids.filter_map { |id| accounts_by_id[id] }
end
array
end end
def paginate_by_min_id(limit, min_id = nil, max_id = nil) def paginate_by_min_id(limit, min_id = nil, max_id = nil)

View file

@@ -22,15 +22,14 @@ module Attachmentable
  included do
    def self.has_attached_file(name, options = {}) # rubocop:disable Naming/PredicateName
+      options = { validate_media_type: false }.merge(options)
      super(name, options)

-      send(:"before_#{name}_post_process") do
+      send(:"before_#{name}_validate", prepend: true) do
        attachment = send(name)
        check_image_dimension(attachment)
        set_file_content_type(attachment)
        obfuscate_file_name(attachment)
        set_file_extension(attachment)
+        Paperclip::Validators::MediaTypeSpoofDetectionValidator.new(attributes: [name]).validate(self)
      end
    end
  end

View file

@ -123,7 +123,18 @@ class Form::AccountBatch
account: current_account, account: current_account,
action: :suspend action: :suspend
) )
Admin::SuspensionWorker.perform_async(account.id) Admin::SuspensionWorker.perform_async(account.id)
# Suspending a single account closes their associated reports, so
# mass-suspending would be consistent.
Report.where(target_account: account).unresolved.find_each do |report|
authorize(report, :update?)
log_action(:resolve, report)
report.resolve!(current_account)
rescue Mastodon::NotPermittedError
# This should not happen, but just in case, do not fail early
end
end end
def approve_account(account) def approve_account(account)

View file

@ -12,7 +12,7 @@
# #
class Identity < ApplicationRecord class Identity < ApplicationRecord
belongs_to :user, dependent: :destroy belongs_to :user
validates :uid, presence: true, uniqueness: { scope: :provider } validates :uid, presence: true, uniqueness: { scope: :provider }
validates :provider, presence: true validates :provider, presence: true

View file

@ -20,6 +20,8 @@ class Webhook < ApplicationRecord
report.created report.created
).freeze ).freeze
attr_writer :current_account
scope :enabled, -> { where(enabled: true) } scope :enabled, -> { where(enabled: true) }
validates :url, presence: true, url: true validates :url, presence: true, url: true
@ -27,6 +29,7 @@ class Webhook < ApplicationRecord
validates :events, presence: true validates :events, presence: true
validate :validate_events validate :validate_events
validate :validate_permissions
before_validation :strip_events before_validation :strip_events
before_validation :generate_secret before_validation :generate_secret
@ -43,12 +46,29 @@ class Webhook < ApplicationRecord
update!(enabled: false) update!(enabled: false)
end end
def required_permissions
events.map { |event| Webhook.permission_for_event(event) }
end
def self.permission_for_event(event)
case event
when 'account.approved', 'account.created', 'account.updated'
:manage_users
when 'report.created'
:manage_reports
end
end
private private
def validate_events def validate_events
errors.add(:events, :invalid) if events.any? { |e| !EVENTS.include?(e) } errors.add(:events, :invalid) if events.any? { |e| !EVENTS.include?(e) }
end end
def validate_permissions
errors.add(:events, :invalid_permissions) if defined?(@current_account) && required_permissions.any? { |permission| !@current_account.user_role.can?(permission) }
end
def strip_events def strip_events
self.events = events.map { |str| str.strip.presence }.compact if events.present? self.events = events.map { |str| str.strip.presence }.compact if events.present?
end end

View file

@ -14,7 +14,7 @@ class WebhookPolicy < ApplicationPolicy
end end
def update? def update?
role.can?(:manage_webhooks) role.can?(:manage_webhooks) && record.required_permissions.all? { |permission| role.can?(permission) }
end end
def enable? def enable?
@ -30,6 +30,6 @@ class WebhookPolicy < ApplicationPolicy
end end
def destroy? def destroy?
role.can?(:manage_webhooks) role.can?(:manage_webhooks) && record.required_permissions.all? { |permission| role.can?(permission) }
end end
end end

View file

@ -11,4 +11,8 @@ class REST::PreviewCardSerializer < ActiveModel::Serializer
def image def image
object.image? ? full_asset_url(object.image.url(:original)) : nil object.image? ? full_asset_url(object.image.url(:original)) : nil
end end
def html
Sanitize.fragment(object.html, Sanitize::Config::MASTODON_OEMBED)
end
end end

View file

@ -43,7 +43,9 @@ class FetchResourceService < BaseService
@response_code = response.code @response_code = response.code
return nil if response.code != 200 return nil if response.code != 200
if ['application/activity+json', 'application/ld+json'].include?(response.mime_type) # Allow application/json to circumvent a bug in Lemmy < 1.8.1-rc.4
# https://github.com/LemmyNet/lemmy/commit/3d7d6b253086f1ac78e6dd459bc4c904df45dbfa
if ['application/activity+json', 'application/ld+json', 'application/json'].include?(response.mime_type)
body = response.body_with_limit body = response.body_with_limit
json = body_to_json(body) json = body_to_json(body)

View file

@ -12,6 +12,7 @@ class RemoveStatusService < BaseService
# @option [Boolean] :immediate # @option [Boolean] :immediate
# @option [Boolean] :preserve # @option [Boolean] :preserve
# @option [Boolean] :original_removed # @option [Boolean] :original_removed
# @option [Boolean] :skip_streaming
def call(status, **options) def call(status, **options)
@payload = Oj.dump(event: :delete, payload: status.id.to_s) @payload = Oj.dump(event: :delete, payload: status.id.to_s)
@status = status @status = status
@ -52,6 +53,9 @@ class RemoveStatusService < BaseService
private private
# The following FeedManager calls all do not result in redis publishes for
# streaming, as the `:update` option is false
def remove_from_self def remove_from_self
FeedManager.instance.unpush_from_home(@account, @status) FeedManager.instance.unpush_from_home(@account, @status)
end end
@ -75,6 +79,8 @@ class RemoveStatusService < BaseService
# followers. Here we send a delete to actively mentioned accounts # followers. Here we send a delete to actively mentioned accounts
# that may not follow the account # that may not follow the account
return if skip_streaming?
@status.active_mentions.find_each do |mention| @status.active_mentions.find_each do |mention|
redis.publish("timeline:#{mention.account_id}", @payload) redis.publish("timeline:#{mention.account_id}", @payload)
end end
@ -103,7 +109,7 @@ class RemoveStatusService < BaseService
# without us being able to do all the fancy stuff # without us being able to do all the fancy stuff
@status.reblogs.rewhere(deleted_at: [nil, @status.deleted_at]).includes(:account).reorder(nil).find_each do |reblog| @status.reblogs.rewhere(deleted_at: [nil, @status.deleted_at]).includes(:account).reorder(nil).find_each do |reblog|
RemoveStatusService.new.call(reblog, original_removed: true) RemoveStatusService.new.call(reblog, original_removed: true, skip_streaming: skip_streaming?)
end end
end end
@ -114,6 +120,8 @@ class RemoveStatusService < BaseService
return unless @status.public_visibility? return unless @status.public_visibility?
return if skip_streaming?
@status.tags.map(&:name).each do |hashtag| @status.tags.map(&:name).each do |hashtag|
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", @payload) redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", @payload)
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", @payload) if @status.local? redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", @payload) if @status.local?
@ -123,6 +131,8 @@ class RemoveStatusService < BaseService
def remove_from_public def remove_from_public
return unless @status.public_visibility? return unless @status.public_visibility?
return if skip_streaming?
redis.publish('timeline:public', @payload) redis.publish('timeline:public', @payload)
redis.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', @payload) redis.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', @payload)
end end
@ -130,6 +140,8 @@ class RemoveStatusService < BaseService
def remove_from_media def remove_from_media
return unless @status.public_visibility? return unless @status.public_visibility?
return if skip_streaming?
redis.publish('timeline:public:media', @payload) redis.publish('timeline:public:media', @payload)
redis.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', @payload) redis.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', @payload)
end end
@ -143,4 +155,8 @@ class RemoveStatusService < BaseService
def permanently? def permanently?
@options[:immediate] || !(@options[:preserve] || @status.reported?) @options[:immediate] || !(@options[:preserve] || @status.reported?)
end end
def skip_streaming?
!!@options[:skip_streaming]
end
end end

View file

@ -89,13 +89,28 @@ class ResolveURLService < BaseService
def process_local_url def process_local_url
recognized_params = Rails.application.routes.recognize_path(@url) recognized_params = Rails.application.routes.recognize_path(@url)
return unless recognized_params[:action] == 'show' case recognized_params[:controller]
when 'statuses'
return unless recognized_params[:action] == 'show'
if recognized_params[:controller] == 'statuses'
status = Status.find_by(id: recognized_params[:id]) status = Status.find_by(id: recognized_params[:id])
check_local_status(status) check_local_status(status)
elsif recognized_params[:controller] == 'accounts' when 'accounts'
return unless recognized_params[:action] == 'show'
Account.find_local(recognized_params[:username]) Account.find_local(recognized_params[:username])
when 'home'
return unless recognized_params[:action] == 'index' && recognized_params[:username_with_domain].present?
if recognized_params[:any]&.match?(/\A[0-9]+\Z/)
status = Status.find_by(id: recognized_params[:any])
check_local_status(status)
elsif recognized_params[:any].blank?
username, domain = recognized_params[:username_with_domain].gsub(/\A@/, '').split('@')
return unless username.present? && domain.present?
Account.find_remote(username, domain)
end
end end
end end
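
The added 'home' branch makes local permalink-style URLs for remote content resolvable (the ResolveURLService spec near the end of this comparison exercises exactly this). An illustrative call, assuming an authenticated account is passed along:

ResolveURLService.new.call("https://#{Rails.configuration.x.local_domain}/@foo@example.com/42", on_behalf_of: account)
# returns the local copy of the remote status with id 42, provided `account` may see it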


@ -3,8 +3,8 @@
class VoteValidator < ActiveModel::Validator class VoteValidator < ActiveModel::Validator
def validate(vote) def validate(vote)
vote.errors.add(:base, I18n.t('polls.errors.expired')) if vote.poll.expired? vote.errors.add(:base, I18n.t('polls.errors.expired')) if vote.poll.expired?
vote.errors.add(:base, I18n.t('polls.errors.invalid_choice')) if invalid_choice?(vote) vote.errors.add(:base, I18n.t('polls.errors.invalid_choice')) if invalid_choice?(vote)
vote.errors.add(:base, I18n.t('polls.errors.self_vote')) if self_vote?(vote)
if vote.poll.multiple? && vote.poll.votes.where(account: vote.account, choice: vote.choice).exists? if vote.poll.multiple? && vote.poll.votes.where(account: vote.account, choice: vote.choice).exists?
vote.errors.add(:base, I18n.t('polls.errors.already_voted')) vote.errors.add(:base, I18n.t('polls.errors.already_voted'))
@ -18,4 +18,8 @@ class VoteValidator < ActiveModel::Validator
def invalid_choice?(vote) def invalid_choice?(vote)
vote.choice.negative? || vote.choice >= vote.poll.options.size vote.choice.negative? || vote.choice >= vote.poll.options.size
end end
def self_vote?(vote)
vote.account_id == vote.poll.account_id
end
end end
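
Together with the locale string added below, the new check rejects a vote cast by the poll's own author. A rough sketch of the resulting behaviour, with fabricated objects for illustration:

vote = poll.votes.new(account: poll.account, choice: 0)
vote.valid?        # => false
vote.errors[:base] # includes the polls.errors.self_vote message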


@ -5,7 +5,7 @@
= f.input :url, wrapper: :with_block_label, input_html: { placeholder: 'https://' } = f.input :url, wrapper: :with_block_label, input_html: { placeholder: 'https://' }
.fields-group .fields-group
= f.input :events, collection: Webhook::EVENTS, wrapper: :with_block_label, include_blank: false, as: :check_boxes, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li' = f.input :events, collection: Webhook::EVENTS, wrapper: :with_block_label, include_blank: false, as: :check_boxes, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li', disabled: Webhook::EVENTS.filter { |event| !current_user.role.can?(Webhook.permission_for_event(event)) }
.actions .actions
= f.button :button, @webhook.new_record? ? t('admin.webhooks.add_new') : t('generic.save_changes'), type: :submit = f.button :button, @webhook.new_record? ? t('admin.webhooks.add_new') : t('generic.save_changes'), type: :submit


@ -18,8 +18,8 @@
.announcements-list__item__action-bar .announcements-list__item__action-bar
.announcements-list__item__meta .announcements-list__item__meta
- if application.most_recently_used_access_token - if @last_used_at_by_app[application.id]
= t('doorkeeper.authorized_applications.index.last_used_at', date: l(application.most_recently_used_access_token.last_used_at.to_date)) = t('doorkeeper.authorized_applications.index.last_used_at', date: l(@last_used_at_by_app[application.id].to_date))
- else - else
= t('doorkeeper.authorized_applications.index.never_used') = t('doorkeeper.authorized_applications.index.never_used')
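
The view now reads last-used timestamps from a @last_used_at_by_app hash keyed by application id. A sketch of how the controller could build that hash in a single grouped query (assumed implementation, not part of this comparison):

@last_used_at_by_app = Doorkeeper::AccessToken
  .where(resource_owner_id: current_resource_owner.id)
  .where.not(last_used_at: nil)
  .group(:application_id)
  .maximum(:last_used_at)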


@ -1,14 +1,14 @@
- thumbnail = @instance_presenter.thumbnail - thumbnail = @instance_presenter.thumbnail
- description ||= @instance_presenter.description.presence || strip_tags(t('about.about_mastodon_html')) - description = 'Learn ActivityPub interactively, by seeing protocol interactions visualized in real time'
%meta{ name: 'description', content: description }/ %meta{ name: 'description', content: description }/
= opengraph 'og:site_name', t('about.hosted_on', domain: site_hostname) = opengraph 'og:site_name', 'ActivityPub Academy - A learning resource for ActivityPub'
= opengraph 'og:url', url_for(only_path: false) = opengraph 'og:url', url_for(only_path: false)
= opengraph 'og:type', 'website' = opengraph 'og:type', 'website'
= opengraph 'og:title', @instance_presenter.title = opengraph 'og:title', 'ActivityPub Academy'
= opengraph 'og:description', description = opengraph 'og:description', description
= opengraph 'og:image', full_asset_url(thumbnail&.file&.url(:'@1x') || asset_pack_path('media/images/preview.png', protocol: :request)) = opengraph 'og:image', full_asset_url(thumbnail&.file&.url(:'@1x') || asset_pack_path('media/images/academy-mascot.webp', protocol: :request))
= opengraph 'og:image:width', thumbnail ? thumbnail.meta['width'] : '1200' = opengraph 'og:image:width', thumbnail ? thumbnail.meta['width'] : '500'
= opengraph 'og:image:height', thumbnail ? thumbnail.meta['height'] : '630' = opengraph 'og:image:height', thumbnail ? thumbnail.meta['height'] : '573'
= opengraph 'twitter:card', 'summary_large_image' = opengraph 'twitter:card', 'summary_large_image'


@ -7,28 +7,30 @@ class Scheduler::AccountsStatusesCleanupScheduler
# This limit is mostly to be nice to the fediverse at large and not # This limit is mostly to be nice to the fediverse at large and not
# generate too much traffic. # generate too much traffic.
# This also helps limiting the running time of the scheduler itself. # This also helps limiting the running time of the scheduler itself.
MAX_BUDGET = 150 MAX_BUDGET = 300
# This is an attempt to spread the load across instances, as various # This is an attempt to spread the load across remote servers, as
# accounts are likely to have various followers. # spreading deletions across diverse accounts is likely to spread
# the deletion across diverse followers. It also helps each individual
# user see some effect sooner.
PER_ACCOUNT_BUDGET = 5 PER_ACCOUNT_BUDGET = 5
# This is an attempt to limit the workload generated by status removal # This is an attempt to limit the workload generated by status removal
# jobs to something the particular instance can handle. # jobs to something the particular server can handle.
PER_THREAD_BUDGET = 6 PER_THREAD_BUDGET = 5
# Those avoid loading an instance that is already under load # These are latency limits on various queues above which a server is
MAX_DEFAULT_SIZE = 200 # considered to be under load, causing the auto-deletion to be entirely
MAX_DEFAULT_LATENCY = 5 # skipped for that run.
MAX_PUSH_SIZE = 500 LOAD_LATENCY_THRESHOLDS = {
MAX_PUSH_LATENCY = 10 default: 5,
push: 10,
# 'pull' queue has lower priority jobs, and it's unlikely that pushing # The `pull` queue has lower priority jobs, and it's unlikely that
# deletes would cause much issues with this queue if it didn't cause issues # pushing deletes would cause much issues with this queue if it didn't
# with default and push. Yet, do not enqueue deletes if the instance is # cause issues with `default` and `push`. Yet, do not enqueue deletes
# lagging behind too much. # if the instance is lagging behind too much.
MAX_PULL_SIZE = 10_000 pull: 5.minutes.to_i,
MAX_PULL_LATENCY = 5.minutes.to_i }.freeze
sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
@ -36,17 +38,37 @@ class Scheduler::AccountsStatusesCleanupScheduler
return if under_load? return if under_load?
budget = compute_budget budget = compute_budget
first_policy_id = last_processed_id
# If the budget allows it, we want to consider all accounts with enabled
# auto cleanup at least once.
#
# We start from `first_policy_id` (the last processed id in the previous
# run) and process each policy until we loop to `first_policy_id`,
# recording into `affected_policies` any policy that caused posts to be
# deleted.
#
# After that, we set `full_iteration` to `false` and continue looping on
# policies from `affected_policies`.
first_policy_id = last_processed_id || 0
first_iteration = true
full_iteration = true
affected_policies = []
loop do loop do
num_processed_accounts = 0 num_processed_accounts = 0
scope = AccountStatusesCleanupPolicy.where(enabled: true) scope = cleanup_policies(first_policy_id, affected_policies, first_iteration, full_iteration)
scope.where(Account.arel_table[:id].gt(first_policy_id)) if first_policy_id.present?
scope.find_each(order: :asc) do |policy| scope.find_each(order: :asc) do |policy|
num_deleted = AccountStatusesCleanupService.new.call(policy, [budget, PER_ACCOUNT_BUDGET].min) num_deleted = AccountStatusesCleanupService.new.call(policy, [budget, PER_ACCOUNT_BUDGET].min)
num_processed_accounts += 1 unless num_deleted.zero?
budget -= num_deleted budget -= num_deleted
unless num_deleted.zero?
num_processed_accounts += 1
affected_policies << policy.id if full_iteration
end
full_iteration = false if !first_iteration && policy.id >= first_policy_id
if budget.zero? if budget.zero?
save_last_processed_id(policy.id) save_last_processed_id(policy.id)
break break
@ -55,36 +77,55 @@ class Scheduler::AccountsStatusesCleanupScheduler
# The idea here is to loop through all policies at least once until the budget is exhausted # The idea here is to loop through all policies at least once until the budget is exhausted
# and start back after the last processed account otherwise # and start back after the last processed account otherwise
break if budget.zero? || (num_processed_accounts.zero? && first_policy_id.nil?) break if budget.zero? || (num_processed_accounts.zero? && !full_iteration)
first_policy_id = nil
full_iteration = false unless first_iteration
first_iteration = false
end end
end end
def compute_budget def compute_budget
threads = Sidekiq::ProcessSet.new.select { |x| x['queues'].include?('push') }.map { |x| x['concurrency'] }.sum # Each post deletion is a `RemovalWorker` job (on `default` queue), each
# potentially spawning many `ActivityPub::DeliveryWorker` jobs (on the `push` queue).
threads = Sidekiq::ProcessSet.new.select { |x| x['queues'].include?('push') }.pluck('concurrency').sum
[PER_THREAD_BUDGET * threads, MAX_BUDGET].min [PER_THREAD_BUDGET * threads, MAX_BUDGET].min
end end
def under_load? def under_load?
queue_under_load?('default', MAX_DEFAULT_SIZE, MAX_DEFAULT_LATENCY) || queue_under_load?('push', MAX_PUSH_SIZE, MAX_PUSH_LATENCY) || queue_under_load?('pull', MAX_PULL_SIZE, MAX_PULL_LATENCY) LOAD_LATENCY_THRESHOLDS.any? { |queue, max_latency| queue_under_load?(queue, max_latency) }
end end
private private
def queue_under_load?(name, max_size, max_latency) def cleanup_policies(first_policy_id, affected_policies, first_iteration, full_iteration)
queue = Sidekiq::Queue.new(name) scope = AccountStatusesCleanupPolicy.where(enabled: true)
queue.size > max_size || queue.latency > max_latency
if full_iteration
# If we are doing a full iteration, examine all policies we have not examined yet
if first_iteration
scope.where(id: first_policy_id...)
else
scope.where(id: ..first_policy_id).or(scope.where(id: affected_policies))
end
else
# Otherwise, examine only policies that previously yielded posts to delete
scope.where(id: affected_policies)
end
end
def queue_under_load?(name, max_latency)
Sidekiq::Queue.new(name).latency > max_latency
end end
def last_processed_id def last_processed_id
redis.get('account_statuses_cleanup_scheduler:last_account_id') redis.get('account_statuses_cleanup_scheduler:last_policy_id')&.to_i
end end
def save_last_processed_id(id) def save_last_processed_id(id)
if id.nil? if id.nil?
redis.del('account_statuses_cleanup_scheduler:last_account_id') redis.del('account_statuses_cleanup_scheduler:last_policy_id')
else else
redis.set('account_statuses_cleanup_scheduler:last_account_id', id, ex: 1.hour.seconds) redis.set('account_statuses_cleanup_scheduler:last_policy_id', id, ex: 1.hour.seconds)
end end
end end
end end
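
With the reworked constants, compute_budget grants PER_THREAD_BUDGET (5) deletions per Sidekiq thread serving the push queue, capped at MAX_BUDGET (300). A worked example with assumed concurrency values:

# two Sidekiq processes listening on `push`, 25 threads each (assumed)
threads = 25 + 25      # => 50
[5 * threads, 300].min # => 250 statuses may be removed in this run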


@ -6,17 +6,19 @@ class Scheduler::IndexingScheduler
sidekiq_options retry: 0 sidekiq_options retry: 0
IMPORT_BATCH_SIZE = 1000
SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
def perform def perform
return unless Chewy.enabled? return unless Chewy.enabled?
indexes.each do |type| indexes.each do |type|
with_redis do |redis| with_redis do |redis|
ids = redis.smembers("chewy:queue:#{type.name}") redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
type.import!(ids)
type.import!(ids) redis.pipelined do |pipeline|
pipeline.srem("chewy:queue:#{type.name}", ids)
redis.pipelined do |pipeline| end
ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) }
end end
end end
end end
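
Because the split diff above is hard to follow, here is the net effect as a consolidated sketch (index name assumed for illustration): queued ids are scanned in chunks, imported in batches of IMPORT_BATCH_SIZE, and each imported batch is removed with a single pipelined SREM instead of one SREM per id.

redis.sscan_each('chewy:queue:StatusesIndex', count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
  StatusesIndex.import!(ids)
  redis.pipelined { |pipeline| pipeline.srem('chewy:queue:StatusesIndex', ids) }
end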


@ -28,15 +28,13 @@ class Scheduler::OldAccountCleanupScheduler
.where("domain IS NULL") .where("domain IS NULL")
# id -99 is the instance actor # id -99 is the instance actor
.where("id <> -99") .where("id <> -99")
# don't delete admin # only delete accounts whose username contains underscores (those are auto-generated)
.where("username <> 'admin'") .where("username LIKE '%\\_%'")
# don't delete crepels
.where("username <> 'crepels'")
.where("created_at < ?", 1.day.ago) .where("created_at < ?", 1.day.ago)
.order(created_at: :asc) .order(created_at: :asc)
.limit(MAX_DELETIONS_PER_JOB) .limit(MAX_DELETIONS_PER_JOB)
.each do |account| .each do |account|
AccountDeletionWorker.perform_async(account.id, { :reserve_username => false }) AccountDeletionWorker.perform_async(account.id, { 'reserve_username' => false })
end end
end end
end end


@ -24,7 +24,7 @@ class Scheduler::UserCleanupScheduler
def clean_discarded_statuses! def clean_discarded_statuses!
Status.unscoped.discarded.where('deleted_at <= ?', 30.days.ago).find_in_batches do |statuses| Status.unscoped.discarded.where('deleted_at <= ?', 30.days.ago).find_in_batches do |statuses|
RemovalWorker.push_bulk(statuses) do |status| RemovalWorker.push_bulk(statuses) do |status|
[status.id, { 'immediate' => true }] [status.id, { 'immediate' => true, 'skip_streaming' => true }]
end end
end end
end end


@ -28,6 +28,7 @@ require_relative '../lib/paperclip/url_generator_extensions'
require_relative '../lib/paperclip/attachment_extensions' require_relative '../lib/paperclip/attachment_extensions'
require_relative '../lib/paperclip/lazy_thumbnail' require_relative '../lib/paperclip/lazy_thumbnail'
require_relative '../lib/paperclip/gif_transcoder' require_relative '../lib/paperclip/gif_transcoder'
require_relative '../lib/paperclip/media_type_spoof_detector_extensions'
require_relative '../lib/paperclip/transcoder' require_relative '../lib/paperclip/transcoder'
require_relative '../lib/paperclip/type_corrector' require_relative '../lib/paperclip/type_corrector'
require_relative '../lib/paperclip/response_with_limit_adapter' require_relative '../lib/paperclip/response_with_limit_adapter'


@ -0,0 +1,27 @@
<policymap>
<!-- Set some basic system resource limits -->
<policy domain="resource" name="time" value="60" />
<policy domain="module" rights="none" pattern="URL" />
<policy domain="filter" rights="none" pattern="*" />
<!--
Ideally, we would restrict ImageMagick to only accessing its own
disk-backed pixel cache as well as Mastodon-created Tempfiles.
However, those paths depend on the operating system and environment
variables, so they can only be known at runtime.
Furthermore, those paths are not necessarily shared across Mastodon
processes, so even creating a policy.xml at runtime is impractical.
For the time being, only disable indirect reads.
-->
<policy domain="path" rights="none" pattern="@*" />
<!-- Disallow any coder by default, and only enable ones required by Mastodon -->
<policy domain="coder" rights="none" pattern="*" />
<policy domain="coder" rights="read | write" pattern="{PNG,JPEG,GIF,HEIC,WEBP}" />
<policy domain="coder" rights="write" pattern="{HISTOGRAM,RGB,INFO}" />
</policymap>


@ -3,7 +3,7 @@
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy
def host_to_url(str) def host_to_url(str)
"http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}" unless str.blank? "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}".split('/').first if str.present?
end end
base_host = Rails.configuration.x.web_domain base_host = Rails.configuration.x.web_domain


@ -155,3 +155,10 @@ unless defined?(Seahorse)
end end
end end
end end
# Set our ImageMagick security policy, but allow admins to override it
ENV['MAGICK_CONFIGURE_PATH'] = begin
imagemagick_config_paths = ENV.fetch('MAGICK_CONFIGURE_PATH', '').split(File::PATH_SEPARATOR)
imagemagick_config_paths << Rails.root.join('config', 'imagemagick').expand_path.to_s
imagemagick_config_paths.join(File::PATH_SEPARATOR)
end
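
A quick way to confirm that the restrictive policy above is actually loaded is to ask ImageMagick to list its active policies under the same environment override; a hypothetical check using Terrapin, which the branding task further down already relies on:

Terrapin::CommandLine.new('convert', '-list policy',
                          environment: { 'MAGICK_CONFIGURE_PATH' => Rails.root.join('config', 'imagemagick').to_s }).run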


@ -25,7 +25,7 @@ module Twitter::TwitterText
\) \)
/iox /iox
UCHARS = '\u{A0}-\u{D7FF}\u{F900}-\u{FDCF}\u{FDF0}-\u{FFEF}\u{10000}-\u{1FFFD}\u{20000}-\u{2FFFD}\u{30000}-\u{3FFFD}\u{40000}-\u{4FFFD}\u{50000}-\u{5FFFD}\u{60000}-\u{6FFFD}\u{70000}-\u{7FFFD}\u{80000}-\u{8FFFD}\u{90000}-\u{9FFFD}\u{A0000}-\u{AFFFD}\u{B0000}-\u{BFFFD}\u{C0000}-\u{CFFFD}\u{D0000}-\u{DFFFD}\u{E1000}-\u{EFFFD}\u{E000}-\u{F8FF}\u{F0000}-\u{FFFFD}\u{100000}-\u{10FFFD}' UCHARS = '\u{A0}-\u{D7FF}\u{F900}-\u{FDCF}\u{FDF0}-\u{FFEF}\u{10000}-\u{1FFFD}\u{20000}-\u{2FFFD}\u{30000}-\u{3FFFD}\u{40000}-\u{4FFFD}\u{50000}-\u{5FFFD}\u{60000}-\u{6FFFD}\u{70000}-\u{7FFFD}\u{80000}-\u{8FFFD}\u{90000}-\u{9FFFD}\u{A0000}-\u{AFFFD}\u{B0000}-\u{BFFFD}\u{C0000}-\u{CFFFD}\u{D0000}-\u{DFFFD}\u{E1000}-\u{EFFFD}\u{E000}-\u{F8FF}\u{F0000}-\u{FFFFD}\u{100000}-\u{10FFFD}'
REGEXEN[:valid_url_query_chars] = /[a-z0-9!?\*'\(\);:&=\+\$\/%#\[\]\-_\.,~|@#{UCHARS}]/iou REGEXEN[:valid_url_query_chars] = /[a-z0-9!?\*'\(\);:&=\+\$\/%#\[\]\-_\.,~|@\^#{UCHARS}]/iou
REGEXEN[:valid_url_query_ending_chars] = /[a-z0-9_&=#\/\-#{UCHARS}]/iou REGEXEN[:valid_url_query_ending_chars] = /[a-z0-9_&=#\/\-#{UCHARS}]/iou
REGEXEN[:valid_url_path] = /(?: REGEXEN[:valid_url_path] = /(?:
(?: (?:


@ -53,3 +53,7 @@ en:
position: position:
elevated: cannot be higher than your current role elevated: cannot be higher than your current role
own_role: cannot be changed with your current role own_role: cannot be changed with your current role
webhook:
attributes:
events:
invalid_permissions: cannot include events you don't have the rights to


@ -1387,6 +1387,7 @@ en:
expired: The poll has already ended expired: The poll has already ended
invalid_choice: The chosen vote option does not exist invalid_choice: The chosen vote option does not exist
over_character_limit: cannot be longer than %{max} characters each over_character_limit: cannot be longer than %{max} characters each
self_vote: You cannot vote in your own polls
too_few_options: must have more than one item too_few_options: must have more than one item
too_many_options: can't contain more than %{max} items too_many_options: can't contain more than %{max} items
preferences: preferences:

dist/nginx.conf

@ -90,6 +90,8 @@ server {
location ~ ^/system/ { location ~ ^/system/ {
add_header Cache-Control "public, max-age=2419200, immutable"; add_header Cache-Control "public, max-age=2419200, immutable";
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains"; add_header Strict-Transport-Security "max-age=63072000; includeSubDomains";
add_header X-Content-Type-Options nosniff;
add_header Content-Security-Policy "default-src 'none'; form-action 'none'";
try_files $uri =404; try_files $uri =404;
} }
@ -112,6 +114,27 @@ server {
tcp_nodelay on; tcp_nodelay on;
} }
location ^~ /api/v1/json_ld {
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto https;
proxy_set_header Proxy "";
proxy_pass_header Server;
proxy_pass http://backend;
proxy_redirect off;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection '';
proxy_cache off;
proxy_buffering off;
chunked_transfer_encoding off;
tcp_nodelay on;
}
location ^~ /api/v1/activity_log { location ^~ /api/v1/activity_log {
proxy_set_header Host $host; proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;


@ -7,10 +7,14 @@ class ActivityLogSubscriber
redis.subscribe('activity_log') do |on| redis.subscribe('activity_log') do |on|
on.message do |channel, message| on.message do |channel, message|
event = ActivityLogEvent.from_json_string(message) begin
event = ActivityLogEvent.from_json_string(message)
ActivityLogAudienceHelper.audience(event) ActivityLogAudienceHelper.audience(event)
.each { |username| ActivityLogger.log(username, event) } .each { |username| ActivityLogger.log(username, event) }
rescue => e
Rails.logger.error((["Error parsing #{message}. #{e.class}: #{e.message}"] + e.backtrace).join("\n"))
end
end end
end end
end end


@ -542,7 +542,7 @@ module Mastodon
User.pending.find_each(&:approve!) User.pending.find_each(&:approve!)
say('OK', :green) say('OK', :green)
elsif options[:number] elsif options[:number]
User.pending.limit(options[:number]).each(&:approve!) User.pending.order(created_at: :asc).limit(options[:number]).each(&:approve!)
say('OK', :green) say('OK', :green)
elsif username.present? elsif username.present?
account = Account.find_local(username) account = Account.find_local(username)


@ -13,7 +13,7 @@ module Mastodon
end end
def patch def patch
2 4
end end
def flags def flags


@ -0,0 +1,22 @@
# frozen_string_literal: true
module Paperclip
module MediaTypeSpoofDetectorExtensions
def calculated_content_type
return @calculated_content_type if defined?(@calculated_content_type)
@calculated_content_type = type_from_file_command.chomp
# The `file` command fails to recognize some MP3 files as such
@calculated_content_type = type_from_marcel if @calculated_content_type == 'application/octet-stream' && type_from_marcel == 'audio/mpeg'
@calculated_content_type
end
def type_from_marcel
@type_from_marcel ||= Marcel::MimeType.for Pathname.new(@file.path),
name: @file.path
end
end
end
Paperclip::MediaTypeSpoofDetector.prepend(Paperclip::MediaTypeSpoofDetectorExtensions)


@ -19,10 +19,7 @@ module Paperclip
def make def make
metadata = VideoMetadataExtractor.new(@file.path) metadata = VideoMetadataExtractor.new(@file.path)
unless metadata.valid? raise Paperclip::Error, "Error while transcoding #{@file.path}: unsupported file" unless metadata.valid?
Paperclip.log("Unsupported file #{@file.path}")
return File.open(@file.path)
end
update_attachment_type(metadata) update_attachment_type(metadata)
update_options_from_metadata(metadata) update_options_from_metadata(metadata)


@ -32,6 +32,11 @@ class PublicFileServerMiddleware
end end
end end
# Override the default CSP header set by the CSP middleware
headers['Content-Security-Policy'] = "default-src 'none'; form-action 'none'" if request_path.start_with?(paperclip_root_url)
headers['X-Content-Type-Options'] = 'nosniff'
[status, headers, response] [status, headers, response]
end end


@ -94,26 +94,26 @@ class Sanitize
] ]
) )
MASTODON_OEMBED ||= freeze_config merge( MASTODON_OEMBED ||= freeze_config(
RELAXED, elements: %w(audio embed iframe source video),
elements: RELAXED[:elements] + %w(audio embed iframe source video),
attributes: merge( attributes: {
RELAXED[:attributes],
'audio' => %w(controls), 'audio' => %w(controls),
'embed' => %w(height src type width), 'embed' => %w(height src type width),
'iframe' => %w(allowfullscreen frameborder height scrolling src width), 'iframe' => %w(allowfullscreen frameborder height scrolling src width),
'source' => %w(src type), 'source' => %w(src type),
'video' => %w(controls height loop width), 'video' => %w(controls height loop width),
'div' => [:data] },
),
protocols: merge( protocols: {
RELAXED[:protocols],
'embed' => { 'src' => HTTP_PROTOCOLS }, 'embed' => { 'src' => HTTP_PROTOCOLS },
'iframe' => { 'src' => HTTP_PROTOCOLS }, 'iframe' => { 'src' => HTTP_PROTOCOLS },
'source' => { 'src' => HTTP_PROTOCOLS } 'source' => { 'src' => HTTP_PROTOCOLS },
) },
add_attributes: {
'iframe' => { 'sandbox' => 'allow-scripts allow-same-origin allow-popups allow-popups-to-escape-sandbox allow-forms' },
}
) )
end end
end end
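
An illustrative use of the tightened config (the input HTML is made up for the example): only the whitelisted embed elements survive, and every iframe gets the sandbox attribute forced onto it.

Sanitize.fragment('<iframe src="https://player.example/embed/1"></iframe><script>alert(1)</script>',
                  Sanitize::Config::MASTODON_OEMBED)
# => iframe kept, with sandbox="allow-scripts allow-same-origin ..." added; the script element is dropped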


@ -40,7 +40,7 @@ namespace :branding do
output_dest = Rails.root.join('app', 'javascript', 'icons') output_dest = Rails.root.join('app', 'javascript', 'icons')
rsvg_convert = Terrapin::CommandLine.new('rsvg-convert', '-w :size -h :size --keep-aspect-ratio :input -o :output') rsvg_convert = Terrapin::CommandLine.new('rsvg-convert', '-w :size -h :size --keep-aspect-ratio :input -o :output')
convert = Terrapin::CommandLine.new('convert', ':input :output') convert = Terrapin::CommandLine.new('convert', ':input :output', environment: { 'MAGICK_CONFIGURE_PATH' => nil })
favicon_sizes = [16, 32, 48] favicon_sizes = [16, 32, 48]
apple_icon_sizes = [57, 60, 72, 76, 114, 120, 144, 152, 167, 180, 1024] apple_icon_sizes = [57, 60, 72, 76, 114, 120, 144, 152, 167, 180, 1024]


@ -35,7 +35,7 @@
"@github/webauthn-json": "^0.5.7", "@github/webauthn-json": "^0.5.7",
"@rails/ujs": "^6.1.7", "@rails/ujs": "^6.1.7",
"abortcontroller-polyfill": "^1.7.5", "abortcontroller-polyfill": "^1.7.5",
"activitypub-visualization": "^1.3.0", "activitypub-visualization": "^1.3.7",
"array-includes": "^3.1.6", "array-includes": "^3.1.6",
"arrow-key-navigation": "^1.2.0", "arrow-key-navigation": "^1.2.0",
"autoprefixer": "^9.8.8", "autoprefixer": "^9.8.8",

Binary image file changed (not shown): 2.8 KiB before, 71 KiB after.

@ -16,6 +16,7 @@ RSpec.describe Api::V1::ConversationsController, type: :controller do
before do before do
PostStatusService.new.call(other.account, text: 'Hey @alice', visibility: 'direct') PostStatusService.new.call(other.account, text: 'Hey @alice', visibility: 'direct')
PostStatusService.new.call(user.account, text: 'Hey, nobody here', visibility: 'direct')
end end
it 'returns http success' do it 'returns http success' do
@ -31,7 +32,26 @@ RSpec.describe Api::V1::ConversationsController, type: :controller do
it 'returns conversations' do it 'returns conversations' do
get :index get :index
json = body_as_json json = body_as_json
expect(json.size).to eq 1 expect(json.size).to eq 2
expect(json[0][:accounts].size).to eq 1
end
context 'with since_id' do
context 'when requesting old posts' do
it 'returns conversations' do
get :index, params: { since_id: Mastodon::Snowflake.id_at(1.hour.ago, with_random: false) }
json = body_as_json
expect(json.size).to eq 2
end
end
context 'when requesting posts in the future' do
it 'returns no conversation' do
get :index, params: { since_id: Mastodon::Snowflake.id_at(1.hour.from_now, with_random: false) }
json = body_as_json
expect(json.size).to eq 0
end
end
end end
end end
end end


@ -23,6 +23,7 @@ describe Api::V1::Statuses::HistoriesController do
it 'returns http success' do it 'returns http success' do
expect(response).to have_http_status(200) expect(response).to have_http_status(200)
expect(body_as_json.size).to_not be 0
end end
end end
end end


@ -69,5 +69,13 @@ RSpec.describe Api::V2::Admin::AccountsController, type: :controller do
end end
end end
end end
context 'with limit param' do
let(:params) { { limit: 1 } }
it 'sets the correct pagination headers' do
expect(response.headers['Link'].find_link(%w(rel next)).href).to eq api_v2_admin_accounts_url(limit: 1, max_id: admin_account.id)
end
end
end end
end end


@ -4,6 +4,10 @@ describe WellKnown::WebfingerController, type: :controller do
render_views render_views
describe 'GET #show' do describe 'GET #show' do
subject(:perform_show!) do
get :show, params: { resource: resource }, format: :json
end
let(:alternate_domains) { [] } let(:alternate_domains) { [] }
let(:alice) { Fabricate(:account, username: 'alice') } let(:alice) { Fabricate(:account, username: 'alice') }
let(:resource) { nil } let(:resource) { nil }
@ -15,10 +19,6 @@ describe WellKnown::WebfingerController, type: :controller do
Rails.configuration.x.alternate_domains = tmp Rails.configuration.x.alternate_domains = tmp
end end
subject do
get :show, params: { resource: resource }, format: :json
end
shared_examples 'a successful response' do shared_examples 'a successful response' do
it 'returns http success' do it 'returns http success' do
expect(response).to have_http_status(200) expect(response).to have_http_status(200)
@ -43,7 +43,7 @@ describe WellKnown::WebfingerController, type: :controller do
let(:resource) { alice.to_webfinger_s } let(:resource) { alice.to_webfinger_s }
before do before do
subject perform_show!
end end
it_behaves_like 'a successful response' it_behaves_like 'a successful response'
@ -54,7 +54,7 @@ describe WellKnown::WebfingerController, type: :controller do
before do before do
alice.suspend! alice.suspend!
subject perform_show!
end end
it_behaves_like 'a successful response' it_behaves_like 'a successful response'
@ -66,7 +66,7 @@ describe WellKnown::WebfingerController, type: :controller do
before do before do
alice.suspend! alice.suspend!
alice.deletion_request.destroy alice.deletion_request.destroy
subject perform_show!
end end
it 'returns http gone' do it 'returns http gone' do
@ -78,7 +78,7 @@ describe WellKnown::WebfingerController, type: :controller do
let(:resource) { 'acct:not@existing.com' } let(:resource) { 'acct:not@existing.com' }
before do before do
subject perform_show!
end end
it 'returns http not found' do it 'returns http not found' do
@ -90,7 +90,7 @@ describe WellKnown::WebfingerController, type: :controller do
let(:alternate_domains) { ['foo.org'] } let(:alternate_domains) { ['foo.org'] }
before do before do
subject perform_show!
end end
context 'when an account exists' do context 'when an account exists' do
@ -114,11 +114,39 @@ describe WellKnown::WebfingerController, type: :controller do
end end
end end
context 'when the old name scheme is used to query the instance actor' do
let(:resource) do
"#{Rails.configuration.x.local_domain}@#{Rails.configuration.x.local_domain}"
end
before do
perform_show!
end
it 'returns http success' do
expect(response).to have_http_status(200)
end
it 'does not set a Vary header' do
expect(response.headers['Vary']).to be_nil
end
it 'returns application/jrd+json' do
expect(response.media_type).to eq 'application/jrd+json'
end
it 'returns links for the internal account' do
json = body_as_json
expect(json[:subject]).to eq 'acct:mastodon.internal@cb6e6126.ngrok.io'
expect(json[:aliases]).to eq ['https://cb6e6126.ngrok.io/actor']
end
end
context 'with no resource parameter' do context 'with no resource parameter' do
let(:resource) { nil } let(:resource) { nil }
before do before do
subject perform_show!
end end
it 'returns http bad request' do it 'returns http bad request' do
@ -130,7 +158,7 @@ describe WellKnown::WebfingerController, type: :controller do
let(:resource) { 'df/:dfkj' } let(:resource) { 'df/:dfkj' }
before do before do
subject perform_show!
end end
it 'returns http bad request' do it 'returns http bad request' do


@ -0,0 +1,22 @@
{
"data" : {
"@context" : [
"https://app.wafrn.net/contexts/litepub-0.1.jsonld"
],
"actor" : "https://app.wafrn.net/fediverse/blog/spikesburstingthroughgrid",
"cc" : [
null
],
"id" : "https://app.wafrn.net/fediverse/likes/c425f754-e5fe-414c-b385-8a7d14542579/88795044-dbe9-40de-ae28-ba8dbb3b6800",
"object" : null,
"to" : [
"https://www.w3.org/ns/activitystreams#Public",
"https://app.wafrn.net/fediverse/blog/spikesburstingthroughgrid/followers"
],
"type" : "Like"
},
"path" : "https://example.com/inbox",
"sender" : "https://app.wafrn.net/fediverse/blog/spikesburstingthroughgrid",
"timestamp" : "2023-07-03T04:00:10Z",
"type" : "inbound"
}


@ -7,7 +7,7 @@
"@context": "https://www.w3.org/ns/activitystreams", "@context": "https://www.w3.org/ns/activitystreams",
"id": "https://example.com/a5f25e0a-98d6-4e5c-baad-65318cd4d67d", "id": "https://example.com/a5f25e0a-98d6-4e5c-baad-65318cd4d67d",
"type": "Follow", "type": "Follow",
"actor": "https://example.com/users/alice", "actor": "https://example.com/users/eve",
"object": "https://other.org/users/bob" "object": "https://other.org/users/bob"
} }
} }

Binary image file (not shown): 60 KiB after.

spec/fixtures/files/boop.mp3: new binary file (not shown).

@ -0,0 +1,53 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe AccountReachFinder do
let(:account) { Fabricate(:account) }
let(:follower1) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-1') }
let(:follower2) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-2') }
let(:follower3) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://foo.bar/users/a/inbox', shared_inbox_url: 'https://foo.bar/inbox') }
let(:mentioned1) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://foo.bar/users/b/inbox', shared_inbox_url: 'https://foo.bar/inbox') }
let(:mentioned2) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-3') }
let(:mentioned3) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-4') }
let(:unrelated_account) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/unrelated-inbox') }
before do
follower1.follow!(account)
follower2.follow!(account)
follower3.follow!(account)
Fabricate(:status, account: account).tap do |status|
status.mentions << Mention.new(account: follower1)
status.mentions << Mention.new(account: mentioned1)
end
Fabricate(:status, account: account)
Fabricate(:status, account: account).tap do |status|
status.mentions << Mention.new(account: mentioned2)
status.mentions << Mention.new(account: mentioned3)
end
Fabricate(:status).tap do |status|
status.mentions << Mention.new(account: unrelated_account)
end
end
describe '#inboxes' do
it 'includes the preferred inbox URL of followers' do
expect(described_class.new(account).inboxes).to include(*[follower1, follower2, follower3].map(&:preferred_inbox_url))
end
it 'includes the preferred inbox URL of recently-mentioned accounts' do
expect(described_class.new(account).inboxes).to include(*[mentioned1, mentioned2, mentioned3].map(&:preferred_inbox_url))
end
it 'does not include the inbox of unrelated users' do
expect(described_class.new(account).inboxes).to_not include(unrelated_account.preferred_inbox_url)
end
end
end


@ -15,8 +15,8 @@ RSpec.describe ActivityLogAudienceHelper do
Rails.configuration.x.web_domain = before Rails.configuration.x.web_domain = before
end end
describe 'for inbound events' do describe 'for outbound events' do
it 'returns the author if the domain matches' do it 'returns the sender if the domain matches' do
Rails.configuration.x.web_domain = 'example.com' Rails.configuration.x.web_domain = 'example.com'
outbound_event = activity_log_event_fixture('outbound.json') outbound_event = activity_log_event_fixture('outbound.json')
@ -30,16 +30,21 @@ RSpec.describe ActivityLogAudienceHelper do
expect(ActivityLogAudienceHelper.audience(outbound_event)).to eq [] expect(ActivityLogAudienceHelper.audience(outbound_event)).to eq []
end end
it 'returns nothing if the activity does not have an actor' do it 'returns nothing if the activity does not have a sender' do
Rails.configuration.x.web_domain = 'example.com' Rails.configuration.x.web_domain = 'example.com'
outbound_event = activity_log_event_fixture('outbound.json') outbound_event_tmp = activity_log_event_fixture('outbound.json')
outbound_event.data.delete('actor') outbound_event = ActivityLogEvent.new(
outbound_event_tmp.type,
nil,
outbound_event_tmp.path,
outbound_event_tmp.data
)
expect(ActivityLogAudienceHelper.audience(outbound_event)).to eq [] expect(ActivityLogAudienceHelper.audience(outbound_event)).to eq []
end end
end end
describe 'for outbound events' do describe 'for inbound events' do
it 'returns the inbox owner if it is sent to a personal inbox' do it 'returns the inbox owner if it is sent to a personal inbox' do
Rails.configuration.x.web_domain = 'example.com' Rails.configuration.x.web_domain = 'example.com'
inbound_event = activity_log_event_fixture('inbound-to-users-inbox.json') inbound_event = activity_log_event_fixture('inbound-to-users-inbox.json')
@ -84,6 +89,13 @@ RSpec.describe ActivityLogAudienceHelper do
]) ])
end end
it 'handles null in array correctly' do
Rails.configuration.x.web_domain = 'example.com'
inbound_event = activity_log_event_fixture('inbound-with-null-in-array.json')
expect(ActivityLogAudienceHelper.audience(inbound_event)).to match_array([])
end
end end
end end
end end


@ -5,9 +5,11 @@ RSpec.describe Vacuum::AccessTokensVacuum do
describe '#perform' do describe '#perform' do
let!(:revoked_access_token) { Fabricate(:access_token, revoked_at: 1.minute.ago) } let!(:revoked_access_token) { Fabricate(:access_token, revoked_at: 1.minute.ago) }
let!(:expired_access_token) { Fabricate(:access_token, expires_in: 59.minutes.to_i, created_at: 1.hour.ago) }
let!(:active_access_token) { Fabricate(:access_token) } let!(:active_access_token) { Fabricate(:access_token) }
let!(:revoked_access_grant) { Fabricate(:access_grant, revoked_at: 1.minute.ago) } let!(:revoked_access_grant) { Fabricate(:access_grant, revoked_at: 1.minute.ago) }
let!(:expired_access_grant) { Fabricate(:access_grant, expires_in: 59.minutes.to_i, created_at: 1.hour.ago) }
let!(:active_access_grant) { Fabricate(:access_grant) } let!(:active_access_grant) { Fabricate(:access_grant) }
before do before do
@ -18,10 +20,18 @@ RSpec.describe Vacuum::AccessTokensVacuum do
expect { revoked_access_token.reload }.to raise_error ActiveRecord::RecordNotFound expect { revoked_access_token.reload }.to raise_error ActiveRecord::RecordNotFound
end end
it 'deletes expired access tokens' do
expect { expired_access_token.reload }.to raise_error ActiveRecord::RecordNotFound
end
it 'deletes revoked access grants' do it 'deletes revoked access grants' do
expect { revoked_access_grant.reload }.to raise_error ActiveRecord::RecordNotFound expect { revoked_access_grant.reload }.to raise_error ActiveRecord::RecordNotFound
end end
it 'deletes expired access grants' do
expect { expired_access_grant.reload }.to raise_error ActiveRecord::RecordNotFound
end
it 'does not delete active access tokens' do it 'does not delete active access tokens' do
expect { active_access_token.reload }.to_not raise_error expect { active_access_token.reload }.to_not raise_error
end end


@ -0,0 +1,63 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe Form::AccountBatch do
let(:account_batch) { described_class.new }
describe '#save' do
subject { account_batch.save }
let(:account) { Fabricate(:user, role: UserRole.find_by(name: 'Admin')).account }
let(:account_ids) { [] }
let(:query) { Account.none }
before do
account_batch.assign_attributes(
action: action,
current_account: account,
account_ids: account_ids,
query: query,
select_all_matching: select_all_matching
)
end
context 'when action is "suspend"' do
let(:action) { 'suspend' }
let(:target_account) { Fabricate(:account) }
let(:target_account2) { Fabricate(:account) }
before do
Fabricate(:report, target_account: target_account)
Fabricate(:report, target_account: target_account2)
end
context 'when accounts are passed as account_ids' do
let(:select_all_matching) { '0' }
let(:account_ids) { [target_account.id, target_account2.id] }
it 'suspends the expected users' do
expect { subject }.to change { [target_account.reload.suspended?, target_account2.reload.suspended?] }.from([false, false]).to([true, true])
end
it 'closes open reports targeting the suspended users' do
expect { subject }.to change { Report.unresolved.where(target_account: [target_account, target_account2]).count }.from(2).to(0)
end
end
context 'when accounts are passed as a query' do
let(:select_all_matching) { '1' }
let(:query) { Account.where(id: [target_account.id, target_account2.id]) }
it 'suspends the expected users' do
expect { subject }.to change { [target_account.reload.suspended?, target_account2.reload.suspended?] }.from([false, false]).to([true, true])
end
it 'closes open reports targeting the suspended users' do
expect { subject }.to change { Report.unresolved.where(target_account: [target_account, target_account2]).count }.from(2).to(0)
end
end
end
end
end


@ -150,6 +150,26 @@ RSpec.describe MediaAttachment, type: :model do
end end
end end
describe 'mp3 with large cover art' do
let(:media) { described_class.create(account: Fabricate(:account), file: attachment_fixture('boop.mp3')) }
it 'detects it as an audio file' do
expect(media.type).to eq 'audio'
end
it 'sets meta for the duration' do
expect(media.file.meta['original']['duration']).to be_within(0.05).of(0.235102)
end
it 'extracts thumbnail' do
expect(media.thumbnail.present?).to be true
end
it 'gives the file a random name' do
expect(media.file_file_name).to_not eq 'boop.mp3'
end
end
describe 'jpeg' do describe 'jpeg' do
let(:media) { MediaAttachment.create(account: Fabricate(:account), file: attachment_fixture('attachment.jpg')) } let(:media) { MediaAttachment.create(account: Fabricate(:account), file: attachment_fixture('attachment.jpg')) }


@ -0,0 +1,18 @@
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'Media API', paperclip_processing: true do
let(:user) { Fabricate(:user) }
let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: scopes) }
let(:scopes) { 'write' }
let(:headers) { { 'Authorization' => "Bearer #{token.token}" } }
describe 'POST /api/v2/media' do
it 'returns http success' do
post '/api/v2/media', headers: headers, params: { file: fixture_file_upload('attachment-jpg.123456_abcd', 'image/jpeg') }
expect(File.exist?(user.account.media_attachments.first.file.path(:small))).to be true
expect(response).to have_http_status(200)
end
end
end


@ -10,6 +10,7 @@ RSpec.describe FetchLinkCardService, type: :service do
stub_request(:get, 'http://example.com/koi8-r').to_return(request_fixture('koi8-r.txt')) stub_request(:get, 'http://example.com/koi8-r').to_return(request_fixture('koi8-r.txt'))
stub_request(:get, 'http://example.com/日本語').to_return(request_fixture('sjis.txt')) stub_request(:get, 'http://example.com/日本語').to_return(request_fixture('sjis.txt'))
stub_request(:get, 'https://github.com/qbi/WannaCry').to_return(status: 404) stub_request(:get, 'https://github.com/qbi/WannaCry').to_return(status: 404)
stub_request(:get, 'http://example.com/test?data=file.gpx%5E1').to_return(status: 200)
stub_request(:get, 'http://example.com/test-').to_return(request_fixture('idn.txt')) stub_request(:get, 'http://example.com/test-').to_return(request_fixture('idn.txt'))
stub_request(:get, 'http://example.com/windows-1251').to_return(request_fixture('windows-1251.txt')) stub_request(:get, 'http://example.com/windows-1251').to_return(request_fixture('windows-1251.txt'))
@ -85,6 +86,15 @@ RSpec.describe FetchLinkCardService, type: :service do
expect(a_request(:get, 'http://example.com/sjis')).to_not have_been_made expect(a_request(:get, 'http://example.com/sjis')).to_not have_been_made
end end
end end
context do
let(:status) { Fabricate(:status, text: 'test http://example.com/test?data=file.gpx^1') }
it 'does fetch URLs with a caret in search params' do
expect(a_request(:get, 'http://example.com/test?data=file.gpx')).to_not have_been_made
expect(a_request(:get, 'http://example.com/test?data=file.gpx%5E1')).to have_been_made.once
end
end
end end
context 'in a remote status' do context 'in a remote status' do


@ -145,5 +145,35 @@ describe ResolveURLService, type: :service do
expect(subject.call(url, on_behalf_of: account)).to eq(status) expect(subject.call(url, on_behalf_of: account)).to eq(status)
end end
end end
context 'when searching for a local link of a remote private status' do
let(:account) { Fabricate(:account) }
let(:poster) { Fabricate(:account, username: 'foo', domain: 'example.com') }
let(:url) { 'https://example.com/@foo/42' }
let(:uri) { 'https://example.com/users/foo/statuses/42' }
let!(:status) { Fabricate(:status, url: url, uri: uri, account: poster, visibility: :private) }
let(:search_url) { "https://#{Rails.configuration.x.local_domain}/@foo@example.com/#{status.id}" }
before do
stub_request(:get, url).to_return(status: 404) if url.present?
stub_request(:get, uri).to_return(status: 404)
end
context 'when the account follows the poster' do
before do
account.follow!(poster)
end
it 'returns the status' do
expect(subject.call(search_url, on_behalf_of: account)).to eq(status)
end
end
context 'when the account does not follow the poster' do
it 'does not return the status' do
expect(subject.call(search_url, on_behalf_of: account)).to be_nil
end
end
end
end end
end end


@ -7,11 +7,13 @@ describe Scheduler::AccountsStatusesCleanupScheduler do
let!(:account2) { Fabricate(:account, domain: nil) } let!(:account2) { Fabricate(:account, domain: nil) }
let!(:account3) { Fabricate(:account, domain: nil) } let!(:account3) { Fabricate(:account, domain: nil) }
let!(:account4) { Fabricate(:account, domain: nil) } let!(:account4) { Fabricate(:account, domain: nil) }
let!(:account5) { Fabricate(:account, domain: nil) }
let!(:remote) { Fabricate(:account) } let!(:remote) { Fabricate(:account) }
let!(:policy1) { Fabricate(:account_statuses_cleanup_policy, account: account1) } let!(:policy1) { Fabricate(:account_statuses_cleanup_policy, account: account1) }
let!(:policy2) { Fabricate(:account_statuses_cleanup_policy, account: account3) } let!(:policy2) { Fabricate(:account_statuses_cleanup_policy, account: account3) }
let!(:policy3) { Fabricate(:account_statuses_cleanup_policy, account: account4, enabled: false) } let!(:policy3) { Fabricate(:account_statuses_cleanup_policy, account: account4, enabled: false) }
let!(:policy4) { Fabricate(:account_statuses_cleanup_policy, account: account5) }
let(:queue_size) { 0 } let(:queue_size) { 0 }
let(:queue_latency) { 0 } let(:queue_latency) { 0 }
@ -40,6 +42,7 @@ describe Scheduler::AccountsStatusesCleanupScheduler do
Fabricate(:status, account: account2, created_at: 3.years.ago) Fabricate(:status, account: account2, created_at: 3.years.ago)
Fabricate(:status, account: account3, created_at: 3.years.ago) Fabricate(:status, account: account3, created_at: 3.years.ago)
Fabricate(:status, account: account4, created_at: 3.years.ago) Fabricate(:status, account: account4, created_at: 3.years.ago)
Fabricate(:status, account: account5, created_at: 3.years.ago)
Fabricate(:status, account: remote, created_at: 3.years.ago) Fabricate(:status, account: remote, created_at: 3.years.ago)
end end
@ -70,7 +73,7 @@ describe Scheduler::AccountsStatusesCleanupScheduler do
end end
end end
describe '#get_budget' do describe '#compute_budget' do
context 'on a single thread' do context 'on a single thread' do
let(:process_set_stub) { [ { 'concurrency' => 1, 'queues' => ['push', 'default'] } ] } let(:process_set_stub) { [ { 'concurrency' => 1, 'queues' => ['push', 'default'] } ] }
@ -109,8 +112,48 @@ describe Scheduler::AccountsStatusesCleanupScheduler do
expect { subject.perform }.to_not change { account4.statuses.count } expect { subject.perform }.to_not change { account4.statuses.count }
end end
it 'eventually deletes every deletable toot' do it 'eventually deletes every deletable toot given enough runs' do
expect { subject.perform; subject.perform; subject.perform; subject.perform }.to change { Status.count }.by(-20) stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 4
expect { 10.times { subject.perform } }.to change { Status.count }.by(-30)
end
it 'correctly round-trips between users across several runs' do
stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 3
stub_const 'Scheduler::AccountsStatusesCleanupScheduler::PER_ACCOUNT_BUDGET', 2
expect { 3.times { subject.perform } }
.to change { Status.count }.by(-3 * 3)
.and change { account1.statuses.count }
.and change { account3.statuses.count }
.and change { account5.statuses.count }
end
context 'when given a big budget' do
let(:process_set_stub) { [{ 'concurrency' => 400, 'queues' => %w(push default) }] }
before do
stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 400
end
it 'correctly handles looping in a single run' do
expect(subject.compute_budget).to eq(400)
expect { subject.perform }.to change { Status.count }.by(-30)
end
end
context 'when there is no work to be done' do
let(:process_set_stub) { [{ 'concurrency' => 400, 'queues' => %w(push default) }] }
before do
stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 400
subject.perform
end
it 'does not get stuck' do
expect(subject.compute_budget).to eq(400)
expect { subject.perform }.to_not change { Status.count }
end
end end
end end
end end


@ -0,0 +1,33 @@
require 'json'
require 'rails_helper'
RSpec.describe Scheduler::OldAccountCleanupScheduler do
subject { described_class.new }
let!(:generated_user) { Fabricate(:account, username: 'containing_underscore', created_at: 25.hours.ago) }
let!(:alice) { Fabricate(:account, username: 'alice', created_at: 25.hours.ago) }
let!(:generated_user_other_instance) { Fabricate(:account, username: 'containing_underscore', domain: 'example.com', created_at: 25.hours.ago) }
let!(:instance_actor) { Fabricate(:account, id: 99, created_at: 25.hours.ago) }
describe '#perform' do
it 'removes auto-generated user-accounts that are older than one day' do
expect { subject.perform }.to change { Account.exists?(generated_user.id) }.from(true).to(false)
end
it 'does not remove auto-generated user-accounts that are younger than one day' do
generated_user.update!(created_at: 23.hours.ago)
expect { subject.perform }.not_to change { Account.exists?(generated_user.id) }.from(true)
end
it 'does not remove accounts with underscores from other instances' do
expect { subject.perform }.not_to change { Account.exists?(generated_user_other_instance.id) }.from(true)
end
it 'does not remove accounts without underscores' do
expect { subject.perform }.not_to change { Account.exists?(alice.id) }.from(true)
end
it 'does not remove instance actor' do
expect { subject.perform }.not_to change { Account.exists?(instance_actor.id) }.from(true)
end
end
end


@ -92,18 +92,31 @@ const redisUrlToClient = async (defaultConfig, redisUrl) => {
const numWorkers = +process.env.STREAMING_CLUSTER_NUM || (env === 'development' ? 1 : Math.max(os.cpus().length - 1, 1)); const numWorkers = +process.env.STREAMING_CLUSTER_NUM || (env === 'development' ? 1 : Math.max(os.cpus().length - 1, 1));
/** /**
* Attempts to safely parse a string as JSON, used when both receiving a message
* from redis and when receiving a message from a client over a websocket
* connection, this is why it accepts a `req` argument.
* @param {string} json * @param {string} json
* @param {any} req * @param {any?} req
* @return {Object.<string, any>|null} * @returns {Object.<string, any>|null}
*/ */
const parseJSON = (json, req) => { const parseJSON = (json, req) => {
try { try {
return JSON.parse(json); return JSON.parse(json);
} catch (err) { } catch (err) {
if (req.accountId) { /* FIXME: This logging isn't great, and should probably be done at the
log.warn(req.requestId, `Error parsing message from user ${req.accountId}: ${err}`); * call-site of parseJSON, not in the method, but this would require changing
* the signature of parseJSON to return something akin to a Result type:
* [Error|null, null|Object<string,any}], and then handling the error
* scenarios.
*/
if (req) {
if (req.accountId) {
log.warn(req.requestId, `Error parsing message from user ${req.accountId}: ${err}`);
} else {
log.silly(req.requestId, `Error parsing message from ${req.remoteAddress}: ${err}`);
}
} else { } else {
log.silly(req.requestId, `Error parsing message from ${req.remoteAddress}: ${err}`); log.warn(`Error parsing message from redis: ${err}`);
} }
return null; return null;
} }
@ -167,7 +180,7 @@ const startWorker = async (workerId) => {
const redisPrefix = redisNamespace ? `${redisNamespace}:` : ''; const redisPrefix = redisNamespace ? `${redisNamespace}:` : '';
/** /**
* @type {Object.<string, Array.<function(string): void>>} * @type {Object.<string, Array.<function(Object<string, any>): void>>}
*/ */
const subs = {}; const subs = {};
@ -207,7 +220,10 @@ const startWorker = async (workerId) => {
return; return;
} }
callbacks.forEach(callback => callback(message)); const json = parseJSON(message, null);
if (!json) return;
callbacks.forEach(callback => callback(json));
}; };
/** /**
@ -229,6 +245,7 @@ const startWorker = async (workerId) => {
/** /**
* @param {string} channel * @param {string} channel
* @param {function(Object<string, any>): void} callback
*/ */
const unsubscribe = (channel, callback) => { const unsubscribe = (channel, callback) => {
log.silly(`Removing listener for ${channel}`); log.silly(`Removing listener for ${channel}`);
@ -378,7 +395,7 @@ const startWorker = async (workerId) => {
/** /**
* @param {any} req * @param {any} req
* @return {string} * @returns {string|undefined}
*/ */
const channelNameFromPath = req => { const channelNameFromPath = req => {
const { path, query } = req; const { path, query } = req;
@ -487,15 +504,11 @@ const startWorker = async (workerId) => {
/** /**
* @param {any} req * @param {any} req
* @param {SystemMessageHandlers} eventHandlers * @param {SystemMessageHandlers} eventHandlers
* @return {function(string): void} * @returns {function(object): void}
*/ */
const createSystemMessageListener = (req, eventHandlers) => { const createSystemMessageListener = (req, eventHandlers) => {
return message => { return message => {
const json = parseJSON(message, req); const { event } = message;
if (!json) return;
const { event } = json;
log.silly(req.requestId, `System message for ${req.accountId}: ${event}`); log.silly(req.requestId, `System message for ${req.accountId}: ${event}`);
@ -612,19 +625,16 @@ const startWorker = async (workerId) => {
* @param {function(string, string): void} output * @param {function(string, string): void} output
* @param {function(string[], function(string): void): void} attachCloseHandler * @param {function(string[], function(string): void): void} attachCloseHandler
* @param {boolean=} needsFiltering * @param {boolean=} needsFiltering
* @return {function(string): void} * @returns {function(object): void}
*/ */
const streamFrom = (ids, req, output, attachCloseHandler, needsFiltering = false) => { const streamFrom = (ids, req, output, attachCloseHandler, needsFiltering = false) => {
const accountId = req.accountId || req.remoteAddress; const accountId = req.accountId || req.remoteAddress;
log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`); log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`);
// Currently message is of type string, soon it'll be Record<string, any>
const listener = message => { const listener = message => {
const json = parseJSON(message, req); const { event, payload, queued_at } = message;
if (!json) return;
const { event, payload, queued_at } = json;
const transmit = () => { const transmit = () => {
const now = new Date().getTime(); const now = new Date().getTime();
@ -1207,8 +1217,15 @@ const startWorker = async (workerId) => {
ws.on('close', onEnd); ws.on('close', onEnd);
ws.on('error', onEnd); ws.on('error', onEnd);
ws.on('message', data => { ws.on('message', (data, isBinary) => {
const json = parseJSON(data, session.request); if (isBinary) {
log.warn('socket', 'Received binary data, closing connection');
ws.close(1003, 'The mastodon streaming server does not support binary messages');
return;
}
const message = data.toString('utf8');
const json = parseJSON(message, session.request);
if (!json) return; if (!json) return;


@ -2190,10 +2190,12 @@ acorn@^8.8.0:
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a"
integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==
activitypub-visualization@^1.2.0: activitypub-visualization@^1.3.7:
version "1.2.0" version "1.3.7"
resolved "https://registry.yarnpkg.com/activitypub-visualization/-/activitypub-visualization-1.2.0.tgz#93c7f5930c08f50e77aae09ea35d17f1f6ed3305" resolved "https://registry.yarnpkg.com/activitypub-visualization/-/activitypub-visualization-1.3.7.tgz#55e8e6dbc9b4cecff46c45a776219c47865d15d8"
integrity sha512-OP17Ynu7WDAc8KPSQXr0mn21vW6ozKsMHPcpGnICNW58sMuGj14//Lu10YxPxu/gn94z+mUSRCouilNmri6vKw== integrity sha512-Cta1l2rogf273NkHUsNjPMrfUcUzCV5Hk1xg94ThW0hYuLGI4GXWGij9PRIjHn6aDM407NHV+T3494+I9s1fMA==
dependencies:
dompurify "^3.0.5"
agent-base@6: agent-base@6:
version "6.0.2" version "6.0.2"
@ -4195,6 +4197,11 @@ domexception@^4.0.0:
dependencies: dependencies:
webidl-conversions "^7.0.0" webidl-conversions "^7.0.0"
dompurify@^3.0.5:
version "3.0.5"
resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-3.0.5.tgz#eb3d9cfa10037b6e73f32c586682c4b2ab01fbed"
integrity sha512-F9e6wPGtY+8KNMRAVfxeCOHU0/NPWMSENNq4pQctuXRqqdEPW7q3CrLbR5Nse044WwacyjHGOMlvNsBe1y6z9A==
domutils@^1.7.0: domutils@^1.7.0:
version "1.7.0" version "1.7.0"
resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a"