diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index a373d685e0e3f..0000000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,225 +0,0 @@
-version: 2.1
-
-orbs:
- ruby: circleci/ruby@2.0.0
- node: circleci/node@5.0.3
-
-executors:
- default:
- parameters:
- ruby-version:
- type: string
- docker:
- - image: cimg/ruby:<< parameters.ruby-version >>
- environment:
- BUNDLE_JOBS: 3
- BUNDLE_RETRY: 3
- CONTINUOUS_INTEGRATION: true
- DB_HOST: localhost
- DB_USER: root
- DISABLE_SIMPLECOV: true
- RAILS_ENV: test
- - image: cimg/postgres:14.5
- environment:
- POSTGRES_USER: root
- POSTGRES_HOST_AUTH_METHOD: trust
- - image: cimg/redis:7.0
-
-commands:
- install-system-dependencies:
- steps:
- - run:
- name: Install system dependencies
- command: |
- sudo apt-get update
- sudo apt-get install -y libicu-dev libidn11-dev
- install-ruby-dependencies:
- parameters:
- ruby-version:
- type: string
- steps:
- - run:
- command: |
- bundle config clean 'true'
- bundle config frozen 'true'
- bundle config without 'development production'
- name: Set bundler settings
- - ruby/install-deps:
- bundler-version: '2.3.26'
- key: ruby<< parameters.ruby-version >>-gems-v1
- wait-db:
- steps:
- - run:
- command: dockerize -wait tcp://localhost:5432 -wait tcp://localhost:6379 -timeout 1m
- name: Wait for PostgreSQL and Redis
-
-jobs:
- build:
- docker:
- - image: cimg/ruby:3.0-node
- environment:
- RAILS_ENV: test
- steps:
- - checkout
- - install-system-dependencies
- - install-ruby-dependencies:
- ruby-version: '3.0'
- - node/install-packages:
- cache-version: v1
- pkg-manager: yarn
- - run:
- command: |
- export NODE_OPTIONS=--openssl-legacy-provider
- ./bin/rails assets:precompile
- name: Precompile assets
- - persist_to_workspace:
- paths:
- - public/assets
- - public/packs-test
- root: .
-
- test:
- parameters:
- ruby-version:
- type: string
- executor:
- name: default
- ruby-version: << parameters.ruby-version >>
- environment:
- ALLOW_NOPAM: true
- PAM_ENABLED: true
- PAM_DEFAULT_SERVICE: pam_test
- PAM_CONTROLLED_SERVICE: pam_test_controlled
- parallelism: 4
- steps:
- - checkout
- - install-system-dependencies
- - run:
- command: sudo apt-get install -y ffmpeg imagemagick libpam-dev
- name: Install additional system dependencies
- - run:
- command: bundle config with 'pam_authentication'
- name: Enable PAM authentication
- - install-ruby-dependencies:
- ruby-version: << parameters.ruby-version >>
- - attach_workspace:
- at: .
- - wait-db
- - run:
- command: ./bin/rails db:create db:schema:load db:seed
- name: Load database schema
- - ruby/rspec-test
-
- test-migrations:
- executor:
- name: default
- ruby-version: '3.0'
- steps:
- - checkout
- - install-system-dependencies
- - install-ruby-dependencies:
- ruby-version: '3.0'
- - wait-db
- - run:
- command: ./bin/rails db:create
- name: Create database
- - run:
- command: ./bin/rails db:migrate VERSION=20171010025614
- name: Run migrations up to v2.0.0
- - run:
- command: ./bin/rails tests:migrations:populate_v2
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate VERSION=20180514140000
- name: Run migrations up to v2.4.0
- - run:
- command: ./bin/rails tests:migrations:populate_v2_4
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate VERSION=20180707154237
- name: Run migrations up to v2.4.3
- - run:
- command: ./bin/rails tests:migrations:populate_v2_4_3
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate
- name: Run all remaining migrations
- - run:
- command: ./bin/rails tests:migrations:check_database
- name: Check migration result
-
- test-two-step-migrations:
- executor:
- name: default
- ruby-version: '3.0'
- steps:
- - checkout
- - install-system-dependencies
- - install-ruby-dependencies:
- ruby-version: '3.0'
- - wait-db
- - run:
- command: ./bin/rails db:create
- name: Create database
- - run:
- command: ./bin/rails db:migrate VERSION=20171010025614
- name: Run migrations up to v2.0.0
- - run:
- command: ./bin/rails tests:migrations:populate_v2
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate VERSION=20180514140000
- name: Run pre-deployment migrations up to v2.4.0
- environment:
- SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- - run:
- command: ./bin/rails tests:migrations:populate_v2_4
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate VERSION=20180707154237
- name: Run migrations up to v2.4.3
- environment:
- SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- - run:
- command: ./bin/rails tests:migrations:populate_v2_4_3
- name: Populate database with test data
- - run:
- command: ./bin/rails db:migrate
- name: Run all remaining pre-deployment migrations
- environment:
- SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- - run:
- command: ./bin/rails db:migrate
- name: Run all post-deployment migrations
- - run:
- command: ./bin/rails tests:migrations:check_database
- name: Check migration result
-
-workflows:
- version: 2
- build-and-test:
- jobs:
- - build
- - test:
- matrix:
- parameters:
- ruby-version:
- - '2.7'
- - '3.0'
- name: test-ruby<< matrix.ruby-version >>
- requires:
- - build
- - test-migrations:
- requires:
- - build
- - test-two-step-migrations:
- requires:
- - build
- - node/run:
- cache-version: v1
- name: test-webui
- pkg-manager: yarn
- requires:
- - build
- version: '16.18'
- yarn-run: test:jest
diff --git a/.github/workflows/build-container-image.yml b/.github/workflows/build-container-image.yml
new file mode 100644
index 0000000000000..b9aebcc46c60d
--- /dev/null
+++ b/.github/workflows/build-container-image.yml
@@ -0,0 +1,92 @@
+on:
+ workflow_call:
+ inputs:
+ platforms:
+ required: true
+ type: string
+ cache:
+ type: boolean
+ default: true
+ use_native_arm64_builder:
+ type: boolean
+ push_to_images:
+ type: string
+ flavor:
+ type: string
+ tags:
+ type: string
+ labels:
+ type: string
+
+jobs:
+ build-image:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - uses: docker/setup-qemu-action@v2
+ if: contains(inputs.platforms, 'linux/arm64') && !inputs.use_native_arm64_builder
+
+ - uses: docker/setup-buildx-action@v2
+ id: buildx
+ if: ${{ !(inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')) }}
+
+ - name: Start a local Docker Builder
+ if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
+ run: |
+ docker run --rm -d --name buildkitd -p 1234:1234 --privileged moby/buildkit:latest --addr tcp://0.0.0.0:1234
+
+ - uses: docker/setup-buildx-action@v2
+ id: buildx-native
+ if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
+ with:
+ driver: remote
+ endpoint: tcp://localhost:1234
+ platforms: linux/amd64
+ append: |
+ - endpoint: tcp://${{ vars.DOCKER_BUILDER_HETZNER_ARM64_01_HOST }}:13865
+ platforms: linux/arm64
+ name: mastodon-docker-builder-arm64-01
+ driver-opts:
+ - servername=mastodon-docker-builder-arm64-01
+ env:
+ BUILDER_NODE_1_AUTH_TLS_CACERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CACERT }}
+ BUILDER_NODE_1_AUTH_TLS_CERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CERT }}
+ BUILDER_NODE_1_AUTH_TLS_KEY: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_KEY }}
+
+ - name: Log in to Docker Hub
+ if: contains(inputs.push_to_images, 'tootsuite')
+ uses: docker/login-action@v2
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Log in to the Github Container registry
+ if: contains(inputs.push_to_images, 'ghcr.io')
+ uses: docker/login-action@v2
+ with:
+ registry: ghcr.io
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+
+ - uses: docker/metadata-action@v4
+ id: meta
+ if: ${{ inputs.push_to_images != '' }}
+ with:
+ images: ${{ inputs.push_to_images }}
+ flavor: ${{ inputs.flavor }}
+ tags: ${{ inputs.tags }}
+ labels: ${{ inputs.labels }}
+
+ - uses: docker/build-push-action@v4
+ with:
+ context: .
+ platforms: ${{ inputs.platforms }}
+ provenance: false
+ builder: ${{ steps.buildx.outputs.name || steps.buildx-native.outputs.name }}
+ push: ${{ inputs.push_to_images != '' }}
+ tags: ${{ steps.meta.outputs.tags }}
+ labels: ${{ steps.meta.outputs.labels }}
+ cache-from: ${{ inputs.cache && 'type=gha' || '' }}
+ cache-to: ${{ inputs.cache && 'type=gha,mode=max' || '' }}
diff --git a/.github/workflows/build-image.yml b/.github/workflows/build-image.yml
deleted file mode 100644
index b3aa9f45c9725..0000000000000
--- a/.github/workflows/build-image.yml
+++ /dev/null
@@ -1,70 +0,0 @@
-name: Build container image
-on:
- workflow_dispatch:
- push:
- branches:
- - 'main'
- tags:
- - '*'
- pull_request:
- paths:
- - .github/workflows/build-image.yml
- - Dockerfile
-permissions:
- contents: read
- packages: write
-
-jobs:
- build-image:
- runs-on: ubuntu-latest
-
- concurrency:
- group: ${{ github.ref }}
- cancel-in-progress: true
-
- steps:
- - uses: actions/checkout@v3
- - uses: hadolint/hadolint-action@v3.1.0
- - uses: docker/setup-qemu-action@v2
- - uses: docker/setup-buildx-action@v2
-
- - name: Log in to Docker Hub
- uses: docker/login-action@v2
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
- if: github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request'
-
- - name: Log in to the Github Container registry
- uses: docker/login-action@v2
- with:
- registry: ghcr.io
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- if: github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request'
-
- - uses: docker/metadata-action@v4
- id: meta
- with:
- images: |
- tootsuite/mastodon
- ghcr.io/mastodon/mastodon
- flavor: |
- latest=auto
- tags: |
- type=edge,branch=main
- type=pep440,pattern={{raw}}
- type=pep440,pattern=v{{major}}.{{minor}}
- type=ref,event=pr
-
- - uses: docker/build-push-action@v4
- with:
- context: .
- platforms: linux/amd64,linux/arm64
- provenance: false
- builder: ${{ steps.buildx.outputs.name }}
- push: ${{ github.repository == 'mastodon/mastodon' && github.event_name != 'pull_request' }}
- tags: ${{ steps.meta.outputs.tags }}
- labels: ${{ steps.meta.outputs.labels }}
- cache-from: type=gha
- cache-to: type=gha,mode=max
diff --git a/.github/workflows/build-releases.yml b/.github/workflows/build-releases.yml
new file mode 100644
index 0000000000000..98b9dfc0dfb10
--- /dev/null
+++ b/.github/workflows/build-releases.yml
@@ -0,0 +1,29 @@
+name: Build container release images
+on:
+ push:
+ tags:
+ - '*'
+
+permissions:
+ contents: read
+ packages: write
+
+jobs:
+ build-image:
+ uses: ./.github/workflows/build-container-image.yml
+ with:
+ platforms: linux/amd64,linux/arm64
+ use_native_arm64_builder: true
+ push_to_images: |
+ tootsuite/mastodon
+ ghcr.io/mastodon/mastodon
+ # Do not use cache when building releases, so apt update is always run and the release always contains the latest packages
+ cache: false
+ # Only tag with latest when run against the latest stable branch
+ # This needs to be updated after each minor version release
+ flavor: |
+ latest=${{ startsWith(github.ref, 'refs/tags/v4.1.') }}
+ tags: |
+ type=pep440,pattern={{raw}}
+ type=pep440,pattern=v{{major}}.{{minor}}
+ secrets: inherit
diff --git a/.github/workflows/test-image-build.yml b/.github/workflows/test-image-build.yml
new file mode 100644
index 0000000000000..71344c0046aa0
--- /dev/null
+++ b/.github/workflows/test-image-build.yml
@@ -0,0 +1,15 @@
+name: Test container image build
+on:
+ pull_request:
+permissions:
+ contents: read
+
+jobs:
+ build-image:
+ concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+ uses: ./.github/workflows/build-container-image.yml
+ with:
+ platforms: linux/amd64 # Testing only on native platform so it is performant
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5ab9eb10dfeff..d29ec5ab471f0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,42 @@ Changelog
All notable changes to this project will be documented in this file.
+## [4.1.8] - 2023-09-19
+
+### Fixed
+
+- Fix post edits not being forwarded as expected ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26936))
+- Fix moderator rights inconsistencies ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26729))
+- Fix crash when encountering invalid URL ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26814))
+- Fix cached posts including stale stats ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26409))
+- Fix uploading of video files for which `ffprobe` reports `0/0` average framerate ([NicolaiSoeborg](https://github.com/mastodon/mastodon/pull/26500))
+- Fix unexpected audio stream transcoding when uploaded video is eligible to passthrough ([yufushiro](https://github.com/mastodon/mastodon/pull/26608))
+
+### Security
+
+- Fix missing HTML sanitization in translation API (CVE-2023-42452)
+- Fix incorrect domain name normalization (CVE-2023-42451)
+
+## [4.1.7] - 2023-09-05
+
+### Changed
+
+- Change remote report processing to accept reports with long comments, but truncate them ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/25028))
+
+### Fixed
+
+- **Fix blocking subdomains of an already-blocked domain** ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26392))
+- Fix `/api/v1/timelines/tag/:hashtag` allowing for unauthenticated access when public preview is disabled ([danielmbrasil](https://github.com/mastodon/mastodon/pull/26237))
+- Fix inefficiencies in `PlainTextFormatter` ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26727))
+
+## [4.1.6] - 2023-07-31
+
+### Fixed
+
+- Fix memory leak in streaming server ([ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26228))
+- Fix wrong filters sometimes applying in streaming ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26159), [ThisIsMissEm](https://github.com/mastodon/mastodon/pull/26213), [renchap](https://github.com/mastodon/mastodon/pull/26233))
+- Fix incorrect connect timeout in outgoing requests ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/26116))
+
## [4.1.5] - 2023-07-21
### Added
diff --git a/Dockerfile b/Dockerfile
index 160efeea4a1df..c0f584dc4f9eb 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,6 +17,7 @@ COPY Gemfile* package.json yarn.lock /opt/mastodon/
# hadolint ignore=DL3008
RUN apt-get update && \
+ apt-get -yq dist-upgrade && \
apt-get install -y --no-install-recommends build-essential \
ca-certificates \
git \
diff --git a/app/controllers/admin/domain_blocks_controller.rb b/app/controllers/admin/domain_blocks_controller.rb
index 74764640b8f8b..746623a06c372 100644
--- a/app/controllers/admin/domain_blocks_controller.rb
+++ b/app/controllers/admin/domain_blocks_controller.rb
@@ -37,7 +37,7 @@ def create
@domain_block.errors.delete(:domain)
render :new
else
- if existing_domain_block.present?
+ if existing_domain_block.present? && existing_domain_block.domain == TagManager.instance.normalize_domain(@domain_block.domain.strip)
@domain_block = existing_domain_block
@domain_block.update(resource_params)
end
diff --git a/app/controllers/api/v1/timelines/tag_controller.rb b/app/controllers/api/v1/timelines/tag_controller.rb
index 64a1db58df3ae..3f41eb6887c75 100644
--- a/app/controllers/api/v1/timelines/tag_controller.rb
+++ b/app/controllers/api/v1/timelines/tag_controller.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
class Api::V1::Timelines::TagController < Api::BaseController
+ before_action -> { doorkeeper_authorize! :read, :'read:statuses' }, only: :show, if: :require_auth?
before_action :load_tag
after_action :insert_pagination_headers, unless: -> { @statuses.empty? }
@@ -11,6 +12,10 @@ def show
private
+ def require_auth?
+ !Setting.timeline_preview
+ end
+
def load_tag
@tag = Tag.find_normalized(params[:id])
end
diff --git a/app/models/account_statuses_filter.rb b/app/lib/account_statuses_filter.rb
similarity index 100%
rename from app/models/account_statuses_filter.rb
rename to app/lib/account_statuses_filter.rb
diff --git a/app/lib/activitypub/activity/flag.rb b/app/lib/activitypub/activity/flag.rb
index b0443849a6b8b..7539bda422ff3 100644
--- a/app/lib/activitypub/activity/flag.rb
+++ b/app/lib/activitypub/activity/flag.rb
@@ -16,7 +16,7 @@ def perform
@account,
target_account,
status_ids: target_statuses.nil? ? [] : target_statuses.map(&:id),
- comment: @json['content'] || '',
+ comment: report_comment,
uri: report_uri
)
end
@@ -35,4 +35,8 @@ def object_uris
def report_uri
@json['id'] unless @json['id'].nil? || invalid_origin?(@json['id'])
end
+
+ def report_comment
+ (@json['content'] || '')[0...5000]
+ end
end
diff --git a/app/lib/activitypub/activity/update.rb b/app/lib/activitypub/activity/update.rb
index e7c3bc9bf83de..91ebd3732a828 100644
--- a/app/lib/activitypub/activity/update.rb
+++ b/app/lib/activitypub/activity/update.rb
@@ -28,6 +28,6 @@ def update_status
return if @status.nil?
- ActivityPub::ProcessStatusUpdateService.new.call(@status, @object, request_id: @options[:request_id])
+ ActivityPub::ProcessStatusUpdateService.new.call(@status, @json, @object, request_id: @options[:request_id])
end
end
diff --git a/app/lib/activitypub/tag_manager.rb b/app/lib/activitypub/tag_manager.rb
index 3d6b28ef5814d..e05c0652268f7 100644
--- a/app/lib/activitypub/tag_manager.rb
+++ b/app/lib/activitypub/tag_manager.rb
@@ -27,6 +27,8 @@ def url_for(target)
when :note, :comment, :activity
return activity_account_status_url(target.account, target) if target.reblog?
short_account_status_url(target.account, target)
+ when :flag
+ target.uri
end
end
@@ -41,6 +43,8 @@ def uri_for(target)
account_status_url(target.account, target)
when :emoji
emoji_url(target)
+ when :flag
+ target.uri
end
end
diff --git a/app/lib/admin/account_statuses_filter.rb b/app/lib/admin/account_statuses_filter.rb
new file mode 100644
index 0000000000000..94927e4b6806c
--- /dev/null
+++ b/app/lib/admin/account_statuses_filter.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class Admin::AccountStatusesFilter < AccountStatusesFilter
+ private
+
+ def blocked?
+ false
+ end
+end
diff --git a/app/lib/plain_text_formatter.rb b/app/lib/plain_text_formatter.rb
index 6fa2bc5d2cc56..d1ff6808b2a99 100644
--- a/app/lib/plain_text_formatter.rb
+++ b/app/lib/plain_text_formatter.rb
@@ -1,9 +1,7 @@
# frozen_string_literal: true
class PlainTextFormatter
- include ActionView::Helpers::TextHelper
-
-  NEWLINE_TAGS_RE = /(<br \/>|<br>|<\/p>)+/.freeze
+  NEWLINE_TAGS_RE = %r{(<br />|<br>|</p>)+}
attr_reader :text, :local
@@ -18,7 +16,10 @@ def to_s
if local?
text
else
- html_entities.decode(strip_tags(insert_newlines)).chomp
+ node = Nokogiri::HTML.fragment(insert_newlines)
+ # Elements that are entirely removed with our Sanitize config
+ node.xpath('.//iframe|.//math|.//noembed|.//noframes|.//noscript|.//plaintext|.//script|.//style|.//svg|.//xmp').remove
+ node.text.chomp
end
end
@@ -27,8 +28,4 @@ def to_s
def insert_newlines
text.gsub(NEWLINE_TAGS_RE) { |match| "#{match}\n" }
end
-
- def html_entities
- HTMLEntities.new
- end
end
diff --git a/app/lib/request.rb b/app/lib/request.rb
index 9c02410a52e52..660dc0fa769f5 100644
--- a/app/lib/request.rb
+++ b/app/lib/request.rb
@@ -285,11 +285,11 @@ def open(host, *args)
end
until socks.empty?
- _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect])
+ _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect_timeout])
if available_socks.nil?
socks.each(&:close)
- raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect]} seconds"
+ raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect_timeout]} seconds"
end
available_socks.each do |sock|
diff --git a/app/lib/tag_manager.rb b/app/lib/tag_manager.rb
index a1d12a654eb43..2e929d6e3f3f3 100644
--- a/app/lib/tag_manager.rb
+++ b/app/lib/tag_manager.rb
@@ -7,18 +7,18 @@ class TagManager
include RoutingHelper
def web_domain?(domain)
- domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.web_domain).zero?
+ domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.web_domain).zero?
end
def local_domain?(domain)
- domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.local_domain).zero?
+ domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.local_domain).zero?
end
def normalize_domain(domain)
return if domain.nil?
uri = Addressable::URI.new
- uri.host = domain.gsub(/[\/]/, '')
+ uri.host = domain.delete_suffix('/')
uri.normalized_host
end
@@ -28,7 +28,7 @@ def local_url?(url)
domain = uri.host + (uri.port ? ":#{uri.port}" : '')
TagManager.instance.web_domain?(domain)
- rescue Addressable::URI::InvalidURIError
+ rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
false
end
end
diff --git a/app/lib/video_metadata_extractor.rb b/app/lib/video_metadata_extractor.rb
index 2896620cb21b0..f27d34868a279 100644
--- a/app/lib/video_metadata_extractor.rb
+++ b/app/lib/video_metadata_extractor.rb
@@ -43,6 +43,9 @@ def parse_metadata
@height = video_stream[:height]
@frame_rate = video_stream[:avg_frame_rate] == '0/0' ? nil : Rational(video_stream[:avg_frame_rate])
@r_frame_rate = video_stream[:r_frame_rate] == '0/0' ? nil : Rational(video_stream[:r_frame_rate])
+ # For some video streams the frame_rate reported by `ffprobe` will be 0/0, but for these streams we
+ # should use `r_frame_rate` instead. Video screencasts generated by GNOME Screencast have this issue.
+ @frame_rate ||= @r_frame_rate
end
if (audio_stream = audio_streams.first)
diff --git a/app/models/admin/status_batch_action.rb b/app/models/admin/status_batch_action.rb
index b8bdec7223fe3..6641688788847 100644
--- a/app/models/admin/status_batch_action.rb
+++ b/app/models/admin/status_batch_action.rb
@@ -140,6 +140,6 @@ def report_params
end
def allowed_status_ids
- AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
+ Admin::AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
end
end
diff --git a/app/models/report.rb b/app/models/report.rb
index 525d22ad5decd..3ae5c10dd0bd1 100644
--- a/app/models/report.rb
+++ b/app/models/report.rb
@@ -39,7 +39,10 @@ class Report < ApplicationRecord
scope :resolved, -> { where.not(action_taken_at: nil) }
scope :with_accounts, -> { includes([:account, :target_account, :action_taken_by_account, :assigned_account].index_with({ user: [:invite_request, :invite] })) }
- validates :comment, length: { maximum: 1_000 }
+ # A report is considered local if the reporter is local
+ delegate :local?, to: :account
+
+ validates :comment, length: { maximum: 1_000 }, if: :local?
validates :rule_ids, absence: true, unless: :violation?
validate :validate_rule_ids
@@ -50,10 +53,6 @@ class Report < ApplicationRecord
violation: 2_000,
}
- def local?
- false # Force uri_for to use uri attribute
- end
-
before_validation :set_uri, only: :create
after_create_commit :trigger_webhooks
diff --git a/app/models/status.rb b/app/models/status.rb
index b1c49e99a45da..afd3747ea3676 100644
--- a/app/models/status.rb
+++ b/app/models/status.rb
@@ -354,13 +354,25 @@ def reload_stale_associations!(cached_items)
account_ids.uniq!
+ status_ids = cached_items.map { |item| item.reblog? ? item.reblog_of_id : item.id }.uniq
+
return if account_ids.empty?
accounts = Account.where(id: account_ids).includes(:account_stat, :user).index_by(&:id)
+ status_stats = StatusStat.where(status_id: status_ids).index_by(&:status_id)
+
cached_items.each do |item|
item.account = accounts[item.account_id]
item.reblog.account = accounts[item.reblog.account_id] if item.reblog?
+
+ if item.reblog?
+ status_stat = status_stats[item.reblog.id]
+ item.reblog.status_stat = status_stat if status_stat.present?
+ else
+ status_stat = status_stats[item.id]
+ item.status_stat = status_stat if status_stat.present?
+ end
end
end
diff --git a/app/policies/admin/status_policy.rb b/app/policies/admin/status_policy.rb
index ffaa30f13de60..e9379c25eca90 100644
--- a/app/policies/admin/status_policy.rb
+++ b/app/policies/admin/status_policy.rb
@@ -12,7 +12,7 @@ def index?
end
def show?
- role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported?)
+ role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported? || viewable_through_normal_policy?)
end
def destroy?
@@ -26,4 +26,10 @@ def update?
def review?
role.can?(:manage_taxonomies)
end
+
+ private
+
+ def viewable_through_normal_policy?
+ StatusPolicy.new(current_account, record, @preloaded_relations).show?
+ end
end
diff --git a/app/services/activitypub/fetch_remote_poll_service.rb b/app/services/activitypub/fetch_remote_poll_service.rb
index 1829e791ce6eb..41b9b2f0c9be9 100644
--- a/app/services/activitypub/fetch_remote_poll_service.rb
+++ b/app/services/activitypub/fetch_remote_poll_service.rb
@@ -8,6 +8,6 @@ def call(poll, on_behalf_of = nil)
return unless supported_context?(json)
- ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json)
+ ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json, json)
end
end
diff --git a/app/services/activitypub/process_status_update_service.rb b/app/services/activitypub/process_status_update_service.rb
index 1dc393e28e9a6..ecb058bf78712 100644
--- a/app/services/activitypub/process_status_update_service.rb
+++ b/app/services/activitypub/process_status_update_service.rb
@@ -5,10 +5,11 @@ class ActivityPub::ProcessStatusUpdateService < BaseService
include Redisable
include Lockable
- def call(status, json, request_id: nil)
+ def call(status, activity_json, object_json, request_id: nil)
raise ArgumentError, 'Status has unsaved changes' if status.changed?
- @json = json
+ @activity_json = activity_json
+ @json = object_json
@status_parser = ActivityPub::Parser::StatusParser.new(@json)
@uri = @status_parser.uri
@status = status
@@ -308,6 +309,6 @@ def forward_activity!
end
def forwarder
- @forwarder ||= ActivityPub::Forwarder.new(@account, @json, @status)
+ @forwarder ||= ActivityPub::Forwarder.new(@account, @activity_json, @status)
end
end
diff --git a/app/services/translate_status_service.rb b/app/services/translate_status_service.rb
index 539a0d9db5fd9..b905f8158ad3e 100644
--- a/app/services/translate_status_service.rb
+++ b/app/services/translate_status_service.rb
@@ -12,7 +12,9 @@ def call(status, target_language)
@content = status_content_format(@status)
@target_language = target_language
- Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) { translation_backend.translate(@content, @status.language, @target_language) }
+ Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) do
+ Sanitize.fragment(translation_backend.translate(@content, @status.language, @target_language), Sanitize::Config::MASTODON_STRICT)
+ end
end
private
diff --git a/docker-compose.yml b/docker-compose.yml
index e3fa9ae1e2e50..769b215737d8f 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -56,7 +56,7 @@ services:
web:
build: .
- image: ghcr.io/mastodon/mastodon:v4.1.5
+ image: ghcr.io/mastodon/mastodon:v4.1.8
restart: always
env_file: .env.production
command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@@ -77,7 +77,7 @@ services:
streaming:
build: .
- image: ghcr.io/mastodon/mastodon:v4.1.5
+ image: ghcr.io/mastodon/mastodon:v4.1.8
restart: always
env_file: .env.production
command: node ./streaming
@@ -95,7 +95,7 @@ services:
sidekiq:
build: .
- image: ghcr.io/mastodon/mastodon:v4.1.5
+ image: ghcr.io/mastodon/mastodon:v4.1.8
restart: always
env_file: .env.production
command: bundle exec sidekiq
diff --git a/lib/mastodon/version.rb b/lib/mastodon/version.rb
index 843bc7327b841..04b69e0f1e28b 100644
--- a/lib/mastodon/version.rb
+++ b/lib/mastodon/version.rb
@@ -13,7 +13,7 @@ def minor
end
def patch
- 5
+ 8
end
def flags
diff --git a/lib/paperclip/transcoder.rb b/lib/paperclip/transcoder.rb
index be40b4924107a..0f2e30f7d5e45 100644
--- a/lib/paperclip/transcoder.rb
+++ b/lib/paperclip/transcoder.rb
@@ -37,12 +37,14 @@ def make
@output_options['f'] = 'image2'
@output_options['vframes'] = 1
when 'mp4'
- @output_options['acodec'] = 'aac'
- @output_options['strict'] = 'experimental'
-
- if high_vfr?(metadata) && !eligible_to_passthrough?(metadata)
- @output_options['vsync'] = 'vfr'
- @output_options['r'] = @vfr_threshold
+ unless eligible_to_passthrough?(metadata)
+ @output_options['acodec'] = 'aac'
+ @output_options['strict'] = 'experimental'
+
+ if high_vfr?(metadata)
+ @output_options['vsync'] = 'vfr'
+ @output_options['r'] = @vfr_threshold
+ end
end
end
diff --git a/spec/controllers/admin/statuses_controller_spec.rb b/spec/controllers/admin/statuses_controller_spec.rb
index 7f912c1c07bb2..877c7e63ebfb3 100644
--- a/spec/controllers/admin/statuses_controller_spec.rb
+++ b/spec/controllers/admin/statuses_controller_spec.rb
@@ -40,24 +40,36 @@
end
describe 'POST #batch' do
- before do
- post :batch, params: { account_id: account.id, action => '', admin_status_batch_action: { status_ids: status_ids } }
- end
+ subject { post :batch, params: { :account_id => account.id, action => '', :admin_status_batch_action => { status_ids: status_ids } } }
let(:status_ids) { [media_attached_status.id] }
- context 'when action is report' do
+ shared_examples 'when action is report' do
let(:action) { 'report' }
it 'creates a report' do
+ subject
+
report = Report.last
expect(report.target_account_id).to eq account.id
expect(report.status_ids).to eq status_ids
end
it 'redirects to report page' do
+ subject
+
expect(response).to redirect_to(admin_report_path(Report.last.id))
end
end
+
+ it_behaves_like 'when action is report'
+
+ context 'when the moderator is blocked by the author' do
+ before do
+ account.block!(user.account)
+ end
+
+ it_behaves_like 'when action is report'
+ end
end
end
diff --git a/spec/controllers/api/v1/timelines/tag_controller_spec.rb b/spec/controllers/api/v1/timelines/tag_controller_spec.rb
index 718911083362d..1c60798fcf6fe 100644
--- a/spec/controllers/api/v1/timelines/tag_controller_spec.rb
+++ b/spec/controllers/api/v1/timelines/tag_controller_spec.rb
@@ -5,36 +5,66 @@
describe Api::V1::Timelines::TagController do
render_views
- let(:user) { Fabricate(:user) }
+ let(:user) { Fabricate(:user) }
+ let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:statuses') }
before do
allow(controller).to receive(:doorkeeper_token) { token }
end
- context 'with a user context' do
- let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id) }
+ describe 'GET #show' do
+ subject do
+ get :show, params: { id: 'test' }
+ end
- describe 'GET #show' do
- before do
- PostStatusService.new.call(user.account, text: 'It is a #test')
+ before do
+ PostStatusService.new.call(user.account, text: 'It is a #test')
+ end
+
+ context 'when the instance allows public preview' do
+ context 'when the user is not authenticated' do
+ let(:token) { nil }
+
+ it 'returns http success', :aggregate_failures do
+ subject
+
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
- it 'returns http success' do
- get :show, params: { id: 'test' }
- expect(response).to have_http_status(200)
- expect(response.headers['Link'].links.size).to eq(2)
+ context 'when the user is authenticated' do
+ it 'returns http success', :aggregate_failures do
+ subject
+
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
end
- end
- context 'without a user context' do
- let(:token) { Fabricate(:accessible_access_token, resource_owner_id: nil) }
+ context 'when the instance does not allow public preview' do
+ before do
+ Form::AdminSettings.new(timeline_preview: false).save
+ end
+
+ context 'when the user is not authenticated' do
+ let(:token) { nil }
+
+ it 'returns http unauthorized' do
+ subject
+
+ expect(response).to have_http_status(401)
+ end
+ end
+
+ context 'when the user is authenticated' do
+ it 'returns http success', :aggregate_failures do
+ subject
- describe 'GET #show' do
- it 'returns http success' do
- get :show, params: { id: 'test' }
- expect(response).to have_http_status(200)
- expect(response.headers['Link']).to be_nil
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
end
end
diff --git a/spec/controllers/concerns/cache_concern_spec.rb b/spec/controllers/concerns/cache_concern_spec.rb
index a34d7d7267696..21daa19921007 100644
--- a/spec/controllers/concerns/cache_concern_spec.rb
+++ b/spec/controllers/concerns/cache_concern_spec.rb
@@ -13,12 +13,17 @@ def empty_array
def empty_relation
render plain: cache_collection(Status.none, Status).size
end
+
+ def account_statuses_favourites
+ render plain: cache_collection(Status.where(account_id: params[:id]), Status).map(&:favourites_count)
+ end
end
before do
routes.draw do
- get 'empty_array' => 'anonymous#empty_array'
- post 'empty_relation' => 'anonymous#empty_relation'
+ get 'empty_array' => 'anonymous#empty_array'
+ get 'empty_relation' => 'anonymous#empty_relation'
+ get 'account_statuses_favourites' => 'anonymous#account_statuses_favourites'
end
end
@@ -36,5 +41,20 @@ def empty_relation
expect(response.body).to eq '0'
end
end
+
+ context 'when given a collection of statuses' do
+ let!(:account) { Fabricate(:account) }
+ let!(:status) { Fabricate(:status, account: account) }
+
+ it 'correctly updates with new interactions' do
+ get :account_statuses_favourites, params: { id: account.id }
+ expect(response.body).to eq '[0]'
+
+ FavouriteService.new.call(account, status)
+
+ get :account_statuses_favourites, params: { id: account.id }
+ expect(response.body).to eq '[1]'
+ end
+ end
end
end
diff --git a/spec/lib/activitypub/activity/flag_spec.rb b/spec/lib/activitypub/activity/flag_spec.rb
index 2f2d13876760d..6d7a8a7ec2e8d 100644
--- a/spec/lib/activitypub/activity/flag_spec.rb
+++ b/spec/lib/activitypub/activity/flag_spec.rb
@@ -37,6 +37,37 @@
end
end
+ context 'when the report comment is excessively long' do
+ subject do
+ described_class.new({
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: flag_id,
+ type: 'Flag',
+ content: long_comment,
+ actor: ActivityPub::TagManager.instance.uri_for(sender),
+ object: [
+ ActivityPub::TagManager.instance.uri_for(flagged),
+ ActivityPub::TagManager.instance.uri_for(status),
+ ],
+ }.with_indifferent_access, sender)
+ end
+
+ let(:long_comment) { Faker::Lorem.characters(number: 6000) }
+
+ before do
+ subject.perform
+ end
+
+ it 'creates a report but with a truncated comment' do
+ report = Report.find_by(account: sender, target_account: flagged)
+
+ expect(report).to_not be_nil
+ expect(report.comment.length).to eq 5000
+ expect(report.comment).to eq long_comment[0...5000]
+ expect(report.status_ids).to eq [status.id]
+ end
+ end
+
context 'when the reported status is private and should not be visible to the remote server' do
let(:status) { Fabricate(:status, account: flagged, uri: 'foobar', visibility: :private) }
diff --git a/spec/models/report_spec.rb b/spec/models/report_spec.rb
index 874be41328cb5..c485a4a3c9ad1 100644
--- a/spec/models/report_spec.rb
+++ b/spec/models/report_spec.rb
@@ -125,10 +125,17 @@
expect(report).to be_valid
end
- it 'is invalid if comment is longer than 1000 characters' do
+ let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
+
+ it 'is invalid if comment is longer than 1000 characters only if reporter is local' do
report = Fabricate.build(:report, comment: Faker::Lorem.characters(number: 1001))
- report.valid?
+ expect(report.valid?).to be false
expect(report).to model_have_error_on_field(:comment)
end
+
+ it 'is valid if comment is longer than 1000 characters and reporter is not local' do
+ report = Fabricate.build(:report, account: remote_account, comment: Faker::Lorem.characters(number: 1001))
+ expect(report.valid?).to be true
+ end
end
end
diff --git a/spec/services/activitypub/process_status_update_service_spec.rb b/spec/services/activitypub/process_status_update_service_spec.rb
index 750369d57fbfc..09c7fe94a0c42 100644
--- a/spec/services/activitypub/process_status_update_service_spec.rb
+++ b/spec/services/activitypub/process_status_update_service_spec.rb
@@ -41,12 +41,12 @@ def poll_option_json(name, votes)
describe '#call' do
it 'updates text' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.text).to eq 'Hello universe'
end
it 'updates content warning' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.spoiler_text).to eq 'Show more'
end
@@ -64,7 +64,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -87,7 +87,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -135,7 +135,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -188,7 +188,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -216,11 +216,11 @@ def poll_option_json(name, votes)
end
it 'does not create any edits' do
- expect { subject.call(status, json) }.not_to change { status.reload.edits.pluck(&:id) }
+ expect { subject.call(status, json, json) }.to_not(change { status.reload.edits.pluck(&:id) })
end
it 'does not update the text, spoiler_text or edited_at' do
- expect { subject.call(status, json) }.not_to change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] }
+ expect { subject.call(status, json, json) }.to_not(change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] })
end
end
@@ -235,7 +235,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -259,7 +259,7 @@ def poll_option_json(name, votes)
before do
status.update(ordered_media_attachment_ids: nil)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -273,7 +273,7 @@ def poll_option_json(name, votes)
context 'originally without tags' do
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates tags' do
@@ -299,7 +299,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates tags' do
@@ -309,7 +309,7 @@ def poll_option_json(name, votes)
context 'originally without mentions' do
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates mentions' do
@@ -321,7 +321,7 @@ def poll_option_json(name, votes)
let(:mentions) { [alice, bob] }
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates mentions' do
@@ -332,7 +332,7 @@ def poll_option_json(name, votes)
context 'originally without media attachments' do
before do
stub_request(:get, 'https://example.com/foo.png').to_return(body: attachment_fixture('emojo.png'))
- subject.call(status, json)
+ subject.call(status, json, json)
end
let(:payload) do
@@ -382,7 +382,7 @@ def poll_option_json(name, votes)
before do
allow(RedownloadMediaWorker).to receive(:perform_async)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates the existing media attachment in-place' do
@@ -410,7 +410,7 @@ def poll_option_json(name, votes)
before do
poll = Fabricate(:poll, status: status)
status.update(preloadable_poll: poll)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'removes poll' do
@@ -440,7 +440,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'creates a poll' do
@@ -456,12 +456,12 @@ def poll_option_json(name, votes)
end
it 'creates edit history' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.edits.reload.map(&:text)).to eq ['Hello world', 'Hello universe']
end
it 'sets edited timestamp' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.edited_at.to_s).to eq '2021-09-08 22:39:25 UTC'
end
end
diff --git a/spec/services/report_service_spec.rb b/spec/services/report_service_spec.rb
index 02bc42ac170d6..1737a05ae3810 100644
--- a/spec/services/report_service_spec.rb
+++ b/spec/services/report_service_spec.rb
@@ -4,6 +4,14 @@
subject { described_class.new }
let(:source_account) { Fabricate(:account) }
+ let(:target_account) { Fabricate(:account) }
+
+ context 'with a local account' do
+ it 'has a uri' do
+ report = subject.call(source_account, target_account)
+ expect(report.uri).to_not be_nil
+ end
+ end
context 'for a remote account' do
let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
diff --git a/streaming/index.js b/streaming/index.js
index 041cb9b1dce40..84c8009513276 100644
--- a/streaming/index.js
+++ b/streaming/index.js
@@ -226,9 +226,15 @@ const startWorker = async (workerId) => {
callbacks.forEach(callback => callback(json));
};
+ /**
+ * @callback SubscriptionListener
+ * @param {ReturnType} json of the message
+ * @returns void
+ */
+
/**
* @param {string} channel
- * @param {function(string): void} callback
+ * @param {SubscriptionListener} callback
*/
const subscribe = (channel, callback) => {
log.silly(`Adding listener for ${channel}`);
@@ -245,7 +251,7 @@ const startWorker = async (workerId) => {
/**
* @param {string} channel
- * @param {function(Object): void} callback
+ * @param {SubscriptionListener} callback
*/
const unsubscribe = (channel, callback) => {
log.silly(`Removing listener for ${channel}`);
@@ -629,51 +635,66 @@ const startWorker = async (workerId) => {
* @param {string[]} ids
* @param {any} req
* @param {function(string, string): void} output
- * @param {function(string[], function(string): void): void} attachCloseHandler
+ * @param {undefined | function(string[], SubscriptionListener): void} attachCloseHandler
* @param {boolean=} needsFiltering
- * @returns {function(object): void}
+ * @returns {SubscriptionListener}
*/
const streamFrom = (ids, req, output, attachCloseHandler, needsFiltering = false) => {
const accountId = req.accountId || req.remoteAddress;
log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`);
- // Currently message is of type string, soon it'll be Record
- const listener = message => {
- const { event, payload, queued_at } = message;
-
- const transmit = () => {
- const now = new Date().getTime();
- const delta = now - queued_at;
- const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
+ const transmit = (event, payload) => {
+ // TODO: Replace "string"-based delete payloads with object payloads:
+ const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
- log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload} Delay: ${delta}ms`);
- output(event, encodedPayload);
- };
+ log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload}`);
+ output(event, encodedPayload);
+ };
- // Only messages that may require filtering are statuses, since notifications
- // are already personalized and deletes do not matter
- if (!needsFiltering || event !== 'update') {
- transmit();
+ // The listener used to process each message off the redis subscription,
+ // message here is an object with an `event` and `payload` property. Some
+ // events also include a queued_at value, but this is being removed shortly.
+ /** @type {SubscriptionListener} */
+ const listener = message => {
+ const { event, payload } = message;
+
+ // Streaming only needs to apply filtering to some channels and only to
+ // some events. This is because majority of the filtering happens on the
+ // Ruby on Rails side when producing the event for streaming.
+ //
+ // The only events that require filtering from the streaming server are
+ // `update` and `status.update`, all other events are transmitted to the
+ // client as soon as they're received (pass-through).
+ //
+ // The channels that need filtering are determined in the function
+ // `channelNameToIds` defined below:
+ if (!needsFiltering || (event !== 'update' && event !== 'status.update')) {
+ transmit(event, payload);
return;
}
- const unpackedPayload = payload;
- const targetAccountIds = [unpackedPayload.account.id].concat(unpackedPayload.mentions.map(item => item.id));
- const accountDomain = unpackedPayload.account.acct.split('@')[1];
+ // The rest of the logic from here on in this function is to handle
+ // filtering of statuses:
- if (Array.isArray(req.chosenLanguages) && unpackedPayload.language !== null && req.chosenLanguages.indexOf(unpackedPayload.language) === -1) {
- log.silly(req.requestId, `Message ${unpackedPayload.id} filtered by language (${unpackedPayload.language})`);
+ // Filter based on language:
+ if (Array.isArray(req.chosenLanguages) && payload.language !== null && req.chosenLanguages.indexOf(payload.language) === -1) {
+ log.silly(req.requestId, `Message ${payload.id} filtered by language (${payload.language})`);
return;
}
// When the account is not logged in, it is not necessary to confirm the block or mute
if (!req.accountId) {
- transmit();
+ transmit(event, payload);
return;
}
- pgPool.connect((err, client, done) => {
+ // Filter based on domain blocks, blocks, mutes, or custom filters:
+ const targetAccountIds = [payload.account.id].concat(payload.mentions.map(item => item.id));
+ const accountDomain = payload.account.acct.split('@')[1];
+
+ // TODO: Move this logic out of the message handling loop
+ pgPool.connect((err, client, releasePgConnection) => {
if (err) {
log.error(err);
return;
@@ -688,40 +709,57 @@ const startWorker = async (workerId) => {
SELECT 1
FROM mutes
WHERE account_id = $1
- AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, unpackedPayload.account.id].concat(targetAccountIds)),
+ AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, payload.account.id].concat(targetAccountIds)),
];
if (accountDomain) {
queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain]));
}
- if (!unpackedPayload.filtered && !req.cachedFilters) {
+ if (!payload.filtered && !req.cachedFilters) {
queries.push(client.query('SELECT filter.id AS id, filter.phrase AS title, filter.context AS context, filter.expires_at AS expires_at, filter.action AS filter_action, keyword.keyword AS keyword, keyword.whole_word AS whole_word FROM custom_filter_keywords keyword JOIN custom_filters filter ON keyword.custom_filter_id = filter.id WHERE filter.account_id = $1 AND (filter.expires_at IS NULL OR filter.expires_at > NOW())', [req.accountId]));
}
Promise.all(queries).then(values => {
- done();
+ releasePgConnection();
+ // Handling blocks & mutes and domain blocks: If one of those applies,
+ // then we don't transmit the payload of the event to the client
if (values[0].rows.length > 0 || (accountDomain && values[1].rows.length > 0)) {
return;
}
- if (!unpackedPayload.filtered && !req.cachedFilters) {
+ // If the payload already contains the `filtered` property, it means
+ // that filtering has been applied on the ruby on rails side, as
+ // such, we don't need to construct or apply the filters in streaming:
+ if (Object.prototype.hasOwnProperty.call(payload, "filtered")) {
+ transmit(event, payload);
+ return;
+ }
+
+ // Handling for constructing the custom filters and caching them on the request
+ // TODO: Move this logic out of the message handling lifecycle
+ if (!req.cachedFilters) {
const filterRows = values[accountDomain ? 2 : 1].rows;
- req.cachedFilters = filterRows.reduce((cache, row) => {
- if (cache[row.id]) {
- cache[row.id].keywords.push([row.keyword, row.whole_word]);
+ req.cachedFilters = filterRows.reduce((cache, filter) => {
+ if (cache[filter.id]) {
+ cache[filter.id].keywords.push([filter.keyword, filter.whole_word]);
} else {
- cache[row.id] = {
- keywords: [[row.keyword, row.whole_word]],
- expires_at: row.expires_at,
- repr: {
- id: row.id,
- title: row.title,
- context: row.context,
- expires_at: row.expires_at,
- filter_action: ['warn', 'hide'][row.filter_action],
+ cache[filter.id] = {
+ keywords: [[filter.keyword, filter.whole_word]],
+ expires_at: filter.expires_at,
+ filter: {
+ id: filter.id,
+ title: filter.title,
+ context: filter.context,
+ expires_at: filter.expires_at,
+ // filter.filter_action is the value from the
+ // custom_filters.action database column, it is an integer
+ // representing a value in an enum defined by Ruby on Rails:
+ //
+ // enum { warn: 0, hide: 1 }
+ filter_action: ['warn', 'hide'][filter.filter_action],
},
};
}
@@ -729,6 +767,10 @@ const startWorker = async (workerId) => {
return cache;
}, {});
+ // Construct the regular expressions for the custom filters: This
+ // needs to be done in a separate loop as the database returns one
+ // filterRow per keyword, so we need all the keywords before
+ // constructing the regular expression
Object.keys(req.cachedFilters).forEach((key) => {
req.cachedFilters[key].regexp = new RegExp(req.cachedFilters[key].keywords.map(([keyword, whole_word]) => {
let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
@@ -748,31 +790,58 @@ const startWorker = async (workerId) => {
});
}
- // Check filters
- if (req.cachedFilters && !unpackedPayload.filtered) {
- const status = unpackedPayload;
- const searchContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/
/g, '\n').replace(/<\/p>/g, '\n\n');
- const searchIndex = JSDOM.fragment(searchContent).textContent;
+ // Apply cachedFilters against the payload, constructing a
+ // `filter_results` array of FilterResult entities
+ if (req.cachedFilters) {
+ const status = payload;
+ // TODO: Calculate searchableContent in Ruby on Rails:
+ const searchableContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/
/g, '\n').replace(/<\/p>
/g, '\n\n');
+ const searchableTextContent = JSDOM.fragment(searchableContent).textContent;
const now = new Date();
- payload.filtered = [];
- Object.values(req.cachedFilters).forEach((cachedFilter) => {
- if ((cachedFilter.expires_at === null || cachedFilter.expires_at > now)) {
- const keyword_matches = searchIndex.match(cachedFilter.regexp);
- if (keyword_matches) {
- payload.filtered.push({
- filter: cachedFilter.repr,
- keyword_matches,
- });
- }
+ const filter_results = Object.values(req.cachedFilters).reduce((results, cachedFilter) => {
+ // Check the filter hasn't expired before applying:
+ if (cachedFilter.expires_at !== null && cachedFilter.expires_at < now) {
+ return results;
+ }
+
+ // Just in-case JSDOM fails to find textContent in searchableContent
+ if (!searchableTextContent) {
+ return results;
}
+
+ const keyword_matches = searchableTextContent.match(cachedFilter.regexp);
+ if (keyword_matches) {
+ // results is an Array of FilterResult; status_matches is always
+ // null as we only are only applying the keyword-based custom
+ // filters, not the status-based custom filters.
+ // https://docs.joinmastodon.org/entities/FilterResult/
+ results.push({
+ filter: cachedFilter.filter,
+ keyword_matches,
+ status_matches: null
+ });
+ }
+
+ return results;
+ }, []);
+
+ // Send the payload + the FilterResults as the `filtered` property
+ // to the streaming connection. To reach this code, the `event` must
+ // have been either `update` or `status.update`, meaning the
+ // `payload` is a Status entity, which has a `filtered` property:
+ //
+ // filtered: https://docs.joinmastodon.org/entities/Status/#filtered
+ transmit(event, {
+ ...payload,
+ filtered: filter_results
});
+ } else {
+ transmit(event, payload);
}
-
- transmit();
}).catch(err => {
+ releasePgConnection();
log.error(err);
- done();
});
});
};
@@ -781,7 +850,7 @@ const startWorker = async (workerId) => {
subscribe(`${redisPrefix}${id}`, listener);
});
- if (attachCloseHandler) {
+ if (typeof attachCloseHandler === 'function') {
attachCloseHandler(ids.map(id => `${redisPrefix}${id}`), listener);
}
@@ -826,12 +895,13 @@ const startWorker = async (workerId) => {
/**
* @param {any} req
* @param {function(): void} [closeHandler]
- * @return {function(string[]): void}
+ * @returns {function(string[], SubscriptionListener): void}
*/
- const streamHttpEnd = (req, closeHandler = undefined) => (ids) => {
+
+ const streamHttpEnd = (req, closeHandler = undefined) => (ids, listener) => {
req.on('close', () => {
ids.forEach(id => {
- unsubscribe(id);
+ unsubscribe(id, listener);
});
if (closeHandler) {
@@ -1092,7 +1162,7 @@ const startWorker = async (workerId) => {
* @typedef WebSocketSession
* @property {any} socket
* @property {any} request
- * @property {Object.} subscriptions
+ * @property {Object.} subscriptions
*/
/**