Merge tag 'v3.3.1'

Mike Barnes 2022-02-03 20:13:58 +11:00
commit 42437cacf0
120 changed files with 2046 additions and 460 deletions

View file

@ -160,8 +160,45 @@ jobs:
name: Create database name: Create database
command: ./bin/rails db:create command: ./bin/rails db:create
- run: - run:
name: Run migrations command: ./bin/rails db:migrate VERSION=20171010025614
name: Run migrations up to v2.0.0
- run:
command: ./bin/rails tests:migrations:populate_v2
name: Populate database with test data
- run:
command: ./bin/rails db:migrate command: ./bin/rails db:migrate
name: Run all remaining migrations
test-two-step-migrations:
<<: *defaults
docker:
- image: circleci/ruby:2.7-buster-node
environment: *ruby_environment
- image: circleci/postgres:12.2
environment:
POSTGRES_USER: root
POSTGRES_HOST_AUTH_METHOD: trust
- image: circleci/redis:5-alpine
steps:
- *attach_workspace
- *install_system_dependencies
- run:
command: ./bin/rails db:create
name: Create database
- run:
command: ./bin/rails db:migrate VERSION=20171010025614
name: Run migrations up to v2.0.0
- run:
command: ./bin/rails tests:migrations:populate_v2
name: Populate database with test data
- run:
command: ./bin/rails db:migrate
name: Run all pre-deployment migrations
environment:
SKIP_POST_DEPLOYMENT_MIGRATIONS: true
- run:
command: ./bin/rails db:migrate
name: Run all post-deployment remaining migrations
test-ruby2.7: test-ruby2.7:
<<: *defaults <<: *defaults
@ -233,6 +270,9 @@ workflows:
- test-migrations: - test-migrations:
requires: requires:
- install-ruby2.7 - install-ruby2.7
- test-two-step-migrations:
requires:
- install-ruby2.7
- test-ruby2.7: - test-ruby2.7:
requires: requires:
- install-ruby2.7 - install-ruby2.7

View file

@ -4,6 +4,12 @@
# not demonstrate all available configuration options. Please look at # not demonstrate all available configuration options. Please look at
# https://docs.joinmastodon.org/admin/config/ for the full documentation. # https://docs.joinmastodon.org/admin/config/ for the full documentation.
# Note that this file accepts slightly different syntax depending on whether
# you are using `docker-compose` or not. In particular, if you use
# `docker-compose`, the value of each declared variable will be taken verbatim,
# including surrounding quotes.
# See: https://github.com/mastodon/mastodon/issues/16895
# Federation # Federation
# ---------- # ----------
# This identifies your server and cannot be changed safely later # This identifies your server and cannot be changed safely later
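
To make the quoting note above concrete, here is a minimal illustration using the `LOCAL_DOMAIN` variable that appears further down in this sample file (a sketch only, not part of the shipped sample):
# Read by the dotenv gem (non-Docker setups): surrounding quotes are stripped,
# so both of these resolve to example.com.
#   LOCAL_DOMAIN=example.com
#   LOCAL_DOMAIN="example.com"
# Read by docker-compose: the value is taken verbatim, quotes included,
# so the second form becomes "example.com" with literal quote characters.
# The unquoted form is therefore the safe choice for both setups.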

34 .github/workflows/build-image.yml vendored Normal file
View file

@ -0,0 +1,34 @@
name: Build container image
on:
workflow_dispatch:
push:
branches:
- "main"
tags:
- "*"
jobs:
build-image:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: docker/setup-buildx-action@v1
- uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- uses: docker/metadata-action@v3
id: meta
with:
images: tootsuite/mastodon
flavor: |
latest=auto
tags: |
type=edge,branch=main
type=semver,pattern={{ raw }}
- uses: docker/build-push-action@v2
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
cache-from: type=registry,ref=tootsuite/mastodon:latest
cache-to: type=inline

View file

@ -3,6 +3,72 @@ Changelog
All notable changes to this project will be documented in this file. All notable changes to this project will be documented in this file.
## [3.3.1] - 2022-01-31
### Added
- Add more advanced migration tests ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/17393))
- Add github workflow to build Docker images ([unasuke](https://github.com/mastodon/mastodon/pull/16973), [Gargron](https://github.com/mastodon/mastodon/pull/16980), [Gargron](https://github.com/mastodon/mastodon/pull/17000))
### Fixed
- Update some dependencies that were broken or unavailable
- Fix some old migrations failing when skipping releases ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/17394))
- Fix migrations script failing in certain edge cases ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/17398))
- Fix media redownload worker retrying on unexpected response codes ([Gargron](https://github.com/tootsuite/mastodon/pull/16111))
- Fix thread resolve worker retrying when status no longer exists ([Gargron](https://github.com/tootsuite/mastodon/pull/16109))
- Fix n+1 queries when rendering statuses in REST API ([abcang](https://github.com/tootsuite/mastodon/pull/15641))
- Fix remote reporters not receiving suspend/unsuspend activities ([Gargron](https://github.com/tootsuite/mastodon/pull/16050))
- Fix understanding (not fully qualified) `as:Public` and `Public` ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15948))
- Fix actor update not being distributed on profile picture deletion ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15461))
- Fix processing of incoming Delete activities ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/16084))
- Fix processing of incoming Block activities ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15546))
- Fix processing of incoming Update activities of unknown accounts ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15514))
- Fix URIs of repeat follow requests not being recorded ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15662))
- Fix error on requests with no `Digest` header ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15782))
- Fix activity object not requiring signature in secure mode ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15592))
- Fix database serialization failure returning HTTP 500 ([Gargron](https://github.com/tootsuite/mastodon/pull/16101))
- Fix media processing getting stuck on too much stdin/stderr ([Gargron](https://github.com/tootsuite/mastodon/pull/16136))
- Fix `tootctl maintenance fix-duplicates` failures ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15923), [ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15515))
- Fix blocking someone not clearing up list feeds ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/16205))
- Fix edge case where follow limit interferes with accepting a follow ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/16098))
- Fix reports of already suspended accounts being recorded ([Gargron](https://github.com/tootsuite/mastodon/pull/16047))
- Fix sign-up restrictions based on IP addresses not being enforced ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15607))
- Fix race conditions on account migration creation ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15597))
- Fix not being able to change world filter expiration back to “Never” ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15858))
- Fix error when muting users with `duration` in REST API ([Tak](https://github.com/tootsuite/mastodon/pull/15516))
- Fix wrong URL to custom CSS when `CDN_HOST` is used ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15927))
- Fix `tootctl accounts unfollow` ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15639))
- Fix `tootctl emoji import` wasting time on MacOS shadow files ([cortices](https://github.com/tootsuite/mastodon/pull/15430))
- Fix `tootctl emoji import` not treating shortcodes as case-insensitive ([angristan](https://github.com/tootsuite/mastodon/pull/15738))
- Fix some issues with SAML account creation ([Gargron](https://github.com/tootsuite/mastodon/pull/15222), [kaiyou](https://github.com/tootsuite/mastodon/pull/15511))
- Fix instance actor not being automatically created if it wasn't seeded properly ([ClearlyClaire](https://github.com/tootsuite/mastodon/pull/15693))
- Fix app name, website and redirect URIs not having a maximum length ([Gargron](https://github.com/tootsuite/mastodon/pull/16042))
- Fix some ActivityPub identifiers in server actor outbox ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16343))
- Fix custom CSS path setting cookies and being uncacheable due to it ([tribela](https://github.com/mastodon/mastodon/pull/16314))
- Fix some redis locks auto-releasing too fast ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16276), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/16291))
- Fix migration script not being able to run if it fails midway ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16312))
- Fix PWA not being usable from alternate domains ([HolgerHuo](https://github.com/mastodon/mastodon/pull/16714))
- Fix scheduling a status decreasing status count ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16791))
- Fix followers synchronization mechanism not working when URI has empty path ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16744))
- Fix serialization of counts in REST API when user hides their network ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16418))
- Fix `tootctl self-destruct` not sending delete activities for recently-suspended accounts ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16688))
- Fix `mastodon:setup` generated env-file syntax ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16896), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/16976))
- Fix link previews being incorrectly generated from earlier links ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16885))
- Fix wrong `to`/`cc` values for remote groups in ActivityPub ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16700))
- Fix mentions with non-ascii TLDs not being processed ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16689))
- Fix authentication failures halfway through a sign-in attempt ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16607), [ClearlyClaire](https://github.com/mastodon/mastodon/pull/16792))
- Fix suspended accounts statuses being merged back into timelines ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16628))
- Fix crash when encountering invalid account fields ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16598))
- Fix invalid blurhash handling for remote activities ([noellabo](https://github.com/mastodon/mastodon/pull/16583))
- Fix newlines being added to account notes when an account moves ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16415), [noellabo](https://github.com/mastodon/mastodon/pull/16576))
- Fix logging out from one browser logging out all other sessions ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16943))
- Fix confusing error when WebFinger request returns empty document ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16986))
- Fix upload of remote media with OpenStack Swift sometimes failing ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16998))
- Fix Docker build ([tribela](https://github.com/mastodon/mastodon/pull/17188))
### Security
- Fix user notes not having a length limit ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16942))
- Fix revoking a specific session not working ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/16943))
- Fix filtering DMs from non-followed users ([ClearlyClaire](https://github.com/mastodon/mastodon/pull/17042))
## [3.3.0] - 2020-12-27 ## [3.3.0] - 2020-12-27
### Added ### Added

View file

@ -71,8 +71,8 @@ RUN npm install -g yarn && \
COPY Gemfile* package.json yarn.lock /opt/mastodon/ COPY Gemfile* package.json yarn.lock /opt/mastodon/
RUN cd /opt/mastodon && \ RUN cd /opt/mastodon && \
bundle config set deployment 'true' && \ bundle config set --local deployment 'true' && \
bundle config set without 'development test' && \ bundle config set --local without 'development test' && \
bundle install -j$(nproc) && \ bundle install -j$(nproc) && \
yarn install --pure-lockfile yarn install --pure-lockfile

View file

@ -21,8 +21,6 @@ gem 'aws-sdk-s3', '~> 1.85', require: false
gem 'fog-core', '<= 2.1.0' gem 'fog-core', '<= 2.1.0'
gem 'fog-openstack', '~> 0.3', require: false gem 'fog-openstack', '~> 0.3', require: false
gem 'paperclip', '~> 6.0' gem 'paperclip', '~> 6.0'
gem 'paperclip-av-transcoder', '~> 0.6'
gem 'streamio-ffmpeg', '~> 3.0'
gem 'blurhash', '~> 0.1' gem 'blurhash', '~> 0.1'
gem 'active_model_serializers', '~> 0.10' gem 'active_model_serializers', '~> 0.10'

View file

@ -75,8 +75,6 @@ GEM
ast (2.4.1) ast (2.4.1)
attr_encrypted (3.1.0) attr_encrypted (3.1.0)
encryptor (~> 3.0.0) encryptor (~> 3.0.0)
av (0.9.0)
cocaine (~> 0.5.3)
awrence (1.1.1) awrence (1.1.1)
aws-eventstream (1.1.0) aws-eventstream (1.1.0)
aws-partitions (1.397.0) aws-partitions (1.397.0)
@ -151,8 +149,6 @@ GEM
cld3 (3.3.0) cld3 (3.3.0)
ffi (>= 1.1.0, < 1.12.0) ffi (>= 1.1.0, < 1.12.0)
climate_control (0.2.0) climate_control (0.2.0)
cocaine (0.5.8)
climate_control (>= 0.0.3, < 1.0)
coderay (1.1.3) coderay (1.1.3)
color_diff (0.1) color_diff (0.1)
concurrent-ruby (1.1.7) concurrent-ruby (1.1.7)
@ -346,9 +342,11 @@ GEM
mime-types (3.3.1) mime-types (3.3.1)
mime-types-data (~> 3.2015) mime-types-data (~> 3.2015)
mime-types-data (3.2020.0512) mime-types-data (3.2020.0512)
mimemagic (0.3.5) mimemagic (0.3.10)
nokogiri (~> 1)
rake
mini_mime (1.0.2) mini_mime (1.0.2)
mini_portile2 (2.4.0) mini_portile2 (2.7.1)
minitest (5.14.2) minitest (5.14.2)
msgpack (1.3.3) msgpack (1.3.3)
multi_json (1.15.0) multi_json (1.15.0)
@ -358,9 +356,10 @@ GEM
net-ssh (>= 2.6.5, < 7.0.0) net-ssh (>= 2.6.5, < 7.0.0)
net-ssh (6.1.0) net-ssh (6.1.0)
nio4r (2.5.4) nio4r (2.5.4)
nokogiri (1.10.10) nokogiri (1.13.1)
mini_portile2 (~> 2.4.0) mini_portile2 (~> 2.7.0)
nokogumbo (2.0.2) racc (~> 1.4)
nokogumbo (2.0.5)
nokogiri (~> 1.8, >= 1.8.4) nokogiri (~> 1.8, >= 1.8.4)
nsa (0.2.7) nsa (0.2.7)
activesupport (>= 4.2, < 6) activesupport (>= 4.2, < 6)
@ -391,9 +390,6 @@ GEM
mime-types mime-types
mimemagic (~> 0.3.0) mimemagic (~> 0.3.0)
terrapin (~> 0.6.0) terrapin (~> 0.6.0)
paperclip-av-transcoder (0.6.4)
av (~> 0.9.0)
paperclip (>= 2.5.2)
parallel (1.20.1) parallel (1.20.1)
parallel_tests (3.4.0) parallel_tests (3.4.0)
parallel parallel
@ -432,6 +428,7 @@ GEM
pundit (2.1.0) pundit (2.1.0)
activesupport (>= 3.0.0) activesupport (>= 3.0.0)
raabro (1.3.3) raabro (1.3.3)
racc (1.6.0)
rack (2.2.3) rack (2.2.3)
rack-attack (6.3.1) rack-attack (6.3.1)
rack (>= 1.0, < 3) rack (>= 1.0, < 3)
@ -605,8 +602,6 @@ GEM
stackprof (0.2.16) stackprof (0.2.16)
statsd-ruby (1.4.0) statsd-ruby (1.4.0)
stoplight (2.2.1) stoplight (2.2.1)
streamio-ffmpeg (3.0.2)
multi_json (~> 1.8)
strong_migrations (0.7.2) strong_migrations (0.7.2)
activerecord (>= 5) activerecord (>= 5)
temple (0.8.2) temple (0.8.2)
@ -751,7 +746,6 @@ DEPENDENCIES
omniauth-saml (~> 1.10) omniauth-saml (~> 1.10)
ox (~> 2.13) ox (~> 2.13)
paperclip (~> 6.0) paperclip (~> 6.0)
paperclip-av-transcoder (~> 0.6)
parallel (~> 1.20) parallel (~> 1.20)
parallel_tests (~> 3.4) parallel_tests (~> 3.4)
parslet parslet
@ -797,7 +791,6 @@ DEPENDENCIES
sprockets-rails (~> 3.2) sprockets-rails (~> 3.2)
stackprof stackprof
stoplight (~> 2.2.1) stoplight (~> 2.2.1)
streamio-ffmpeg (~> 3.0)
strong_migrations (~> 0.7) strong_migrations (~> 0.7)
thor (~> 1.0) thor (~> 1.0)
tty-prompt (~> 0.22) tty-prompt (~> 0.22)

View file

@ -19,11 +19,11 @@ class ActivityPub::FollowersSynchronizationsController < ActivityPub::BaseContro
private private
def uri_prefix def uri_prefix
signed_request_account.uri[/http(s?):\/\/[^\/]+\//] signed_request_account.uri[Account::URL_PREFIX_RE]
end end
def set_items def set_items
@items = @account.followers.where(Account.arel_table[:uri].matches(uri_prefix + '%', false, true)).pluck(:uri) @items = @account.followers.where(Account.arel_table[:uri].matches("#{Account.sanitize_sql_like(uri_prefix)}/%", false, true)).or(@account.followers.where(uri: uri_prefix)).pluck(:uri)
end end
def collection_presenter def collection_presenter

View file

@ -5,7 +5,7 @@ class ActivityPub::InboxesController < ActivityPub::BaseController
include JsonLdHelper include JsonLdHelper
include AccountOwnedConcern include AccountOwnedConcern
before_action :skip_unknown_actor_delete before_action :skip_unknown_actor_activity
before_action :require_signature! before_action :require_signature!
skip_before_action :authenticate_user! skip_before_action :authenticate_user!
@ -18,13 +18,13 @@ class ActivityPub::InboxesController < ActivityPub::BaseController
private private
def skip_unknown_actor_delete def skip_unknown_actor_activity
head 202 if unknown_deleted_account? head 202 if unknown_affected_account?
end end
def unknown_deleted_account? def unknown_affected_account?
json = Oj.load(body, mode: :strict) json = Oj.load(body, mode: :strict)
json.is_a?(Hash) && json['type'] == 'Delete' && json['actor'].present? && json['actor'] == value_or_id(json['object']) && !Account.where(uri: json['actor']).exists? json.is_a?(Hash) && %w(Delete Update).include?(json['type']) && json['actor'].present? && json['actor'] == value_or_id(json['object']) && !Account.where(uri: json['actor']).exists?
rescue Oj::ParseError rescue Oj::ParseError
false false
end end

View file

@ -29,7 +29,7 @@ class ActivityPub::OutboxesController < ActivityPub::BaseController
) )
else else
ActivityPub::CollectionPresenter.new( ActivityPub::CollectionPresenter.new(
id: account_outbox_url(@account), id: outbox_url,
type: :ordered, type: :ordered,
size: @account.statuses_count, size: @account.statuses_count,
first: outbox_url(page: true), first: outbox_url(page: true),
@ -47,11 +47,11 @@ class ActivityPub::OutboxesController < ActivityPub::BaseController
end end
def next_page def next_page
account_outbox_url(@account, page: true, max_id: @statuses.last.id) if @statuses.size == LIMIT outbox_url(page: true, max_id: @statuses.last.id) if @statuses.size == LIMIT
end end
def prev_page def prev_page
account_outbox_url(@account, page: true, min_id: @statuses.first.id) unless @statuses.empty? outbox_url(page: true, min_id: @statuses.first.id) unless @statuses.empty?
end end
def set_statuses def set_statuses

View file

@ -42,7 +42,7 @@ class Api::V1::AccountsController < Api::BaseController
end end
def mute def mute
MuteService.new.call(current_user.account, @account, notifications: truthy_param?(:notifications), duration: (params[:duration] || 0)) MuteService.new.call(current_user.account, @account, notifications: truthy_param?(:notifications), duration: (params[:duration]&.to_i || 0))
render json: @account, serializer: REST::RelationshipSerializer, relationships: relationships render json: @account, serializer: REST::RelationshipSerializer, relationships: relationships
end end

View file

@ -20,17 +20,16 @@ class ApplicationController < ActionController::Base
helper_method :use_seamless_external_login? helper_method :use_seamless_external_login?
helper_method :whitelist_mode? helper_method :whitelist_mode?
rescue_from ActionController::RoutingError, with: :not_found rescue_from ActionController::ParameterMissing, Paperclip::AdapterRegistry::NoHandlerError, with: :bad_request
rescue_from ActionController::InvalidAuthenticityToken, with: :unprocessable_entity
rescue_from ActionController::UnknownFormat, with: :not_acceptable
rescue_from ActionController::ParameterMissing, with: :bad_request
rescue_from Paperclip::AdapterRegistry::NoHandlerError, with: :bad_request
rescue_from ActiveRecord::RecordNotFound, with: :not_found
rescue_from Mastodon::NotPermittedError, with: :forbidden rescue_from Mastodon::NotPermittedError, with: :forbidden
rescue_from HTTP::Error, OpenSSL::SSL::SSLError, with: :internal_server_error rescue_from ActionController::RoutingError, ActiveRecord::RecordNotFound, with: :not_found
rescue_from Mastodon::RaceConditionError, Seahorse::Client::NetworkingError, Stoplight::Error::RedLight, with: :service_unavailable rescue_from ActionController::UnknownFormat, with: :not_acceptable
rescue_from ActionController::InvalidAuthenticityToken, with: :unprocessable_entity
rescue_from Mastodon::RateLimitExceededError, with: :too_many_requests rescue_from Mastodon::RateLimitExceededError, with: :too_many_requests
rescue_from HTTP::Error, OpenSSL::SSL::SSLError, with: :internal_server_error
rescue_from Mastodon::RaceConditionError, Seahorse::Client::NetworkingError, Stoplight::Error::RedLight, ActiveRecord::SerializationFailure, with: :service_unavailable
before_action :store_current_location, except: :raise_not_found, unless: :devise_controller? before_action :store_current_location, except: :raise_not_found, unless: :devise_controller?
before_action :require_functional!, if: :user_signed_in? before_action :require_functional!, if: :user_signed_in?

View file

@ -10,7 +10,6 @@ class Auth::PasswordsController < Devise::PasswordsController
super do |resource| super do |resource|
if resource.errors.empty? if resource.errors.empty?
resource.session_activations.destroy_all resource.session_activations.destroy_all
resource.forget_me!
end end
end end
end end

View file

@ -1,7 +1,6 @@
# frozen_string_literal: true # frozen_string_literal: true
class Auth::RegistrationsController < Devise::RegistrationsController class Auth::RegistrationsController < Devise::RegistrationsController
include Devise::Controllers::Rememberable
include RegistrationSpamConcern include RegistrationSpamConcern
layout :determine_layout layout :determine_layout
@ -30,8 +29,6 @@ class Auth::RegistrationsController < Devise::RegistrationsController
super do |resource| super do |resource|
if resource.saved_change_to_encrypted_password? if resource.saved_change_to_encrypted_password?
resource.clear_other_sessions(current_session.session_id) resource.clear_other_sessions(current_session.session_id)
resource.forget_me!
remember_me(resource)
end end
end end
end end

View file

@ -1,8 +1,6 @@
# frozen_string_literal: true # frozen_string_literal: true
class Auth::SessionsController < Devise::SessionsController class Auth::SessionsController < Devise::SessionsController
include Devise::Controllers::Rememberable
layout 'auth' layout 'auth'
skip_before_action :require_no_authentication, only: [:create] skip_before_action :require_no_authentication, only: [:create]
@ -26,7 +24,6 @@ class Auth::SessionsController < Devise::SessionsController
def create def create
super do |resource| super do |resource|
resource.update_sign_in!(request, new_sign_in: true) resource.update_sign_in!(request, new_sign_in: true)
remember_me(resource)
flash.delete(:notice) flash.delete(:notice)
end end
end end
@ -40,7 +37,7 @@ class Auth::SessionsController < Devise::SessionsController
end end
def webauthn_options def webauthn_options
user = find_user user = User.find_by(id: session[:attempt_user_id])
if user.webauthn_enabled? if user.webauthn_enabled?
options_for_get = WebAuthn::Credential.options_for_get( options_for_get = WebAuthn::Credential.options_for_get(
@ -58,15 +55,19 @@ class Auth::SessionsController < Devise::SessionsController
protected protected
def find_user def find_user
if session[:attempt_user_id] if user_params[:email].present?
find_user_from_params
elsif session[:attempt_user_id]
User.find_by(id: session[:attempt_user_id]) User.find_by(id: session[:attempt_user_id])
else end
end
def find_user_from_params
user = User.authenticate_with_ldap(user_params) if Devise.ldap_authentication user = User.authenticate_with_ldap(user_params) if Devise.ldap_authentication
user ||= User.authenticate_with_pam(user_params) if Devise.pam_authentication user ||= User.authenticate_with_pam(user_params) if Devise.pam_authentication
user ||= User.find_for_authentication(email: user_params[:email]) user ||= User.find_for_authentication(email: user_params[:email])
user user
end end
end
def user_params def user_params
params.require(:user).permit(:email, :password, :otp_attempt, :sign_in_token_attempt, credential: {}) params.require(:user).permit(:email, :password, :otp_attempt, :sign_in_token_attempt, credential: {})

View file

@ -16,21 +16,24 @@ module SignInTokenAuthenticationConcern
end end
def authenticate_with_sign_in_token def authenticate_with_sign_in_token
user = self.resource = find_user if user_params[:email].present?
user = self.resource = find_user_from_params
prompt_for_sign_in_token(user) if user&.external_or_valid_password?(user_params[:password])
elsif session[:attempt_user_id]
user = self.resource = User.find_by(id: session[:attempt_user_id])
return if user.nil?
if user.present? && session[:attempt_user_id].present? && session[:attempt_user_updated_at] != user.updated_at.to_s if session[:attempt_user_updated_at] != user.updated_at.to_s
restart_session restart_session
elsif user_params.key?(:sign_in_token_attempt) && session[:attempt_user_id] elsif user_params.key?(:sign_in_token_attempt)
authenticate_with_sign_in_token_attempt(user) authenticate_with_sign_in_token_attempt(user)
elsif user.present? && user.external_or_valid_password?(user_params[:password]) end
prompt_for_sign_in_token(user)
end end
end end
def authenticate_with_sign_in_token_attempt(user) def authenticate_with_sign_in_token_attempt(user)
if valid_sign_in_token_attempt?(user) if valid_sign_in_token_attempt?(user)
clear_attempt_from_session clear_attempt_from_session
remember_me(user)
sign_in(user) sign_in(user)
else else
flash.now[:alert] = I18n.t('users.invalid_sign_in_token') flash.now[:alert] = I18n.t('users.invalid_sign_in_token')

View file

@ -133,6 +133,7 @@ module SignatureVerification
def verify_body_digest! def verify_body_digest!
return unless signed_headers.include?('digest') return unless signed_headers.include?('digest')
raise SignatureVerificationError, 'Digest header missing' unless request.headers.key?('Digest')
digests = request.headers['Digest'].split(',').map { |digest| digest.split('=', 2) }.map { |key, value| [key.downcase, value] } digests = request.headers['Digest'].split(',').map { |digest| digest.split('=', 2) }.map { |key, value| [key.downcase, value] }
sha256 = digests.assoc('sha-256') sha256 = digests.assoc('sha-256')

View file

@ -35,16 +35,20 @@ module TwoFactorAuthenticationConcern
end end
def authenticate_with_two_factor def authenticate_with_two_factor
user = self.resource = find_user if user_params[:email].present?
user = self.resource = find_user_from_params
prompt_for_two_factor(user) if user&.external_or_valid_password?(user_params[:password])
elsif session[:attempt_user_id]
user = self.resource = User.find_by(id: session[:attempt_user_id])
return if user.nil?
if user.present? && session[:attempt_user_id].present? && session[:attempt_user_updated_at] != user.updated_at.to_s if session[:attempt_user_updated_at] != user.updated_at.to_s
restart_session restart_session
elsif user.webauthn_enabled? && user_params.key?(:credential) && session[:attempt_user_id] elsif user.webauthn_enabled? && user_params.key?(:credential)
authenticate_with_two_factor_via_webauthn(user) authenticate_with_two_factor_via_webauthn(user)
elsif user_params.key?(:otp_attempt) && session[:attempt_user_id] elsif user_params.key?(:otp_attempt)
authenticate_with_two_factor_via_otp(user) authenticate_with_two_factor_via_otp(user)
elsif user.present? && user.external_or_valid_password?(user_params[:password]) end
prompt_for_two_factor(user)
end end
end end
@ -53,7 +57,6 @@ module TwoFactorAuthenticationConcern
if valid_webauthn_credential?(user, webauthn_credential) if valid_webauthn_credential?(user, webauthn_credential)
clear_attempt_from_session clear_attempt_from_session
remember_me(user)
sign_in(user) sign_in(user)
render json: { redirect_path: root_path }, status: :ok render json: { redirect_path: root_path }, status: :ok
else else
@ -64,7 +67,6 @@ module TwoFactorAuthenticationConcern
def authenticate_with_two_factor_via_otp(user) def authenticate_with_two_factor_via_otp(user)
if valid_otp_attempt?(user) if valid_otp_attempt?(user)
clear_attempt_from_session clear_attempt_from_session
remember_me(user)
sign_in(user) sign_in(user)
else else
flash.now[:alert] = I18n.t('users.invalid_otp_token') flash.now[:alert] = I18n.t('users.invalid_otp_token')

View file

@ -3,11 +3,16 @@
class CustomCssController < ApplicationController class CustomCssController < ApplicationController
skip_before_action :store_current_location skip_before_action :store_current_location
skip_before_action :require_functional! skip_before_action :require_functional!
skip_before_action :update_user_sign_in
skip_before_action :set_session_activity
skip_around_action :set_locale
before_action :set_cache_headers before_action :set_cache_headers
def show def show
expires_in 3.minutes, public: true expires_in 3.minutes, public: true
request.session_options[:skip] = true
render plain: Setting.custom_css || '', content_type: 'text/css' render plain: Setting.custom_css || '', content_type: 'text/css'
end end
end end

View file

@ -85,7 +85,7 @@ class FollowerAccountsController < ApplicationController
if page_requested? || !@account.user_hides_network? if page_requested? || !@account.user_hides_network?
# Return all fields # Return all fields
else else
%i(id type totalItems) %i(id type total_items)
end end
end end
end end

View file

@ -85,7 +85,7 @@ class FollowingAccountsController < ApplicationController
if page_requested? || !@account.user_hides_network? if page_requested? || !@account.user_hides_network?
# Return all fields # Return all fields
else else
%i(id type totalItems) %i(id type total_items)
end end
end end
end end

View file

@ -13,7 +13,7 @@ class InstanceActorsController < ApplicationController
private private
def set_account def set_account
@account = Account.find(-99) @account = Account.representative
end end
def restrict_fields_to def restrict_fields_to

View file

@ -45,7 +45,7 @@ class MediaProxyController < ApplicationController
end end
def lock_options def lock_options
{ redis: Redis.current, key: "media_download:#{params[:id]}" } { redis: Redis.current, key: "media_download:#{params[:id]}", autorelease: 15.minutes.seconds }
end end
def reject_media? def reject_media?

View file

@ -7,8 +7,12 @@ module Settings
def destroy def destroy
if valid_picture? if valid_picture?
msg = I18n.t('generic.changes_saved_msg') if UpdateAccountService.new.call(@account, { @picture => nil, "#{@picture}_remote_url" => '' }) if UpdateAccountService.new.call(@account, { @picture => nil, "#{@picture}_remote_url" => '' })
redirect_to settings_profile_path, notice: msg, status: 303 ActivityPub::UpdateDistributionWorker.perform_async(@account.id)
redirect_to settings_profile_path, notice: I18n.t('generic.changes_saved_msg'), status: 303
else
redirect_to settings_profile_path
end
else else
bad_request bad_request
end end

View file

@ -8,7 +8,7 @@ class StatusesController < ApplicationController
layout 'public' layout 'public'
before_action :require_signature!, only: :show, if: -> { request.format == :json && authorized_fetch_mode? } before_action :require_signature!, only: [:show, :activity], if: -> { request.format == :json && authorized_fetch_mode? }
before_action :set_status before_action :set_status
before_action :set_instance_presenter before_action :set_instance_presenter
before_action :set_link_headers before_action :set_link_headers

View file

@ -0,0 +1,25 @@
# frozen_string_literal: true
class AccountReachFinder
def initialize(account)
@account = account
end
def inboxes
(followers_inboxes + reporters_inboxes + relay_inboxes).uniq
end
private
def followers_inboxes
@account.followers.inboxes
end
def reporters_inboxes
Account.where(id: @account.targeted_reports.select(:account_id)).inboxes
end
def relay_inboxes
Relay.enabled.pluck(:inbox_url)
end
end
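
A minimal usage sketch for the new class above, assuming `account` is an Account record already loaded elsewhere (for example inside a delivery worker); it only exercises the public method defined in this file:
# Sketch only: collect the deduplicated inbox URLs this account can reach
# (its followers, accounts that reported it, and enabled relays).
reachable_inboxes = AccountReachFinder.new(account).inboxes
reachable_inboxes.each { |inbox_url| puts inbox_url }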

View file

@ -144,7 +144,7 @@ class ActivityPub::Activity
end end
def delete_later!(uri) def delete_later!(uri)
redis.setex("delete_upon_arrival:#{@account.id}:#{uri}", 6.hours.seconds, uri) redis.setex("delete_upon_arrival:#{@account.id}:#{uri}", 6.hours.seconds, true)
end end
def status_from_object def status_from_object
@ -210,12 +210,22 @@ class ActivityPub::Activity
end end
end end
def lock_or_return(key, expire_after = 7.days.seconds) def lock_or_return(key, expire_after = 2.hours.seconds)
yield if redis.set(key, true, nx: true, ex: expire_after) yield if redis.set(key, true, nx: true, ex: expire_after)
ensure ensure
redis.del(key) redis.del(key)
end end
def lock_or_fail(key, expire_after = 15.minutes.seconds)
RedisLock.acquire({ redis: Redis.current, key: key, autorelease: expire_after }) do |lock|
if lock.acquired?
yield
else
raise Mastodon::RaceConditionError
end
end
end
def fetch? def fetch?
!@options[:delivery] !@options[:delivery]
end end

View file

@ -4,8 +4,7 @@ class ActivityPub::Activity::Announce < ActivityPub::Activity
def perform def perform
return reject_payload! if delete_arrived_first?(@json['id']) || !related_to_local_activity? return reject_payload! if delete_arrived_first?(@json['id']) || !related_to_local_activity?
RedisLock.acquire(lock_options) do |lock| lock_or_fail("announce:#{@object['id']}") do
if lock.acquired?
original_status = status_from_object original_status = status_from_object
return reject_payload! if original_status.nil? || !announceable?(original_status) return reject_payload! if original_status.nil? || !announceable?(original_status)
@ -24,9 +23,6 @@ class ActivityPub::Activity::Announce < ActivityPub::Activity
) )
distribute(@status) distribute(@status)
else
raise Mastodon::RaceConditionError
end
end end
@status @status
@ -43,9 +39,9 @@ class ActivityPub::Activity::Announce < ActivityPub::Activity
end end
def visibility_from_audience def visibility_from_audience
if audience_to.include?(ActivityPub::TagManager::COLLECTIONS[:public]) if audience_to.any? { |to| ActivityPub::TagManager.instance.public_collection?(to) }
:public :public
elsif audience_cc.include?(ActivityPub::TagManager::COLLECTIONS[:public]) elsif audience_cc.any? { |cc| ActivityPub::TagManager.instance.public_collection?(cc) }
:unlisted :unlisted
elsif audience_to.include?(@account.followers_url) elsif audience_to.include?(@account.followers_url)
:private :private
@ -69,8 +65,4 @@ class ActivityPub::Activity::Announce < ActivityPub::Activity
def reblog_of_local_status? def reblog_of_local_status?
status_from_uri(object_uri)&.account&.local? status_from_uri(object_uri)&.account&.local?
end end
def lock_options
{ redis: Redis.current, key: "announce:#{@object['id']}" }
end
end end

View file

@ -11,8 +11,13 @@ class ActivityPub::Activity::Block < ActivityPub::Activity
return return
end end
UnfollowService.new.call(@account, target_account) if @account.following?(target_account)
UnfollowService.new.call(target_account, @account) if target_account.following?(@account) UnfollowService.new.call(target_account, @account) if target_account.following?(@account)
RejectFollowService.new.call(target_account, @account) if target_account.requested?(@account)
@account.block!(target_account, uri: @json['id']) unless delete_arrived_first?(@json['id']) unless delete_arrived_first?(@json['id'])
BlockWorker.perform_async(@account.id, target_account.id)
@account.block!(target_account, uri: @json['id'])
end
end end
end end

View file

@ -45,8 +45,7 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
def create_status def create_status
return reject_payload! if unsupported_object_type? || invalid_origin?(object_uri) || tombstone_exists? || !related_to_local_activity? return reject_payload! if unsupported_object_type? || invalid_origin?(object_uri) || tombstone_exists? || !related_to_local_activity?
RedisLock.acquire(lock_options) do |lock| lock_or_fail("create:#{object_uri}") do
if lock.acquired?
return if delete_arrived_first?(object_uri) || poll_vote? # rubocop:disable Lint/NonLocalExitFromIterator return if delete_arrived_first?(object_uri) || poll_vote? # rubocop:disable Lint/NonLocalExitFromIterator
@status = find_existing_status @status = find_existing_status
@ -56,9 +55,6 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
elsif @options[:delivered_to_account_id].present? elsif @options[:delivered_to_account_id].present?
postprocess_audience_and_deliver postprocess_audience_and_deliver
end end
else
raise Mastodon::RaceConditionError
end
end end
@status @status
@ -123,7 +119,7 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
def process_audience def process_audience
(audience_to + audience_cc).uniq.each do |audience| (audience_to + audience_cc).uniq.each do |audience|
next if audience == ActivityPub::TagManager::COLLECTIONS[:public] next if ActivityPub::TagManager.instance.public_collection?(audience)
# Unlike with tags, there is no point in resolving accounts we don't already # Unlike with tags, there is no point in resolving accounts we don't already
# know here, because silent mentions would only be used for local access # know here, because silent mentions would only be used for local access
@ -314,13 +310,9 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
poll = replied_to_status.preloadable_poll poll = replied_to_status.preloadable_poll
already_voted = true already_voted = true
RedisLock.acquire(poll_lock_options) do |lock| lock_or_fail("vote:#{replied_to_status.poll_id}:#{@account.id}") do
if lock.acquired?
already_voted = poll.votes.where(account: @account).exists? already_voted = poll.votes.where(account: @account).exists?
poll.votes.create!(account: @account, choice: poll.options.index(@object['name']), uri: object_uri) poll.votes.create!(account: @account, choice: poll.options.index(@object['name']), uri: object_uri)
else
raise Mastodon::RaceConditionError
end
end end
increment_voters_count! unless already_voted increment_voters_count! unless already_voted
@ -356,9 +348,9 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
end end
def visibility_from_audience def visibility_from_audience
if audience_to.include?(ActivityPub::TagManager::COLLECTIONS[:public]) if audience_to.any? { |to| ActivityPub::TagManager.instance.public_collection?(to) }
:public :public
elsif audience_cc.include?(ActivityPub::TagManager::COLLECTIONS[:public]) elsif audience_cc.any? { |cc| ActivityPub::TagManager.instance.public_collection?(cc) }
:unlisted :unlisted
elsif audience_to.include?(@account.followers_url) elsif audience_to.include?(@account.followers_url)
:private :private
@ -455,10 +447,14 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
end end
def supported_blurhash?(blurhash) def supported_blurhash?(blurhash)
components = blurhash.blank? ? nil : Blurhash.components(blurhash) components = blurhash.blank? || !blurhash_valid_chars?(blurhash) ? nil : Blurhash.components(blurhash)
components.present? && components.none? { |comp| comp > 5 } components.present? && components.none? { |comp| comp > 5 }
end end
def blurhash_valid_chars?(blurhash)
/^[\w#$%*+-.:;=?@\[\]^{|}~]+$/.match?(blurhash)
end
def skip_download? def skip_download?
return @skip_download if defined?(@skip_download) return @skip_download if defined?(@skip_download)
@ -513,12 +509,4 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
poll.reload poll.reload
retry retry
end end
def lock_options
{ redis: Redis.current, key: "create:#{object_uri}" }
end
def poll_lock_options
{ redis: Redis.current, key: "vote:#{replied_to_status.poll_id}:#{@account.id}" }
end
end end

View file

@ -20,8 +20,15 @@ class ActivityPub::Activity::Delete < ActivityPub::Activity
def delete_note def delete_note
return if object_uri.nil? return if object_uri.nil?
lock_or_return("delete_status_in_progress:#{object_uri}", 5.minutes.seconds) do
unless invalid_origin?(object_uri) unless invalid_origin?(object_uri)
RedisLock.acquire(lock_options) { |_lock| delete_later!(object_uri) } # This lock ensures a concurrent `ActivityPub::Activity::Create` either
# does not create a status at all, or has finished saving it to the
# database before we try to load it.
# Without the lock, `delete_later!` could be called after `delete_arrived_first?`
# and `Status.find` before `Status.create!`
lock_or_fail("create:#{object_uri}") { delete_later!(object_uri) }
Tombstone.find_or_create_by(uri: object_uri, account: @account) Tombstone.find_or_create_by(uri: object_uri, account: @account)
end end
@ -30,23 +37,18 @@ class ActivityPub::Activity::Delete < ActivityPub::Activity
return if @status.nil? return if @status.nil?
if @status.distributable? forward! if @json['signature'].present? && @status.distributable?
forward_for_reply
forward_for_reblogs
end
delete_now! delete_now!
end end
def forward_for_reblogs
return if @json['signature'].blank?
rebloggers_ids = @status.reblogs.includes(:account).references(:account).merge(Account.local).pluck(:account_id)
inboxes = Account.where(id: ::Follow.where(target_account_id: rebloggers_ids).select(:account_id)).inboxes - [@account.preferred_inbox_url]
ActivityPub::LowPriorityDeliveryWorker.push_bulk(inboxes) do |inbox_url|
[payload, rebloggers_ids.first, inbox_url]
end end
def rebloggers_ids
return @rebloggers_ids if defined?(@rebloggers_ids)
@rebloggers_ids = @status.reblogs.includes(:account).references(:account).merge(Account.local).pluck(:account_id)
end
def inboxes_for_reblogs
Account.where(id: ::Follow.where(target_account_id: rebloggers_ids).select(:account_id)).inboxes
end end
def replied_to_status def replied_to_status
@ -58,13 +60,19 @@ class ActivityPub::Activity::Delete < ActivityPub::Activity
!replied_to_status.nil? && replied_to_status.account.local? !replied_to_status.nil? && replied_to_status.account.local?
end end
def forward_for_reply def inboxes_for_reply
return unless @json['signature'].present? && reply_to_local? replied_to_status.account.followers.inboxes
end
inboxes = replied_to_status.account.followers.inboxes - [@account.preferred_inbox_url] def forward!
inboxes = inboxes_for_reblogs
inboxes += inboxes_for_reply if reply_to_local?
inboxes -= [@account.preferred_inbox_url]
ActivityPub::LowPriorityDeliveryWorker.push_bulk(inboxes) do |inbox_url| sender_id = reply_to_local? ? replied_to_status.account_id : rebloggers_ids.first
[payload, replied_to_status.account_id, inbox_url]
ActivityPub::LowPriorityDeliveryWorker.push_bulk(inboxes.uniq) do |inbox_url|
[payload, sender_id, inbox_url]
end end
end end
@ -75,8 +83,4 @@ class ActivityPub::Activity::Delete < ActivityPub::Activity
def payload def payload
@payload ||= Oj.dump(@json) @payload ||= Oj.dump(@json)
end end
def lock_options
{ redis: Redis.current, key: "create:#{object_uri}" }
end
end end

View file

@ -10,6 +10,8 @@ class ActivityPub::Activity::Flag < ActivityPub::Activity
target_accounts.each do |target_account| target_accounts.each do |target_account|
target_statuses = target_statuses_by_account[target_account.id] target_statuses = target_statuses_by_account[target_account.id]
next if target_account.suspended?
ReportService.new.call( ReportService.new.call(
@account, @account,
target_account, target_account,

View file

@ -6,7 +6,14 @@ class ActivityPub::Activity::Follow < ActivityPub::Activity
def perform def perform
target_account = account_from_uri(object_uri) target_account = account_from_uri(object_uri)
return if target_account.nil? || !target_account.local? || delete_arrived_first?(@json['id']) || @account.requested?(target_account) return if target_account.nil? || !target_account.local? || delete_arrived_first?(@json['id'])
# Update id of already-existing follow requests
existing_follow_request = ::FollowRequest.find_by(account: @account, target_account: target_account)
unless existing_follow_request.nil?
existing_follow_request.update!(uri: @json['id'])
return
end
if target_account.blocking?(@account) || target_account.domain_blocking?(@account.domain) || target_account.moved? || target_account.instance_actor? if target_account.blocking?(@account) || target_account.domain_blocking?(@account.domain) || target_account.moved? || target_account.instance_actor?
reject_follow_request!(target_account) reject_follow_request!(target_account)
@ -14,7 +21,9 @@ class ActivityPub::Activity::Follow < ActivityPub::Activity
end end
# Fast-forward repeat follow requests # Fast-forward repeat follow requests
if @account.following?(target_account) existing_follow = ::Follow.find_by(account: @account, target_account: target_account)
unless existing_follow.nil?
existing_follow.update!(uri: @json['id'])
AuthorizeFollowService.new.call(@account, target_account, skip_follow_request: true, follow_request_uri: @json['id']) AuthorizeFollowService.new.call(@account, target_account, skip_follow_request: true, follow_request_uri: @json['id'])
return return
end end

View file

@ -4,9 +4,8 @@ class ActivityPub::Activity::Move < ActivityPub::Activity
PROCESSING_COOLDOWN = 7.days.seconds PROCESSING_COOLDOWN = 7.days.seconds
def perform def perform
return if origin_account.uri != object_uri || processed? return if origin_account.uri != object_uri
return unless mark_as_processing!
mark_as_processing!
target_account = ActivityPub::FetchRemoteAccountService.new.call(target_uri) target_account = ActivityPub::FetchRemoteAccountService.new.call(target_uri)
@ -35,12 +34,8 @@ class ActivityPub::Activity::Move < ActivityPub::Activity
value_or_id(@json['target']) value_or_id(@json['target'])
end end
def processed?
redis.exists?("move_in_progress:#{@account.id}")
end
def mark_as_processing! def mark_as_processing!
redis.setex("move_in_progress:#{@account.id}", PROCESSING_COOLDOWN, true) redis.set("move_in_progress:#{@account.id}", true, nx: true, ex: PROCESSING_COOLDOWN)
end end
def unmark_as_processing! def unmark_as_processing!

View file

@ -12,6 +12,10 @@ class ActivityPub::TagManager
public: 'https://www.w3.org/ns/activitystreams#Public', public: 'https://www.w3.org/ns/activitystreams#Public',
}.freeze }.freeze
def public_collection?(uri)
uri == COLLECTIONS[:public] || uri == 'as:Public' || uri == 'Public'
end
def url_for(target) def url_for(target)
return target.url if target.respond_to?(:local?) && !target.local? return target.url if target.respond_to?(:local?) && !target.local?
@ -60,6 +64,10 @@ class ActivityPub::TagManager
account_status_replies_url(target.account, target, page_params) account_status_replies_url(target.account, target, page_params)
end end
def followers_uri_for(target)
target.local? ? account_followers_url(target) : target.followers_url.presence
end
# Primary audience of a status # Primary audience of a status
# Public statuses go out to primarily the public collection # Public statuses go out to primarily the public collection
# Unlisted and private statuses go out primarily to the followers collection # Unlisted and private statuses go out primarily to the followers collection
@ -76,17 +84,17 @@ class ActivityPub::TagManager
account_ids = status.active_mentions.pluck(:account_id) account_ids = status.active_mentions.pluck(:account_id)
to = status.account.followers.where(id: account_ids).each_with_object([]) do |account, result| to = status.account.followers.where(id: account_ids).each_with_object([]) do |account, result|
result << uri_for(account) result << uri_for(account)
result << account_followers_url(account) if account.group? result << followers_uri_for(account) if account.group?
end end
to.concat(FollowRequest.where(target_account_id: status.account_id, account_id: account_ids).each_with_object([]) do |request, result| to.concat(FollowRequest.where(target_account_id: status.account_id, account_id: account_ids).each_with_object([]) do |request, result|
result << uri_for(request.account) result << uri_for(request.account)
result << account_followers_url(request.account) if request.account.group? result << followers_uri_for(request.account) if request.account.group?
end) end).compact
else else
status.active_mentions.each_with_object([]) do |mention, result| status.active_mentions.each_with_object([]) do |mention, result|
result << uri_for(mention.account) result << uri_for(mention.account)
result << account_followers_url(mention.account) if mention.account.group? result << followers_uri_for(mention.account) if mention.account.group?
end end.compact
end end
end end
end end
@ -114,17 +122,17 @@ class ActivityPub::TagManager
account_ids = status.active_mentions.pluck(:account_id) account_ids = status.active_mentions.pluck(:account_id)
cc.concat(status.account.followers.where(id: account_ids).each_with_object([]) do |account, result| cc.concat(status.account.followers.where(id: account_ids).each_with_object([]) do |account, result|
result << uri_for(account) result << uri_for(account)
result << account_followers_url(account) if account.group? result << followers_uri_for(account) if account.group?
end) end.compact)
cc.concat(FollowRequest.where(target_account_id: status.account_id, account_id: account_ids).each_with_object([]) do |request, result| cc.concat(FollowRequest.where(target_account_id: status.account_id, account_id: account_ids).each_with_object([]) do |request, result|
result << uri_for(request.account) result << uri_for(request.account)
result << account_followers_url(request.account) if request.account.group? result << followers_uri_for(request.account) if request.account.group?
end) end.compact)
else else
cc.concat(status.active_mentions.each_with_object([]) do |mention, result| cc.concat(status.active_mentions.each_with_object([]) do |mention, result|
result << uri_for(mention.account) result << uri_for(mention.account)
result << account_followers_url(mention.account) if mention.account.group? result << followers_uri_for(mention.account) if mention.account.group?
end) end.compact)
end end
end end

View file

@ -4,6 +4,8 @@ module ApplicationExtension
extend ActiveSupport::Concern extend ActiveSupport::Concern
included do included do
validates :website, url: true, if: :website? validates :name, length: { maximum: 60 }
validates :website, url: true, length: { maximum: 2_000 }, if: :website?
validates :redirect_uri, length: { maximum: 2_000 }
end end
end end

View file

@ -12,7 +12,11 @@ module Mastodon
class RateLimitExceededError < Error; end class RateLimitExceededError < Error; end
class UnexpectedResponseError < Error class UnexpectedResponseError < Error
attr_reader :response
def initialize(response = nil) def initialize(response = nil)
@response = response
if response.respond_to? :uri if response.respond_to? :uri
super("#{response.uri} returned code #{response.code}") super("#{response.uri} returned code #{response.code}")
else else

View file

@ -192,6 +192,36 @@ class FeedManager
end end
end end
# Clear all statuses from or mentioning target_account from a list feed
# @param [List] list
# @param [Account] target_account
# @return [void]
def clear_from_list(list, target_account)
timeline_key = key(:list, list.id)
timeline_status_ids = redis.zrange(timeline_key, 0, -1)
statuses = Status.where(id: timeline_status_ids).select(:id, :reblog_of_id, :account_id).to_a
reblogged_ids = Status.where(id: statuses.map(&:reblog_of_id).compact, account: target_account).pluck(:id)
with_mentions_ids = Mention.active.where(status_id: statuses.flat_map { |s| [s.id, s.reblog_of_id] }.compact, account: target_account).pluck(:status_id)
target_statuses = statuses.select do |status|
status.account_id == target_account.id || reblogged_ids.include?(status.reblog_of_id) || with_mentions_ids.include?(status.id) || with_mentions_ids.include?(status.reblog_of_id)
end
target_statuses.each do |status|
unpush_from_list(list, status)
end
end
# Clear all statuses from or mentioning target_account from an account's lists
# @param [Account] account
# @param [Account] target_account
# @return [void]
def clear_from_lists(account, target_account)
List.where(account: account).each do |list|
clear_from_list(list, target_account)
end
end
# Populate home feed of account from scratch # Populate home feed of account from scratch
# @param [Account] account # @param [Account] account
# @return [void] # @return [void]
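
A hedged sketch of how the new list-clearing helpers above might be invoked when one account blocks another; `account` and `target_account` are assumed to be Account records, and FeedManager is assumed to be used through its singleton instance as elsewhere in this codebase:
# Sketch only: purge the blocked account's statuses, boosts and mentions
# from every list feed owned by the blocking account.
FeedManager.instance.clear_from_lists(account, target_account)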

View file

@ -0,0 +1,54 @@
# frozen_string_literal: true
class VideoMetadataExtractor
attr_reader :duration, :bitrate, :video_codec, :audio_codec,
:colorspace, :width, :height, :frame_rate
def initialize(path)
@path = path
@metadata = Oj.load(ffmpeg_command_output, mode: :strict, symbol_keys: true)
parse_metadata
rescue Terrapin::ExitStatusError, Oj::ParseError
@invalid = true
rescue Terrapin::CommandNotFoundError
raise Paperclip::Errors::CommandNotFoundError, 'Could not run the `ffprobe` command. Please install ffmpeg.'
end
def valid?
!@invalid
end
private
def ffmpeg_command_output
command = Terrapin::CommandLine.new('ffprobe', '-i :path -print_format :format -show_format -show_streams -show_error -loglevel :loglevel')
command.run(path: @path, format: 'json', loglevel: 'fatal')
end
def parse_metadata
if @metadata.key?(:format)
@duration = @metadata[:format][:duration].to_f
@bitrate = @metadata[:format][:bit_rate].to_i
end
if @metadata.key?(:streams)
video_streams = @metadata[:streams].select { |stream| stream[:codec_type] == 'video' }
audio_streams = @metadata[:streams].select { |stream| stream[:codec_type] == 'audio' }
if (video_stream = video_streams.first)
@video_codec = video_stream[:codec_name]
@colorspace = video_stream[:pix_fmt]
@width = video_stream[:width]
@height = video_stream[:height]
@frame_rate = video_stream[:avg_frame_rate] == '0/0' ? nil : Rational(video_stream[:avg_frame_rate])
end
if (audio_stream = audio_streams.first)
@audio_codec = audio_stream[:codec_name]
end
end
@invalid = true if @metadata.key?(:error)
end
end
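
A minimal usage sketch, assuming `ffprobe` is available on PATH and `path` points to a local video file; this is the object that `MediaAttachment#ffmpeg_data` now builds in place of `FFMPEG::Movie` (see the Gemfile and MediaAttachment hunks elsewhere in this commit):
# Sketch only: probe a file and read the parsed attributes.
metadata = VideoMetadataExtractor.new(path)
if metadata.valid?
  puts "#{metadata.width}x#{metadata.height}, #{metadata.duration.round(1)}s, codec #{metadata.video_codec}"
end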

View file

@ -46,7 +46,9 @@ class Webfinger
def body_from_webfinger(url = standard_url, use_fallback = true) def body_from_webfinger(url = standard_url, use_fallback = true)
webfinger_request(url).perform do |res| webfinger_request(url).perform do |res|
if res.code == 200 if res.code == 200
res.body_with_limit body = res.body_with_limit
raise Webfinger::Error, "Request for #{@uri} returned empty response" if body.empty?
body
elsif res.code == 404 && use_fallback elsif res.code == 404 && use_fallback
body_from_host_meta body_from_host_meta
elsif res.code == 410 elsif res.code == 410

View file

@ -56,7 +56,8 @@
class Account < ApplicationRecord class Account < ApplicationRecord
USERNAME_RE = /[a-z0-9_]+([a-z0-9_\.-]+[a-z0-9_]+)?/i USERNAME_RE = /[a-z0-9_]+([a-z0-9_\.-]+[a-z0-9_]+)?/i
MENTION_RE = /(?<=^|[^\/[:word:]])@((#{USERNAME_RE})(?:@[[:word:]\.\-]+[a-z0-9]+)?)/i MENTION_RE = /(?<=^|[^\/[:word:]])@((#{USERNAME_RE})(?:@[[:word:]\.\-]+[[:word:]]+)?)/i
URL_PREFIX_RE = /\Ahttp(s?):\/\/[^\/]+/
include AccountAssociations include AccountAssociations
include AccountAvatar include AccountAvatar
@ -301,7 +302,11 @@ class Account < ApplicationRecord
end end
def fields def fields
(self[:fields] || []).map { |f| Field.new(self, f) } (self[:fields] || []).map do |f|
Field.new(self, f)
rescue
nil
end.compact
end end
def fields_attributes=(attributes) def fields_attributes=(attributes)
@ -381,7 +386,7 @@ class Account < ApplicationRecord
def synchronization_uri_prefix def synchronization_uri_prefix
return 'local' if local? return 'local' if local?
@synchronization_uri_prefix ||= uri[/http(s?):\/\/[^\/]+\//] @synchronization_uri_prefix ||= "#{uri[URL_PREFIX_RE]}/"
end end
class Field < ActiveModelSerializers::Model class Field < ActiveModelSerializers::Model

View file

@ -14,6 +14,8 @@
# #
class AccountMigration < ApplicationRecord class AccountMigration < ApplicationRecord
include Redisable
COOLDOWN_PERIOD = 30.days.freeze COOLDOWN_PERIOD = 30.days.freeze
belongs_to :account belongs_to :account
@ -39,7 +41,13 @@ class AccountMigration < ApplicationRecord
return false unless errors.empty? return false unless errors.empty?
RedisLock.acquire(lock_options) do |lock|
if lock.acquired?
save save
else
raise Mastodon::RaceConditionError
end
end
end end
def cooldown_at def cooldown_at
@ -75,4 +83,8 @@ class AccountMigration < ApplicationRecord
def validate_migration_cooldown def validate_migration_cooldown
errors.add(:base, I18n.t('migrations.errors.on_cooldown')) if account.migrations.within_cooldown.exists? errors.add(:base, I18n.t('migrations.errors.on_cooldown')) if account.migrations.within_cooldown.exists?
end end
def lock_options
{ redis: redis, key: "account_migration:#{account.id}" }
end
end end

View file

@ -17,4 +17,5 @@ class AccountNote < ApplicationRecord
belongs_to :target_account, class_name: 'Account' belongs_to :target_account, class_name: 'Account'
validates :account_id, uniqueness: { scope: :target_account_id } validates :account_id, uniqueness: { scope: :target_account_id }
validates :comment, length: { maximum: 2_000 }
end end

View file

@ -14,6 +14,8 @@ module AccountFinderConcern
def representative def representative
Account.find(-99) Account.find(-99)
rescue ActiveRecord::RecordNotFound
Account.create!(id: -99, actor_type: 'Application', locked: true, username: Rails.configuration.x.local_domain)
end end
def find_local(username) def find_local(username)

View file

@ -243,10 +243,13 @@ module AccountInteractions
.where('users.current_sign_in_at > ?', User::ACTIVE_DURATION.ago) .where('users.current_sign_in_at > ?', User::ACTIVE_DURATION.ago)
end end
def remote_followers_hash(url_prefix) def remote_followers_hash(url)
Rails.cache.fetch("followers_hash:#{id}:#{url_prefix}") do url_prefix = url[Account::URL_PREFIX_RE]
return if url_prefix.blank?
Rails.cache.fetch("followers_hash:#{id}:#{url_prefix}/") do
digest = "\x00" * 32 digest = "\x00" * 32
followers.where(Account.arel_table[:uri].matches(url_prefix + '%', false, true)).pluck_each(:uri) do |uri| followers.where(Account.arel_table[:uri].matches("#{Account.sanitize_sql_like(url_prefix)}/%", false, true)).or(followers.where(uri: url_prefix)).pluck_each(:uri) do |uri|
Xorcist.xor!(digest, Digest::SHA256.digest(uri)) Xorcist.xor!(digest, Digest::SHA256.digest(uri))
end end
digest.unpack('H*')[0] digest.unpack('H*')[0]
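
For reference, the digest used above is order-independent: start from 32 zero bytes, XOR in the SHA-256 of every follower URI, then hex-encode. A minimal plain-Ruby sketch of the same scheme, without the Xorcist extension; the helper name and sample URIs are made up for illustration:

  require 'digest'

  # Same idea as remote_followers_hash: XOR of SHA-256 digests, hex-encoded.
  # XOR is commutative, so the result does not depend on follower order.
  def followers_digest(uris)
    digest = Array.new(32, 0)
    uris.each do |uri|
      Digest::SHA256.digest(uri).bytes.each_with_index do |byte, i|
        digest[i] ^= byte
      end
    end
    digest.pack('C*').unpack1('H*')
  end

  followers_digest(['https://example.com/users/a', 'https://example.com/users/b'])
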

View file

@ -17,7 +17,7 @@ module Expireable
end end
def expires_in=(interval) def expires_in=(interval)
self.expires_at = interval.to_i.seconds.from_now if interval.present? self.expires_at = interval.present? ? interval.to_i.seconds.from_now : nil
@expires_in = interval @expires_in = interval
end end

View file

@ -68,7 +68,6 @@ module Omniauthable
def user_params_from_auth(email, auth) def user_params_from_auth(email, auth)
{ {
email: email || "#{TEMP_EMAIL_PREFIX}-#{auth.uid}-#{auth.provider}.com", email: email || "#{TEMP_EMAIL_PREFIX}-#{auth.uid}-#{auth.provider}.com",
password: Devise.friendly_token[0, 20],
agreement: true, agreement: true,
external: true, external: true,
account_attributes: { account_attributes: {

View file

@ -29,7 +29,7 @@ class FollowRequest < ApplicationRecord
validates :account_id, uniqueness: { scope: :target_account_id } validates :account_id, uniqueness: { scope: :target_account_id }
def authorize! def authorize!
account.follow!(target_account, reblogs: show_reblogs, notify: notify, uri: uri) account.follow!(target_account, reblogs: show_reblogs, notify: notify, uri: uri, bypass_limit: true)
MergeWorker.perform_async(target_account.id, account.id) if account.local? MergeWorker.perform_async(target_account.id, account.id) if account.local?
destroy! destroy!
end end

View file

@ -287,7 +287,7 @@ class MediaAttachment < ApplicationRecord
if instance.file_content_type == 'image/gif' if instance.file_content_type == 'image/gif'
[:gif_transcoder, :blurhash_transcoder] [:gif_transcoder, :blurhash_transcoder]
elsif VIDEO_MIME_TYPES.include?(instance.file_content_type) elsif VIDEO_MIME_TYPES.include?(instance.file_content_type)
[:video_transcoder, :blurhash_transcoder, :type_corrector] [:transcoder, :blurhash_transcoder, :type_corrector]
elsif AUDIO_MIME_TYPES.include?(instance.file_content_type) elsif AUDIO_MIME_TYPES.include?(instance.file_content_type)
[:image_extractor, :transcoder, :type_corrector] [:image_extractor, :transcoder, :type_corrector]
else else
@ -388,7 +388,7 @@ class MediaAttachment < ApplicationRecord
# paths but ultimately the same file, so it makes sense to memoize the # paths but ultimately the same file, so it makes sense to memoize the
# result while disregarding the path # result while disregarding the path
def ffmpeg_data(path = nil) def ffmpeg_data(path = nil)
@ffmpeg_data ||= FFMPEG::Movie.new(path) @ffmpeg_data ||= VideoMetadataExtractor.new(path)
end end
def enqueue_processing def enqueue_processing

View file

@ -114,7 +114,7 @@ class Status < ApplicationRecord
:tags, :tags,
:preview_cards, :preview_cards,
:preloadable_poll, :preloadable_poll,
account: :account_stat, account: [:account_stat, :user],
active_mentions: { account: :account_stat }, active_mentions: { account: :account_stat },
reblog: [ reblog: [
:application, :application,
@ -124,7 +124,7 @@ class Status < ApplicationRecord
:conversation, :conversation,
:status_stat, :status_stat,
:preloadable_poll, :preloadable_poll,
account: :account_stat, account: [:account_stat, :user],
active_mentions: { account: :account_stat }, active_mentions: { account: :account_stat },
], ],
thread: { account: :account_stat } thread: { account: :account_stat }
@ -301,7 +301,7 @@ class Status < ApplicationRecord
return if account_ids.empty? return if account_ids.empty?
accounts = Account.where(id: account_ids).includes(:account_stat).each_with_object({}) { |a, h| h[a.id] = a } accounts = Account.where(id: account_ids).includes(:account_stat, :user).index_by(&:id)
cached_items.each do |item| cached_items.each do |item|
item.account = accounts[item.account_id] item.account = accounts[item.account_id]
@ -422,7 +422,7 @@ class Status < ApplicationRecord
end end
def decrement_counter_caches def decrement_counter_caches
return if direct_visibility? return if direct_visibility? || new_record?
account&.decrement_count!(:statuses_count) account&.decrement_count!(:statuses_count)
reblog&.decrement_count!(:reblogs_count) if reblog? reblog&.decrement_count!(:reblogs_count) if reblog?

View file

@ -63,7 +63,7 @@ class User < ApplicationRecord
devise :two_factor_backupable, devise :two_factor_backupable,
otp_number_of_backup_codes: 10 otp_number_of_backup_codes: 10
devise :registerable, :recoverable, :rememberable, :validatable, devise :registerable, :recoverable, :validatable,
:confirmable :confirmable
include Omniauthable include Omniauthable
@ -152,7 +152,7 @@ class User < ApplicationRecord
def confirm def confirm
new_user = !confirmed? new_user = !confirmed?
self.approved = true if open_registrations? self.approved = true if open_registrations? && !sign_up_from_ip_requires_approval?
super super
@ -468,7 +468,7 @@ class User < ApplicationRecord
end end
def validate_email_dns? def validate_email_dns?
email_changed? && !(Rails.env.test? || Rails.env.development?) email_changed? && !external? && !(Rails.env.test? || Rails.env.development?)
end end
def invite_text_required? def invite_text_required?

View file

@ -48,7 +48,7 @@ class ManifestSerializer < ActiveModel::Serializer
end end
def scope def scope
root_url '/'
end end
def share_target def share_target

View file

@ -283,7 +283,7 @@ class ActivityPub::ProcessAccountService < BaseService
end end
def lock_options def lock_options
{ redis: Redis.current, key: "process_account:#{@uri}" } { redis: Redis.current, key: "process_account:#{@uri}", autorelease: 15.minutes.seconds }
end end
def process_tags def process_tags
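
Several hunks in this commit add an autorelease timeout to these Redis locks, so a lock left behind by a crashed process expires on its own. The shape of the call, with a placeholder key and an elided critical section (illustrative only):

  RedisLock.acquire(redis: Redis.current, key: 'example:1', autorelease: 15.minutes.seconds) do |lock|
    if lock.acquired?
      # ... critical section ...
    else
      raise Mastodon::RaceConditionError
    end
  end
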

View file

@ -6,6 +6,7 @@ class AfterBlockService < BaseService
@target_account = target_account @target_account = target_account
clear_home_feed! clear_home_feed!
clear_list_feeds!
clear_notifications! clear_notifications!
clear_conversations! clear_conversations!
end end
@ -16,6 +17,10 @@ class AfterBlockService < BaseService
FeedManager.instance.clear_from_home(@account, @target_account) FeedManager.instance.clear_from_home(@account, @target_account)
end end
def clear_list_feeds!
FeedManager.instance.clear_from_lists(@account, @target_account)
end
def clear_conversations! def clear_conversations!
AccountConversation.where(account: @account).where('? = ANY(participant_account_ids)', @target_account.id).in_batches.destroy_all AccountConversation.where(account: @account).where('? = ANY(participant_account_ids)', @target_account.id).in_batches.destroy_all
end end

View file

@ -174,6 +174,6 @@ class FetchLinkCardService < BaseService
end end
def lock_options def lock_options
{ redis: Redis.current, key: "fetch:#{@url}" } { redis: Redis.current, key: "fetch:#{@url}", autorelease: 15.minutes.seconds }
end end
end end

View file

@ -2,6 +2,7 @@
class FetchOEmbedService class FetchOEmbedService
ENDPOINT_CACHE_EXPIRES_IN = 24.hours.freeze ENDPOINT_CACHE_EXPIRES_IN = 24.hours.freeze
URL_REGEX = /(=(http[s]?(%3A|:)(\/\/|%2F%2F)))([^&]*)/i.freeze
attr_reader :url, :options, :format, :endpoint_url attr_reader :url, :options, :format, :endpoint_url
@ -55,10 +56,12 @@ class FetchOEmbedService
end end
def cache_endpoint! def cache_endpoint!
return unless URL_REGEX.match?(@endpoint_url)
url_domain = Addressable::URI.parse(@url).normalized_host url_domain = Addressable::URI.parse(@url).normalized_host
endpoint_hash = { endpoint_hash = {
endpoint: @endpoint_url.gsub(/(=(http[s]?(%3A|:)(\/\/|%2F%2F)))([^&]*)/i, '={url}'), endpoint: @endpoint_url.gsub(URL_REGEX, '={url}'),
format: @format, format: @format,
} }
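
A worked example of the endpoint templating above, with a made-up provider URL: the regex matches the URL-encoded target after "=", so only that query value is replaced with the "{url}" placeholder.

  endpoint = 'https://provider.example/oembed?format=json&url=https%3A%2F%2Fexample.com%2Fpost%2F1'
  endpoint.gsub(FetchOEmbedService::URL_REGEX, '={url}')
  # => "https://provider.example/oembed?format=json&url={url}"
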

View file

@ -67,8 +67,49 @@ class NotifyService < BaseService
message? && @notification.target_status.direct_visibility? message? && @notification.target_status.direct_visibility?
end end
# Returns true if the sender has been mentioned by the recipient up the thread
def response_to_recipient? def response_to_recipient?
@notification.target_status.in_reply_to_account_id == @recipient.id && @notification.target_status.thread&.direct_visibility? return false if @notification.target_status.in_reply_to_id.nil?
# Using an SQL CTE to avoid unneeded back-and-forth with the SQL server in case of long threads
!Status.count_by_sql([<<-SQL.squish, id: @notification.target_status.in_reply_to_id, recipient_id: @recipient.id, sender_id: @notification.from_account.id]).zero?
WITH RECURSIVE ancestors(id, in_reply_to_id, replying_to_sender) AS (
SELECT
s.id, s.in_reply_to_id, (CASE
WHEN s.account_id = :recipient_id THEN
EXISTS (
SELECT *
FROM mentions m
WHERE m.silent = FALSE AND m.account_id = :sender_id AND m.status_id = s.id
)
ELSE
FALSE
END)
FROM statuses s
WHERE s.id = :id
UNION ALL
SELECT
s.id,
s.in_reply_to_id,
(CASE
WHEN s.account_id = :recipient_id THEN
EXISTS (
SELECT *
FROM mentions m
WHERE m.silent = FALSE AND m.account_id = :sender_id AND m.status_id = s.id
)
ELSE
FALSE
END)
FROM ancestors st
JOIN statuses s ON s.id = st.in_reply_to_id
WHERE st.replying_to_sender IS FALSE
)
SELECT COUNT(*)
FROM ancestors st
JOIN statuses s ON s.id = st.id
WHERE st.replying_to_sender IS TRUE AND s.visibility = 3
SQL
end end
def from_staff? def from_staff?
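
The recursive CTE above walks up the reply chain in a single query. A hypothetical one-query-per-ancestor equivalent, written only with the associations visible in this diff (a sketch, not the shipped code), makes the intent easier to read:

  def response_to_recipient_naive?(status, recipient, sender)
    current = Status.find_by(id: status.in_reply_to_id)
    while current
      mentioned = current.account_id == recipient.id &&
                  current.mentions.where(silent: false, account_id: sender.id).exists?
      # Stop at the first ancestor written by the recipient that mentions the
      # sender; return true only if that ancestor is a direct-visibility
      # status (visibility = 3 in the SQL above).
      return current.direct_visibility? if mentioned
      current = Status.find_by(id: current.in_reply_to_id)
    end
    false
  end
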

View file

@ -74,6 +74,9 @@ class PostStatusService < BaseService
status_for_validation = @account.statuses.build(status_attributes) status_for_validation = @account.statuses.build(status_attributes)
if status_for_validation.valid? if status_for_validation.valid?
# Marking the status as destroyed is necessary to prevent the status from being
# persisted when the associated media attachments get updated when creating the
# scheduled status.
status_for_validation.destroy status_for_validation.destroy
# The following transaction block is needed to wrap the UPDATEs to # The following transaction block is needed to wrap the UPDATEs to

View file

@ -16,6 +16,8 @@ class RemoveStatusService < BaseService
@account = status.account @account = status.account
@options = options @options = options
@status.discard
RedisLock.acquire(lock_options) do |lock| RedisLock.acquire(lock_options) do |lock|
if lock.acquired? if lock.acquired?
remove_from_self if @account.local? remove_from_self if @account.local?
@ -168,6 +170,6 @@ class RemoveStatusService < BaseService
end end
def lock_options def lock_options
{ redis: Redis.current, key: "distribute:#{@status.id}" } { redis: Redis.current, key: "distribute:#{@status.id}", autorelease: 5.minutes.seconds }
end end
end end

View file

@ -10,6 +10,8 @@ class ReportService < BaseService
@comment = options.delete(:comment) || '' @comment = options.delete(:comment) || ''
@options = options @options = options
raise ActiveRecord::RecordNotFound if @target_account.suspended?
create_report! create_report!
notify_staff! notify_staff!
forward_to_origin! if !@target_account.local? && ActiveModel::Type::Boolean.new.cast(@options[:forward]) forward_to_origin! if !@target_account.local? && ActiveModel::Type::Boolean.new.cast(@options[:forward])

View file

@ -141,10 +141,11 @@ class ResolveAccountService < BaseService
end end
def queue_deletion! def queue_deletion!
@account.suspend!(origin: :remote)
AccountDeletionWorker.perform_async(@account.id, reserve_username: false, skip_activitypub: true) AccountDeletionWorker.perform_async(@account.id, reserve_username: false, skip_activitypub: true)
end end
def lock_options def lock_options
{ redis: Redis.current, key: "resolve:#{@username}@#{@domain}" } { redis: Redis.current, key: "resolve:#{@username}@#{@domain}", autorelease: 15.minutes.seconds }
end end
end end

View file

@ -42,7 +42,13 @@ class SuspendAccountService < BaseService
end end
def distribute_update_actor! def distribute_update_actor!
ActivityPub::UpdateDistributionWorker.perform_async(@account.id) if @account.local? return unless @account.local?
account_reach_finder = AccountReachFinder.new(@account)
ActivityPub::DeliveryWorker.push_bulk(account_reach_finder.inboxes) do |inbox_url|
[signed_activity_json, @account.id, inbox_url]
end
end end
def unmerge_from_home_timelines! def unmerge_from_home_timelines!
@ -90,4 +96,8 @@ class SuspendAccountService < BaseService
end end
end end
end end
def signed_activity_json
@signed_activity_json ||= Oj.dump(serialize_payload(@account, ActivityPub::UpdateSerializer, signer: @account))
end
end end

View file

@ -7,11 +7,12 @@ class UnsuspendAccountService < BaseService
unsuspend! unsuspend!
refresh_remote_account! refresh_remote_account!
return if @account.nil? return if @account.nil? || @account.suspended?
merge_into_home_timelines! merge_into_home_timelines!
merge_into_list_timelines! merge_into_list_timelines!
publish_media_attachments! publish_media_attachments!
distribute_update_actor!
end end
private private
@ -36,6 +37,16 @@ class UnsuspendAccountService < BaseService
# @account would now be nil. # @account would now be nil.
end end
def distribute_update_actor!
return unless @account.local?
account_reach_finder = AccountReachFinder.new(@account)
ActivityPub::DeliveryWorker.push_bulk(account_reach_finder.inboxes) do |inbox_url|
[signed_activity_json, @account.id, inbox_url]
end
end
def merge_into_home_timelines! def merge_into_home_timelines!
@account.followers_for_local_distribution.find_each do |follower| @account.followers_for_local_distribution.find_each do |follower|
FeedManager.instance.merge_into_home(@account, follower) FeedManager.instance.merge_into_home(@account, follower)
@ -81,4 +92,8 @@ class UnsuspendAccountService < BaseService
end end
end end
end end
def signed_activity_json
@signed_activity_json ||= Oj.dump(serialize_payload(@account, ActivityPub::UpdateSerializer, signer: @account))
end
end end

View file

@ -2,7 +2,7 @@
.fields-row__column.fields-row__column-6.fields-group .fields-row__column.fields-row__column-6.fields-group
= f.input :phrase, as: :string, wrapper: :with_label, hint: false = f.input :phrase, as: :string, wrapper: :with_label, hint: false
.fields-row__column.fields-row__column-6.fields-group .fields-row__column.fields-row__column-6.fields-group
= f.input :expires_in, wrapper: :with_label, collection: [30.minutes, 1.hour, 6.hours, 12.hours, 1.day, 1.week].map(&:to_i), label_method: lambda { |i| I18n.t("invites.expires_in.#{i}") }, prompt: I18n.t('invites.expires_in_prompt') = f.input :expires_in, wrapper: :with_label, collection: [30.minutes, 1.hour, 6.hours, 12.hours, 1.day, 1.week].map(&:to_i), label_method: lambda { |i| I18n.t("invites.expires_in.#{i}") }, include_blank: I18n.t('invites.expires_in_prompt')
.fields-group .fields-group
= f.input :context, wrapper: :with_block_label, collection: CustomFilter::VALID_CONTEXTS, as: :check_boxes, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li', label_method: lambda { |context| I18n.t("filters.contexts.#{context}") }, include_blank: false = f.input :context, wrapper: :with_block_label, collection: CustomFilter::VALID_CONTEXTS, as: :check_boxes, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li', label_method: lambda { |context| I18n.t("filters.contexts.#{context}") }, include_blank: false

View file

@ -31,7 +31,7 @@
= stylesheet_link_tag '/inert.css', skip_pipeline: true, media: 'all', id: 'inert-style' = stylesheet_link_tag '/inert.css', skip_pipeline: true, media: 'all', id: 'inert-style'
- if Setting.custom_css.present? - if Setting.custom_css.present?
= stylesheet_link_tag custom_css_path, media: 'all' = stylesheet_link_tag custom_css_path, host: request.host, media: 'all'
= yield :header_tags = yield :header_tags

View file

@ -44,11 +44,7 @@ class ActivityPub::DeliveryWorker
end end
def synchronization_header def synchronization_header
"collectionId=\"#{account_followers_url(@source_account)}\", digest=\"#{@source_account.remote_followers_hash(inbox_url_prefix)}\", url=\"#{account_followers_synchronization_url(@source_account)}\"" "collectionId=\"#{account_followers_url(@source_account)}\", digest=\"#{@source_account.remote_followers_hash(@inbox_url)}\", url=\"#{account_followers_synchronization_url(@source_account)}\""
end
def inbox_url_prefix
@inbox_url[/http(s?):\/\/[^\/]+\//]
end end
def perform_request def perform_request

View file

@ -4,7 +4,7 @@ class DistributionWorker
include Sidekiq::Worker include Sidekiq::Worker
def perform(status_id) def perform(status_id)
RedisLock.acquire(redis: Redis.current, key: "distribute:#{status_id}") do |lock| RedisLock.acquire(redis: Redis.current, key: "distribute:#{status_id}", autorelease: 5.minutes.seconds) do |lock|
if lock.acquired? if lock.acquired?
FanOutOnWriteService.new.call(Status.find(status_id)) FanOutOnWriteService.new.call(Status.find(status_id))
else else

View file

@ -13,9 +13,13 @@ class MoveWorker
queue_follow_unfollows! queue_follow_unfollows!
end end
@deferred_error = nil
copy_account_notes! copy_account_notes!
carry_blocks_over! carry_blocks_over!
carry_mutes_over! carry_mutes_over!
raise @deferred_error unless @deferred_error.nil?
rescue ActiveRecord::RecordNotFound rescue ActiveRecord::RecordNotFound
true true
end end
@ -36,21 +40,31 @@ class MoveWorker
@source_account.followers.local.select(:id).find_in_batches do |accounts| @source_account.followers.local.select(:id).find_in_batches do |accounts|
UnfollowFollowWorker.push_bulk(accounts.map(&:id)) { |follower_id| [follower_id, @source_account.id, @target_account.id, bypass_locked] } UnfollowFollowWorker.push_bulk(accounts.map(&:id)) { |follower_id| [follower_id, @source_account.id, @target_account.id, bypass_locked] }
rescue => e
@deferred_error = e
end end
end end
def copy_account_notes! def copy_account_notes!
AccountNote.where(target_account: @source_account).find_each do |note| AccountNote.where(target_account: @source_account).find_each do |note|
text = I18n.with_locale(note.account.user.locale || I18n.default_locale) do text = I18n.with_locale(note.account.user&.locale || I18n.default_locale) do
I18n.t('move_handler.copy_account_note_text', acct: @source_account.acct) I18n.t('move_handler.copy_account_note_text', acct: @source_account.acct)
end end
new_note = AccountNote.find_by(account: note.account, target_account: @target_account) new_note = AccountNote.find_by(account: note.account, target_account: @target_account)
if new_note.nil? if new_note.nil?
AccountNote.create!(account: note.account, target_account: @target_account, comment: [text, note.comment].join('\n')) begin
else AccountNote.create!(account: note.account, target_account: @target_account, comment: [text, note.comment].join("\n"))
new_note.update!(comment: [text, note.comment, '\n', new_note.comment].join('\n')) rescue ActiveRecord::RecordInvalid
AccountNote.create!(account: note.account, target_account: @target_account, comment: note.comment)
end end
else
new_note.update!(comment: [text, note.comment, "\n", new_note.comment].join("\n"))
end
rescue ActiveRecord::RecordInvalid
nil
rescue => e
@deferred_error = e
end end
end end
@ -60,6 +74,8 @@ class MoveWorker
BlockService.new.call(block.account, @target_account) BlockService.new.call(block.account, @target_account)
add_account_note_if_needed!(block.account, 'move_handler.carry_blocks_over_text') add_account_note_if_needed!(block.account, 'move_handler.carry_blocks_over_text')
end end
rescue => e
@deferred_error = e
end end
end end
@ -67,12 +83,14 @@ class MoveWorker
@source_account.muted_by_relationships.where(account: Account.local).find_each do |mute| @source_account.muted_by_relationships.where(account: Account.local).find_each do |mute|
MuteService.new.call(mute.account, @target_account, notifications: mute.hide_notifications) unless mute.account.muting?(@target_account) || mute.account.following?(@target_account) MuteService.new.call(mute.account, @target_account, notifications: mute.hide_notifications) unless mute.account.muting?(@target_account) || mute.account.following?(@target_account)
add_account_note_if_needed!(mute.account, 'move_handler.carry_mutes_over_text') add_account_note_if_needed!(mute.account, 'move_handler.carry_mutes_over_text')
rescue => e
@deferred_error = e
end end
end end
def add_account_note_if_needed!(account, id) def add_account_note_if_needed!(account, id)
unless AccountNote.where(account: account, target_account: @target_account).exists? unless AccountNote.where(account: account, target_account: @target_account).exists?
text = I18n.with_locale(account.user.locale || I18n.default_locale) do text = I18n.with_locale(account.user&.locale || I18n.default_locale) do
I18n.t(id, acct: @source_account.acct) I18n.t(id, acct: @source_account.acct)
end end
AccountNote.create!(account: account, target_account: @target_account, comment: text) AccountNote.create!(account: account, target_account: @target_account, comment: text)

View file

@ -3,6 +3,7 @@
class RedownloadMediaWorker class RedownloadMediaWorker
include Sidekiq::Worker include Sidekiq::Worker
include ExponentialBackoff include ExponentialBackoff
include JsonLdHelper
sidekiq_options queue: 'pull', retry: 3 sidekiq_options queue: 'pull', retry: 3
@ -15,6 +16,14 @@ class RedownloadMediaWorker
media_attachment.download_thumbnail! media_attachment.download_thumbnail!
media_attachment.save media_attachment.save
rescue ActiveRecord::RecordNotFound rescue ActiveRecord::RecordNotFound
true # Do nothing
rescue Mastodon::UnexpectedResponseError => e
response = e.response
if response_error_unsalvageable?(response)
# Give up
else
raise e
end
end end
end end

View file

@ -14,5 +14,7 @@ class ThreadResolveWorker
child_status.thread = parent_status child_status.thread = parent_status
child_status.save! child_status.save!
rescue ActiveRecord::RecordNotFound
true
end end
end end

View file

@ -11,12 +11,12 @@ require_relative '../lib/redis/namespace_extensions'
require_relative '../lib/paperclip/url_generator_extensions' require_relative '../lib/paperclip/url_generator_extensions'
require_relative '../lib/paperclip/attachment_extensions' require_relative '../lib/paperclip/attachment_extensions'
require_relative '../lib/paperclip/media_type_spoof_detector_extensions' require_relative '../lib/paperclip/media_type_spoof_detector_extensions'
require_relative '../lib/paperclip/transcoder_extensions'
require_relative '../lib/paperclip/lazy_thumbnail' require_relative '../lib/paperclip/lazy_thumbnail'
require_relative '../lib/paperclip/gif_transcoder' require_relative '../lib/paperclip/gif_transcoder'
require_relative '../lib/paperclip/video_transcoder' require_relative '../lib/paperclip/transcoder'
require_relative '../lib/paperclip/type_corrector' require_relative '../lib/paperclip/type_corrector'
require_relative '../lib/paperclip/response_with_limit_adapter' require_relative '../lib/paperclip/response_with_limit_adapter'
require_relative '../lib/terrapin/multi_pipe_extensions'
require_relative '../lib/mastodon/snowflake' require_relative '../lib/mastodon/snowflake'
require_relative '../lib/mastodon/version' require_relative '../lib/mastodon/version'
require_relative '../lib/devise/two_factor_ldap_authenticatable' require_relative '../lib/devise/two_factor_ldap_authenticatable'

View file

@ -1,3 +1,5 @@
require 'devise/strategies/authenticatable'
Warden::Manager.after_set_user except: :fetch do |user, warden| Warden::Manager.after_set_user except: :fetch do |user, warden|
if user.session_active?(warden.cookies.signed['_session_id'] || warden.raw_session['auth_id']) if user.session_active?(warden.cookies.signed['_session_id'] || warden.raw_session['auth_id'])
session_id = warden.cookies.signed['_session_id'] || warden.raw_session['auth_id'] session_id = warden.cookies.signed['_session_id'] || warden.raw_session['auth_id']
@ -72,17 +74,48 @@ module Devise
mattr_accessor :ldap_uid_conversion_replace mattr_accessor :ldap_uid_conversion_replace
@@ldap_uid_conversion_replace = nil @@ldap_uid_conversion_replace = nil
class Strategies::PamAuthenticatable module Strategies
class PamAuthenticatable
def valid? def valid?
super && ::Devise.pam_authentication super && ::Devise.pam_authentication
end end
end end
class SessionActivationRememberable < Authenticatable
def valid?
@session_cookie = nil
session_cookie.present?
end
def authenticate!
resource = SessionActivation.find_by(session_id: session_cookie)&.user
unless resource
cookies.delete('_session_id')
return pass
end
if validate(resource)
success!(resource)
end
end
private
def session_cookie
@session_cookie ||= cookies.signed['_session_id']
end
end
end
end end
Warden::Strategies.add(:session_activation_rememberable, Devise::Strategies::SessionActivationRememberable)
Devise.setup do |config| Devise.setup do |config|
config.warden do |manager| config.warden do |manager|
manager.default_strategies(scope: :user).unshift :two_factor_ldap_authenticatable if Devise.ldap_authentication manager.default_strategies(scope: :user).unshift :two_factor_ldap_authenticatable if Devise.ldap_authentication
manager.default_strategies(scope: :user).unshift :two_factor_pam_authenticatable if Devise.pam_authentication manager.default_strategies(scope: :user).unshift :two_factor_pam_authenticatable if Devise.pam_authentication
manager.default_strategies(scope: :user).unshift :session_activation_rememberable
manager.default_strategies(scope: :user).unshift :two_factor_authenticatable manager.default_strategies(scope: :user).unshift :two_factor_authenticatable
manager.default_strategies(scope: :user).unshift :two_factor_backupable manager.default_strategies(scope: :user).unshift :two_factor_backupable
end end

View file

@ -52,6 +52,11 @@ Doorkeeper.configure do
# Issue access tokens with refresh token (disabled by default) # Issue access tokens with refresh token (disabled by default)
# use_refresh_token # use_refresh_token
# Forbids creating/updating applications with arbitrary scopes that are
# not in configuration, i.e. `default_scopes` or `optional_scopes`.
# (Disabled by default)
enforce_configured_scopes
# Provide support for an owner to be assigned to each registered application (disabled by default) # Provide support for an owner to be assigned to each registered application (disabled by default)
# Optional parameter :confirmation => true (default false) if you want to enforce ownership of # Optional parameter :confirmation => true (default false) if you want to enforce ownership of
# a registered application # a registered application

View file

@ -60,6 +60,7 @@ Devise.setup do |config|
saml_options[:attribute_statements][:verified] = [ENV['SAML_ATTRIBUTES_STATEMENTS_VERIFIED']] if ENV['SAML_ATTRIBUTES_STATEMENTS_VERIFIED'] saml_options[:attribute_statements][:verified] = [ENV['SAML_ATTRIBUTES_STATEMENTS_VERIFIED']] if ENV['SAML_ATTRIBUTES_STATEMENTS_VERIFIED']
saml_options[:attribute_statements][:verified_email] = [ENV['SAML_ATTRIBUTES_STATEMENTS_VERIFIED_EMAIL']] if ENV['SAML_ATTRIBUTES_STATEMENTS_VERIFIED_EMAIL'] saml_options[:attribute_statements][:verified_email] = [ENV['SAML_ATTRIBUTES_STATEMENTS_VERIFIED_EMAIL']] if ENV['SAML_ATTRIBUTES_STATEMENTS_VERIFIED_EMAIL']
saml_options[:uid_attribute] = ENV['SAML_UID_ATTRIBUTE'] if ENV['SAML_UID_ATTRIBUTE'] saml_options[:uid_attribute] = ENV['SAML_UID_ATTRIBUTE'] if ENV['SAML_UID_ATTRIBUTE']
saml_options[:allowed_clock_drift] = ENV['SAML_ALLOWED_CLOCK_DRIFT'] if ENV['SAML_ALLOWED_CLOCK_DRIFT']
config.omniauth :saml, saml_options config.omniauth :saml, saml_options
end end
end end

View file

@ -1,6 +1,46 @@
class RemoveFauxRemoteAccountDuplicates < ActiveRecord::Migration[5.2] class RemoveFauxRemoteAccountDuplicates < ActiveRecord::Migration[5.2]
disable_ddl_transaction! disable_ddl_transaction!
class StreamEntry < ApplicationRecord
# Dummy class, to make migration possible across version changes
belongs_to :account, inverse_of: :stream_entries
end
class Status < ApplicationRecord
# Dummy class, to make migration possible across version changes
belongs_to :account, inverse_of: :statuses
has_many :favourites, inverse_of: :status, dependent: :destroy
has_many :mentions, dependent: :destroy, inverse_of: :status
end
class Favourite < ApplicationRecord
# Dummy class, to make migration possible across version changes
belongs_to :account, inverse_of: :favourites
belongs_to :status, inverse_of: :favourites
end
class Mention < ApplicationRecord
# Dummy class, to make migration possible across version changes
belongs_to :account, inverse_of: :mentions
belongs_to :status
end
class Notification < ApplicationRecord
# Dummy class, to make migration possible across version changes
belongs_to :account, optional: true
belongs_to :from_account, class_name: 'Account', optional: true
belongs_to :activity, polymorphic: true, optional: true
end
class Account < ApplicationRecord
# Dummy class, to make migration possible across version changes
has_many :stream_entries, inverse_of: :account, dependent: :destroy
has_many :statuses, inverse_of: :account, dependent: :destroy
has_many :favourites, inverse_of: :account, dependent: :destroy
has_many :mentions, inverse_of: :account, dependent: :destroy
has_many :notifications, inverse_of: :account, dependent: :destroy
end
def up def up
local_domain = Rails.configuration.x.local_domain local_domain = Rails.configuration.x.local_domain

View file

@ -1,4 +1,9 @@
class AddInstanceActor < ActiveRecord::Migration[5.2] class AddInstanceActor < ActiveRecord::Migration[5.2]
class Account < ApplicationRecord
# Dummy class, to make migration possible across version changes
validates :username, uniqueness: { scope: :domain, case_sensitive: false }
end
def up def up
Account.create!(id: -99, actor_type: 'Application', locked: true, username: Rails.configuration.x.local_domain) Account.create!(id: -99, actor_type: 'Application', locked: true, username: Rails.configuration.x.local_domain)
end end

View file

@ -15,7 +15,13 @@ class AddCaseInsensitiveIndexToTags < ActiveRecord::Migration[5.2]
Tag.where(id: redundant_tag_ids).in_batches.delete_all Tag.where(id: redundant_tag_ids).in_batches.delete_all
end end
begin
safety_assured { execute 'CREATE UNIQUE INDEX CONCURRENTLY index_tags_on_name_lower ON tags (lower(name))' } safety_assured { execute 'CREATE UNIQUE INDEX CONCURRENTLY index_tags_on_name_lower ON tags (lower(name))' }
rescue ActiveRecord::StatementInvalid
remove_index :tags, name: 'index_tags_on_name_lower'
raise
end
remove_index :tags, name: 'index_tags_on_name' remove_index :tags, name: 'index_tags_on_name'
remove_index :tags, name: 'hashtag_search_index' remove_index :tags, name: 'hashtag_search_index'
end end

View file

@ -1,4 +1,8 @@
class UpdatePtLocales < ActiveRecord::Migration[5.2] class UpdatePtLocales < ActiveRecord::Migration[5.2]
class User < ApplicationRecord
# Dummy class, to make migration possible across version changes
end
disable_ddl_transaction! disable_ddl_transaction!
def up def up

View file

@ -1,16 +1,10 @@
require Rails.root.join('lib', 'mastodon', 'migration_helpers')
class AddFixedLowercaseIndexToAccounts < ActiveRecord::Migration[5.2] class AddFixedLowercaseIndexToAccounts < ActiveRecord::Migration[5.2]
include Mastodon::MigrationHelpers
disable_ddl_transaction! disable_ddl_transaction!
class CorruptionError < StandardError
def cause
nil
end
def backtrace
[]
end
end
def up def up
if index_name_exists?(:accounts, 'old_index_accounts_on_username_and_domain_lower') && index_name_exists?(:accounts, 'index_accounts_on_username_and_domain_lower') if index_name_exists?(:accounts, 'old_index_accounts_on_username_and_domain_lower') && index_name_exists?(:accounts, 'index_accounts_on_username_and_domain_lower')
remove_index :accounts, name: 'index_accounts_on_username_and_domain_lower' remove_index :accounts, name: 'index_accounts_on_username_and_domain_lower'
@ -21,7 +15,8 @@ class AddFixedLowercaseIndexToAccounts < ActiveRecord::Migration[5.2]
begin begin
add_index :accounts, "lower (username), COALESCE(lower(domain), '')", name: 'index_accounts_on_username_and_domain_lower', unique: true, algorithm: :concurrently add_index :accounts, "lower (username), COALESCE(lower(domain), '')", name: 'index_accounts_on_username_and_domain_lower', unique: true, algorithm: :concurrently
rescue ActiveRecord::RecordNotUnique rescue ActiveRecord::RecordNotUnique
raise CorruptionError, 'Migration failed because of index corruption, see https://docs.joinmastodon.org/admin/troubleshooting/index-corruption/#fixing' remove_index :accounts, name: 'index_accounts_on_username_and_domain_lower'
raise CorruptionError
end end
remove_index :accounts, name: 'old_index_accounts_on_username_and_domain_lower' if index_name_exists?(:accounts, 'old_index_accounts_on_username_and_domain_lower') remove_index :accounts, name: 'old_index_accounts_on_username_and_domain_lower' if index_name_exists?(:accounts, 'old_index_accounts_on_username_and_domain_lower')

View file

@ -43,7 +43,7 @@ services:
web: web:
build: . build: .
image: tootsuite/mastodon image: tootsuite/mastodon:v3.3.1
restart: always restart: always
env_file: .env.production env_file: .env.production
command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000" command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@ -63,7 +63,7 @@ services:
streaming: streaming:
build: . build: .
image: tootsuite/mastodon image: tootsuite/mastodon:v3.3.1
restart: always restart: always
env_file: .env.production env_file: .env.production
command: node ./streaming command: node ./streaming
@ -80,7 +80,7 @@ services:
sidekiq: sidekiq:
build: . build: .
image: tootsuite/mastodon image: tootsuite/mastodon:v3.3.1
restart: always restart: always
env_file: .env.production env_file: .env.production
command: bundle exec sidekiq command: bundle exec sidekiq

View file

@ -94,17 +94,22 @@ module Mastodon
exit(1) unless prompt.ask('Type in the domain of the server to confirm:', required: true) == Rails.configuration.x.local_domain exit(1) unless prompt.ask('Type in the domain of the server to confirm:', required: true) == Rails.configuration.x.local_domain
unless options[:dry_run]
prompt.warn('This operation WILL NOT be reversible. It can also take a long time.') prompt.warn('This operation WILL NOT be reversible. It can also take a long time.')
prompt.warn('While the data won\'t be erased locally, the server will be in a BROKEN STATE afterwards.') prompt.warn('While the data won\'t be erased locally, the server will be in a BROKEN STATE afterwards.')
prompt.warn('A running Sidekiq process is required. Do not shut it down until queues clear.') prompt.warn('A running Sidekiq process is required. Do not shut it down until queues clear.')
exit(1) if prompt.no?('Are you sure you want to proceed?') exit(1) if prompt.no?('Are you sure you want to proceed?')
end
inboxes = Account.inboxes inboxes = Account.inboxes
processed = 0 processed = 0
dry_run = options[:dry_run] ? ' (DRY RUN)' : '' dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
Setting.registrations_mode = 'none' unless options[:dry_run]
if inboxes.empty? if inboxes.empty?
Account.local.without_suspended.in_batches.update_all(suspended_at: Time.now.utc, suspension_origin: :local) unless options[:dry_run]
prompt.ok('It seems like your server has not federated with anything') prompt.ok('It seems like your server has not federated with anything')
prompt.ok('You can shut it down and delete it any time') prompt.ok('You can shut it down and delete it any time')
return return
@ -112,9 +117,7 @@ module Mastodon
prompt.warn('Do NOT interrupt this process...') prompt.warn('Do NOT interrupt this process...')
Setting.registrations_mode = 'none' delete_account = ->(account) do
Account.local.without_suspended.find_each do |account|
payload = ActiveModelSerializers::SerializableResource.new( payload = ActiveModelSerializers::SerializableResource.new(
account, account,
serializer: ActivityPub::DeleteActorSerializer, serializer: ActivityPub::DeleteActorSerializer,
@ -128,12 +131,15 @@ module Mastodon
[json, account.id, inbox_url] [json, account.id, inbox_url]
end end
account.suspend! account.suspend!(block_email: false)
end end
processed += 1 processed += 1
end end
Account.local.without_suspended.find_each { |account| delete_account.call(account) }
Account.local.suspended.joins(:deletion_request).find_each { |account| delete_account.call(account) }
prompt.ok("Queued #{inboxes.size * processed} items into Sidekiq for #{processed} accounts#{dry_run}") prompt.ok("Queued #{inboxes.size * processed} items into Sidekiq for #{processed} accounts#{dry_run}")
prompt.ok('Wait until Sidekiq processes all items, then you can shut everything down and delete the data') prompt.ok('Wait until Sidekiq processes all items, then you can shut everything down and delete the data')
rescue TTY::Reader::InputInterrupt rescue TTY::Reader::InputInterrupt

View file

@ -402,7 +402,7 @@ module Mastodon
exit(1) exit(1)
end end
parallelize_with_progress(target_account.followers.local) do |account| processed, = parallelize_with_progress(target_account.followers.local) do |account|
UnfollowService.new.call(account, target_account) UnfollowService.new.call(account, target_account)
end end

View file

@ -43,8 +43,13 @@ module Mastodon
tar.each do |entry| tar.each do |entry|
next unless entry.file? && entry.full_name.end_with?('.png') next unless entry.file? && entry.full_name.end_with?('.png')
shortcode = [options[:prefix], File.basename(entry.full_name, '.*'), options[:suffix]].compact.join filename = File.basename(entry.full_name, '.*')
custom_emoji = CustomEmoji.local.find_by(shortcode: shortcode)
# Skip macOS shadow files
next if filename.start_with?('._')
shortcode = [options[:prefix], filename, options[:suffix]].compact.join
custom_emoji = CustomEmoji.local.find_by("LOWER(shortcode) = ?", shortcode.downcase)
if custom_emoji && !options[:overwrite] if custom_emoji && !options[:overwrite]
skipped += 1 skipped += 1

View file

@ -14,7 +14,7 @@ module Mastodon
end end
MIN_SUPPORTED_VERSION = 2019_10_01_213028 MIN_SUPPORTED_VERSION = 2019_10_01_213028
MAX_SUPPORTED_VERSION = 2020_12_18_054746 MAX_SUPPORTED_VERSION = 2021_03_08_133107
# Stubs to enjoy ActiveRecord queries while not depending on a particular # Stubs to enjoy ActiveRecord queries while not depending on a particular
# version of the code/database # version of the code/database
@ -142,7 +142,6 @@ module Mastodon
@prompt.warn 'Please make sure to stop Mastodon and have a backup.' @prompt.warn 'Please make sure to stop Mastodon and have a backup.'
exit(1) unless @prompt.yes?('Continue?') exit(1) unless @prompt.yes?('Continue?')
deduplicate_accounts!
deduplicate_users! deduplicate_users!
deduplicate_account_domain_blocks! deduplicate_account_domain_blocks!
deduplicate_account_identity_proofs! deduplicate_account_identity_proofs!
@ -157,9 +156,11 @@ module Mastodon
deduplicate_media_attachments! deduplicate_media_attachments!
deduplicate_preview_cards! deduplicate_preview_cards!
deduplicate_statuses! deduplicate_statuses!
deduplicate_accounts!
deduplicate_tags! deduplicate_tags!
deduplicate_webauthn_credentials! deduplicate_webauthn_credentials!
Scenic.database.refresh_materialized_view('instances', concurrently: true, cascade: false) if ActiveRecord::Migrator.current_version >= 2020_12_06_004238
Rails.cache.clear Rails.cache.clear
@prompt.say 'Finished!' @prompt.say 'Finished!'
@ -188,6 +189,11 @@ module Mastodon
else else
ActiveRecord::Base.connection.add_index :accounts, "lower (username), COALESCE(lower(domain), '')", name: 'index_accounts_on_username_and_domain_lower', unique: true ActiveRecord::Base.connection.add_index :accounts, "lower (username), COALESCE(lower(domain), '')", name: 'index_accounts_on_username_and_domain_lower', unique: true
end end
@prompt.say 'Reindexing textual indexes on accounts…'
ActiveRecord::Base.connection.execute('REINDEX INDEX search_index;')
ActiveRecord::Base.connection.execute('REINDEX INDEX index_accounts_on_uri;')
ActiveRecord::Base.connection.execute('REINDEX INDEX index_accounts_on_url;')
end end
def deduplicate_users! def deduplicate_users!

View file

@ -41,6 +41,20 @@
module Mastodon module Mastodon
module MigrationHelpers module MigrationHelpers
class CorruptionError < StandardError
def initialize(message = nil)
super(message.presence || 'Migration failed because of index corruption, see https://docs.joinmastodon.org/admin/troubleshooting/index-corruption/#fixing')
end
def cause
nil
end
def backtrace
[]
end
end
# Stub for Database.postgresql? from GitLab # Stub for Database.postgresql? from GitLab
def self.postgresql? def self.postgresql?
ActiveRecord::Base.configurations[Rails.env]['adapter'].casecmp('postgresql').zero? ActiveRecord::Base.configurations[Rails.env]['adapter'].casecmp('postgresql').zero?
@ -315,7 +329,7 @@ module Mastodon
table = Arel::Table.new(table_name) table = Arel::Table.new(table_name)
total = estimate_rows_in_table(table_name).to_i total = estimate_rows_in_table(table_name).to_i
if total == 0 if total < 1
count_arel = table.project(Arel.star.count.as('count')) count_arel = table.project(Arel.star.count.as('count'))
count_arel = yield table, count_arel if block_given? count_arel = yield table, count_arel if block_given?

View file

@ -13,7 +13,7 @@ module Mastodon
end end
def patch def patch
0 1
end end
def flags def flags

View file

@ -2,6 +2,10 @@
module Paperclip module Paperclip
module AttachmentExtensions module AttachmentExtensions
def meta
instance_read(:meta)
end
# We overwrite this method to support delayed processing in # We overwrite this method to support delayed processing in
# Sidekiq. Since we process the original file to reduce disk # Sidekiq. Since we process the original file to reduce disk
# usage, and we still want to generate thumbnails straight # usage, and we still want to generate thumbnails straight

View file

@ -100,7 +100,8 @@ end
module Paperclip module Paperclip
# This transcoder is only to be used for the MediaAttachment model # This transcoder is only to be used for the MediaAttachment model
# to convert animated gifs to webm # to convert animated GIFs to videos
class GifTranscoder < Paperclip::Processor class GifTranscoder < Paperclip::Processor
def make def make
return File.open(@file.path) unless needs_convert? return File.open(@file.path) unless needs_convert?

View file

@ -31,21 +31,17 @@ module Paperclip
private private
def extract_image_from_file! def extract_image_from_file!
::Av.logger = Paperclip.logger
cli = ::Av.cli
dst = Tempfile.new([File.basename(@file.path, '.*'), '.png']) dst = Tempfile.new([File.basename(@file.path, '.*'), '.png'])
dst.binmode dst.binmode
cli.add_source(@file.path)
cli.add_destination(dst.path)
cli.add_output_param loglevel: 'fatal'
begin begin
cli.run command = Terrapin::CommandLine.new('ffmpeg', '-i :source -loglevel :loglevel -y :destination', logger: Paperclip.logger)
rescue Cocaine::ExitStatusError, ::Av::CommandError command.run(source: @file.path, destination: dst.path, loglevel: 'fatal')
rescue Terrapin::ExitStatusError
dst.close(true) dst.close(true)
return nil return nil
rescue Terrapin::CommandNotFoundError
raise Paperclip::Errors::CommandNotFoundError, 'Could not run the `ffmpeg` command. Please install ffmpeg.'
end end
dst dst

View file

@ -17,9 +17,9 @@ module Paperclip
def cache_current_values def cache_current_values
@original_filename = filename_from_content_disposition.presence || filename_from_path.presence || 'data' @original_filename = filename_from_content_disposition.presence || filename_from_path.presence || 'data'
@size = @target.response.content_length
@tempfile = copy_to_tempfile(@target) @tempfile = copy_to_tempfile(@target)
@content_type = ContentTypeDetector.new(@tempfile.path).detect @content_type = ContentTypeDetector.new(@tempfile.path).detect
@size = File.size(@tempfile)
end end
def copy_to_tempfile(source) def copy_to_tempfile(source)

102 lib/paperclip/transcoder.rb Normal file
View file

@ -0,0 +1,102 @@
# frozen_string_literal: true
module Paperclip
# This transcoder is only to be used for the MediaAttachment model
# to check when uploaded videos are actually gifv's
class Transcoder < Paperclip::Processor
def initialize(file, options = {}, attachment = nil)
super
@current_format = File.extname(@file.path)
@basename = File.basename(@file.path, @current_format)
@format = options[:format]
@time = options[:time] || 3
@passthrough_options = options[:passthrough_options]
@convert_options = options[:convert_options].dup
end
def make
metadata = VideoMetadataExtractor.new(@file.path)
unless metadata.valid?
log("Unsupported file #{@file.path}")
return File.open(@file.path)
end
update_attachment_type(metadata)
update_options_from_metadata(metadata)
destination = Tempfile.new([@basename, @format ? ".#{@format}" : ''])
destination.binmode
@output_options = @convert_options[:output]&.dup || {}
@input_options = @convert_options[:input]&.dup || {}
case @format.to_s
when /jpg$/, /jpeg$/, /png$/, /gif$/
@input_options['ss'] = @time
@output_options['f'] = 'image2'
@output_options['vframes'] = 1
when 'mp4'
@output_options['acodec'] = 'aac'
@output_options['strict'] = 'experimental'
end
command_arguments, interpolations = prepare_command(destination)
begin
command = Terrapin::CommandLine.new('ffmpeg', command_arguments.join(' '), logger: Paperclip.logger)
command.run(interpolations)
rescue Terrapin::ExitStatusError => e
raise Paperclip::Error, "Error while transcoding #{@basename}: #{e}"
rescue Terrapin::CommandNotFoundError
raise Paperclip::Errors::CommandNotFoundError, 'Could not run the `ffmpeg` command. Please install ffmpeg.'
end
destination
end
private
def prepare_command(destination)
command_arguments = ['-nostdin']
interpolations = {}
interpolation_keys = 0
@input_options.each_pair do |key, value|
interpolation_key = interpolation_keys
command_arguments << "-#{key} :#{interpolation_key}"
interpolations[interpolation_key] = value
interpolation_keys += 1
end
command_arguments << '-i :source'
interpolations[:source] = @file.path
@output_options.each_pair do |key, value|
interpolation_key = interpolation_keys
command_arguments << "-#{key} :#{interpolation_key}"
interpolations[interpolation_key] = value
interpolation_keys += 1
end
command_arguments << '-y :destination'
interpolations[:destination] = destination.path
[command_arguments, interpolations]
end
def update_options_from_metadata(metadata)
return unless @passthrough_options && @passthrough_options[:video_codecs].include?(metadata.video_codec) && @passthrough_options[:audio_codecs].include?(metadata.audio_codec) && @passthrough_options[:colorspaces].include?(metadata.colorspace)
@format = @passthrough_options[:options][:format] || @format
@time = @passthrough_options[:options][:time] || @time
@convert_options = @passthrough_options[:options][:convert_options].dup
end
def update_attachment_type(metadata)
@attachment.instance.type = MediaAttachment.types[:gifv] unless metadata.audio_codec
end
end
end
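
For context, a minimal Terrapin invocation using the same placeholder-interpolation style as prepare_command above; file names are made up, and it assumes the terrapin gem plus ffmpeg on the PATH:

  require 'logger'
  require 'terrapin'

  # Extract a single frame, mirroring the image output options built above.
  line = Terrapin::CommandLine.new(
    'ffmpeg',
    '-nostdin -i :source -loglevel :loglevel -f image2 -vframes 1 -y :destination',
    logger: Logger.new($stdout)
  )
  line.run(source: 'input.mp4', loglevel: 'fatal', destination: 'frame.png')
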

View file

@ -1,14 +0,0 @@
# frozen_string_literal: true
module Paperclip
module TranscoderExtensions
# Prevent the transcoder from modifying our meta hash
def initialize(file, options = {}, attachment = nil)
meta_value = attachment&.instance_read(:meta)
super
attachment&.instance_write(:meta, meta_value)
end
end
end
Paperclip::Transcoder.prepend(Paperclip::TranscoderExtensions)

View file

@ -1,26 +0,0 @@
# frozen_string_literal: true
module Paperclip
# This transcoder is only to be used for the MediaAttachment model
# to check when uploaded videos are actually gifv's
class VideoTranscoder < Paperclip::Processor
def make
movie = FFMPEG::Movie.new(@file.path)
attachment.instance.type = MediaAttachment.types[:gifv] unless movie.audio_codec
Paperclip::Transcoder.make(file, actual_options(movie), attachment)
end
private
def actual_options(movie)
opts = options[:passthrough_options]
if opts && opts[:video_codecs].include?(movie.video_codec) && opts[:audio_codecs].include?(movie.audio_codec) && opts[:colorspaces].include?(movie.colorspace)
opts[:options]
else
options
end
end
end
end

View file

@ -333,8 +333,12 @@ namespace :mastodon do
prompt.say 'This configuration will be written to .env.production' prompt.say 'This configuration will be written to .env.production'
if prompt.yes?('Save configuration?') if prompt.yes?('Save configuration?')
incompatible_syntax = false
env_contents = env.each_pair.map do |key, value| env_contents = env.each_pair.map do |key, value|
if value.is_a?(String) && value =~ /[\s\#\\"]/ if value.is_a?(String) && value =~ /[\s\#\\"]/
incompatible_syntax = true
if value =~ /[']/ if value =~ /[']/
value = value.to_s.gsub(/[\\"\$]/) { |x| "\\#{x}" } value = value.to_s.gsub(/[\\"\$]/) { |x| "\\#{x}" }
"#{key}=\"#{value}\"" "#{key}=\"#{value}\""
@ -346,12 +350,19 @@ namespace :mastodon do
end end
end.join("\n") end.join("\n")
File.write(Rails.root.join('.env.production'), "# Generated with mastodon:setup on #{Time.now.utc}\n\n" + env_contents + "\n") generated_header = "# Generated with mastodon:setup on #{Time.now.utc}\n\n".dup
if incompatible_syntax
generated_header << "# Some variables in this file will be interpreted differently whether you are\n"
generated_header << "# using docker-compose or not.\n\n"
end
File.write(Rails.root.join('.env.production'), "#{generated_header}#{env_contents}\n")
if using_docker if using_docker
prompt.ok 'Below is your configuration, save it to an .env.production file outside Docker:' prompt.ok 'Below is your configuration, save it to an .env.production file outside Docker:'
prompt.say "\n" prompt.say "\n"
prompt.say File.read(Rails.root.join('.env.production')) prompt.say "#{generated_header}#{env.each_pair.map { |key, value| "#{key}=#{value}" }.join("\n")}"
prompt.say "\n" prompt.say "\n"
prompt.ok 'It is also saved within this container so you can proceed with this wizard.' prompt.ok 'It is also saved within this container so you can proceed with this wizard.'
end end

181 lib/tasks/tests.rake Normal file
View file

@ -0,0 +1,181 @@
# frozen_string_literal: true
namespace :tests do
namespace :migrations do
desc 'Populate the database with test data for 2.0.0'
task populate_v2: :environment do
admin_key = OpenSSL::PKey::RSA.new(2048)
user_key = OpenSSL::PKey::RSA.new(2048)
remote_key = OpenSSL::PKey::RSA.new(2048)
remote_key2 = OpenSSL::PKey::RSA.new(2048)
remote_key3 = OpenSSL::PKey::RSA.new(2048)
admin_private_key = ActiveRecord::Base.connection.quote(admin_key.to_pem)
admin_public_key = ActiveRecord::Base.connection.quote(admin_key.public_key.to_pem)
user_private_key = ActiveRecord::Base.connection.quote(user_key.to_pem)
user_public_key = ActiveRecord::Base.connection.quote(user_key.public_key.to_pem)
remote_public_key = ActiveRecord::Base.connection.quote(remote_key.public_key.to_pem)
remote_public_key2 = ActiveRecord::Base.connection.quote(remote_key2.public_key.to_pem)
remote_public_key_ap = ActiveRecord::Base.connection.quote(remote_key3.public_key.to_pem)
local_domain = ActiveRecord::Base.connection.quote(Rails.configuration.x.local_domain)
ActiveRecord::Base.connection.execute(<<~SQL)
-- accounts
INSERT INTO "accounts"
(id, username, domain, private_key, public_key, created_at, updated_at)
VALUES
(1, 'admin', NULL, #{admin_private_key}, #{admin_public_key}, now(), now()),
(2, 'user', NULL, #{user_private_key}, #{user_public_key}, now(), now());
INSERT INTO "accounts"
(id, username, domain, private_key, public_key, created_at, updated_at, remote_url, salmon_url)
VALUES
(3, 'remote', 'remote.com', NULL, #{remote_public_key}, now(), now(),
'https://remote.com/@remote', 'https://remote.com/salmon/1'),
(4, 'Remote', 'remote.com', NULL, #{remote_public_key}, now(), now(),
'https://remote.com/@Remote', 'https://remote.com/salmon/1'),
(5, 'REMOTE', 'Remote.com', NULL, #{remote_public_key2}, now(), now(),
'https://remote.com/stale/@REMOTE', 'https://remote.com/stale/salmon/1');
INSERT INTO "accounts"
(id, username, domain, private_key, public_key, created_at, updated_at, protocol, inbox_url, outbox_url, followers_url)
VALUES
(6, 'bob', 'activitypub.com', NULL, #{remote_public_key_ap}, now(), now(),
1, 'https://activitypub.com/users/bob/inbox', 'https://activitypub.com/users/bob/outbox', 'https://activitypub.com/users/bob/followers');
INSERT INTO "accounts"
(id, username, domain, private_key, public_key, created_at, updated_at)
VALUES
(7, 'user', #{local_domain}, #{user_private_key}, #{user_public_key}, now(), now()),
(8, 'pt_user', NULL, #{user_private_key}, #{user_public_key}, now(), now());
-- users
INSERT INTO "users"
(id, account_id, email, created_at, updated_at, admin)
VALUES
(1, 1, 'admin@localhost', now(), now(), true),
(2, 2, 'user@localhost', now(), now(), false);
INSERT INTO "users"
(id, account_id, email, created_at, updated_at, admin, locale)
VALUES
(3, 7, 'ptuser@localhost', now(), now(), false, 'pt');
-- statuses
INSERT INTO "statuses"
(id, account_id, text, created_at, updated_at)
VALUES
(1, 1, 'test', now(), now()),
(2, 1, '@remote@remote.com hello', now(), now()),
(3, 1, '@Remote@remote.com hello', now(), now()),
(4, 1, '@REMOTE@remote.com hello', now(), now());
INSERT INTO "statuses"
(id, account_id, text, created_at, updated_at, uri, local)
VALUES
(5, 1, 'activitypub status', now(), now(), 'https://localhost/users/admin/statuses/4', true);
INSERT INTO "statuses"
(id, account_id, text, created_at, updated_at)
VALUES
(6, 3, 'test', now(), now());
INSERT INTO "statuses"
(id, account_id, text, created_at, updated_at, in_reply_to_id, in_reply_to_account_id)
VALUES
(7, 4, '@admin hello', now(), now(), 3, 1);
INSERT INTO "statuses"
(id, account_id, text, created_at, updated_at)
VALUES
(8, 5, 'test', now(), now());
INSERT INTO "statuses"
(id, account_id, reblog_of_id, created_at, updated_at)
VALUES
(9, 1, 2, now(), now());
-- mentions (from previous statuses)
INSERT INTO "mentions"
(status_id, account_id, created_at, updated_at)
VALUES
(2, 3, now(), now()),
(3, 4, now(), now()),
(4, 5, now(), now());
-- stream entries
INSERT INTO "stream_entries"
(activity_id, account_id, activity_type, created_at, updated_at)
VALUES
(1, 1, 'status', now(), now()),
(2, 1, 'status', now(), now()),
(3, 1, 'status', now(), now()),
(4, 1, 'status', now(), now()),
(5, 1, 'status', now(), now()),
(6, 3, 'status', now(), now()),
(7, 4, 'status', now(), now()),
(8, 5, 'status', now(), now()),
(9, 1, 'status', now(), now());
-- custom emoji
INSERT INTO "custom_emojis"
(shortcode, created_at, updated_at)
VALUES
('test', now(), now()),
('Test', now(), now()),
('blobcat', now(), now());
INSERT INTO "custom_emojis"
(shortcode, domain, uri, created_at, updated_at)
VALUES
('blobcat', 'remote.org', 'https://remote.org/emoji/blobcat', now(), now()),
('blobcat', 'Remote.org', 'https://remote.org/emoji/blobcat', now(), now()),
('Blobcat', 'remote.org', 'https://remote.org/emoji/Blobcat', now(), now());
-- favourites
INSERT INTO "favourites"
(account_id, status_id, created_at, updated_at)
VALUES
(1, 1, now(), now()),
(1, 7, now(), now()),
(4, 1, now(), now()),
(3, 1, now(), now()),
(5, 1, now(), now());
-- pinned statuses
INSERT INTO "status_pins"
(account_id, status_id, created_at, updated_at)
VALUES
(1, 1, now(), now()),
(3, 6, now(), now()),
(4, 7, now(), now());
-- follows
INSERT INTO "follows"
(account_id, target_account_id, created_at, updated_at)
VALUES
(1, 5, now(), now()),
(6, 2, now(), now()),
(5, 2, now(), now()),
(6, 1, now(), now());
-- follow requests
INSERT INTO "follow_requests"
(account_id, target_account_id, created_at, updated_at)
VALUES
(2, 5, now(), now()),
(5, 1, now(), now());
SQL
end
end
end

View file

@ -0,0 +1,63 @@
# frozen_string_literal: false
# Fix adapted from https://github.com/thoughtbot/terrapin/pull/5
module Terrapin
module MultiPipeExtensions
def read
read_streams(@stdout_in, @stderr_in)
end
def close_read
begin
@stdout_in.close
rescue IOError
# Do nothing
end
begin
@stderr_in.close
rescue IOError
# Do nothing
end
end
def read_streams(output, error)
@stdout_output = ''
@stderr_output = ''
read_fds = [output, error]
until read_fds.empty?
to_read, = IO.select(read_fds)
if to_read.include?(output)
@stdout_output << read_stream(output)
read_fds.delete(output) if output.closed?
end
if to_read.include?(error)
@stderr_output << read_stream(error)
read_fds.delete(error) if error.closed?
end
end
end
def read_stream(io)
result = ''
begin
while (partial_result = io.read_nonblock(8192))
result << partial_result
end
rescue EOFError, Errno::EPIPE
io.close
rescue Errno::EINTR, Errno::EWOULDBLOCK, Errno::EAGAIN
# Do nothing
end
result
end
end
end
Terrapin::CommandLine::MultiPipe.prepend(Terrapin::MultiPipeExtensions)

View file

@ -5,11 +5,13 @@ RSpec.describe ActivityPub::FollowersSynchronizationsController, type: :controll
let!(:follower_1) { Fabricate(:account, domain: 'example.com', uri: 'https://example.com/users/a') }
let!(:follower_2) { Fabricate(:account, domain: 'example.com', uri: 'https://example.com/users/b') }
let!(:follower_3) { Fabricate(:account, domain: 'foo.com', uri: 'https://foo.com/users/a') }
let!(:follower_4) { Fabricate(:account, username: 'instance-actor', domain: 'example.com', uri: 'https://example.com') }
before do
follower_1.follow!(account)
follower_2.follow!(account)
follower_3.follow!(account)
follower_4.follow!(account)
end
before do
@ -45,7 +47,7 @@ RSpec.describe ActivityPub::FollowersSynchronizationsController, type: :controll
it 'returns orderedItems with followers from example.com' do
expect(body[:orderedItems]).to be_an Array
- expect(body[:orderedItems].sort).to eq [follower_1.uri, follower_2.uri]
+ expect(body[:orderedItems].sort).to eq [follower_4.uri, follower_1.uri, follower_2.uri]
end
it 'returns private Cache-Control header' do
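follower_4 models an instance actor: an account whose URI is the bare domain root rather than a /users/ path, which the synchronization response is now expected to include alongside the other example.com followers. A hedged sketch of the kind of domain-scoped lookup the spec exercises (the controller's actual scope may be built differently):

```ruby
# Illustrative only: followers restricted to the requesting domain, which
# should now also cover the instance actor sitting at the domain root.
account.followers.where(domain: 'example.com').pluck(:uri)
```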

View file

@ -0,0 +1,48 @@
require 'rails_helper'
describe Api::V1::Accounts::NotesController do
render_views
let(:user) { Fabricate(:user, account: Fabricate(:account, username: 'alice')) }
let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'write:accounts') }
let(:account) { Fabricate(:account) }
let(:comment) { 'foo' }
before do
allow(controller).to receive(:doorkeeper_token) { token }
end
describe 'POST #create' do
subject do
post :create, params: { account_id: account.id, comment: comment }
end
context 'when account note has reasonable length' do
let(:comment) { 'foo' }
it 'returns http success' do
subject
expect(response).to have_http_status(200)
end
it 'updates account note' do
subject
expect(AccountNote.find_by(account_id: user.account.id, target_account_id: account.id).comment).to eq comment
end
end
context 'when account note exceeds allowed length' do
let(:comment) { 'a' * 2_001 }
it 'returns 422' do
subject
expect(response).to have_http_status(422)
end
it 'does not create account note' do
subject
expect(AccountNote.where(account_id: user.account.id, target_account_id: account.id).exists?).to be_falsey
end
end
end
end
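These examples hinge on a single length cap: a short comment is persisted, while a 2,001-character comment is rejected with 422 and nothing is saved. A minimal sketch of the model-level validation that would produce this behaviour (assumed for illustration; the real AccountNote model may define it differently):

```ruby
# Hypothetical shape of the validation the spec above exercises.
class AccountNote < ApplicationRecord
  belongs_to :account
  belongs_to :target_account, class_name: 'Account'

  validates :comment, length: { maximum: 2_000 }
end
```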

View file

@ -268,6 +268,34 @@ RSpec.describe Api::V1::AccountsController, type: :controller do
it_behaves_like 'forbidden for wrong scope', 'read:accounts'
end
describe 'POST #mute with nonzero duration set' do
let(:scopes) { 'write:mutes' }
let(:other_account) { Fabricate(:user, email: 'bob@example.com', account: Fabricate(:account, username: 'bob')).account }
before do
user.account.follow!(other_account)
post :mute, params: { id: other_account.id, duration: 300 }
end
it 'returns http success' do
expect(response).to have_http_status(200)
end
it 'does not remove the following relation between user and target user' do
expect(user.account.following?(other_account)).to be true
end
it 'creates a muting relation' do
expect(user.account.muting?(other_account)).to be true
end
it 'mutes notifications' do
expect(user.account.muting_notifications?(other_account)).to be true
end
it_behaves_like 'forbidden for wrong scope', 'read:accounts'
end
describe 'POST #unmute' do
let(:scopes) { 'write:mutes' }
let(:other_account) { Fabricate(:user, email: 'bob@example.com', account: Fabricate(:account, username: 'bob')).account }
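The new #mute examples above assert that passing a duration behaves like a regular mute: the follow survives, a muting relationship exists, and notifications are muted. A rough sketch of the call the controller is expected to make on the account (the keyword names are an assumption, not copied from the codebase):

```ruby
# Illustrative only: a time-limited mute that also silences notifications.
user.account.mute!(other_account, notifications: true, duration: 300)
user.account.muting?(other_account)               # => true
user.account.muting_notifications?(other_account) # => true
```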

View file

@ -4,16 +4,31 @@ RSpec.describe Api::V1::AppsController, type: :controller do
render_views
describe 'POST #create' do
- before do
- post :create, params: { client_name: 'Test app', redirect_uris: 'urn:ietf:wg:oauth:2.0:oob' }
+ let(:client_name) { 'Test app' }
+ let(:scopes) { nil }
let(:redirect_uris) { 'urn:ietf:wg:oauth:2.0:oob' }
let(:website) { nil }
let(:app_params) do
{
client_name: client_name,
redirect_uris: redirect_uris,
scopes: scopes,
website: website,
}
end
before do
post :create, params: app_params
end
context 'with valid params' do
it 'returns http success' do
expect(response).to have_http_status(200)
end
it 'creates an OAuth app' do
- expect(Doorkeeper::Application.find_by(name: 'Test app')).to_not be nil
+ expect(Doorkeeper::Application.find_by(name: client_name)).to_not be nil
end
it 'returns client ID and client secret' do
@ -23,4 +38,49 @@ RSpec.describe Api::V1::AppsController, type: :controller do
expect(json[:client_secret]).to_not be_blank
end
end
context 'with an unsupported scope' do
let(:scopes) { 'hoge' }
it 'returns http unprocessable entity' do
expect(response).to have_http_status(422)
end
end
context 'with many duplicate scopes' do
let(:scopes) { (%w(read) * 40).join(' ') }
it 'returns http success' do
expect(response).to have_http_status(200)
end
it 'only saves the scope once' do
expect(Doorkeeper::Application.find_by(name: client_name).scopes.to_s).to eq 'read'
end
end
context 'with a too-long name' do
let(:client_name) { 'hoge' * 20 }
it 'returns http unprocessable entity' do
expect(response).to have_http_status(422)
end
end
context 'with a too-long website' do
let(:website) { 'https://foo.bar/' + ('hoge' * 2_000) }
it 'returns http unprocessable entity' do
expect(response).to have_http_status(422)
end
end
context 'with a too-long redirect_uris' do
let(:redirect_uris) { 'https://foo.bar/' + ('hoge' * 2_000) }
it 'returns http unprocessable entity' do
expect(response).to have_http_status(422)
end
end
end
end
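The duplicate-scopes case relies on scope parsing collapsing repeats instead of rejecting the request. A small sketch of that behaviour using Doorkeeper's scope helper (illustrative; the application may normalize scopes elsewhere):

```ruby
require 'doorkeeper'

# Forty copies of "read" parse down to a single scope entry.
scopes = Doorkeeper::OAuth::Scopes.from_string((%w(read) * 40).join(' '))
scopes.to_s # => "read"
```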

Some files were not shown because too many files have changed in this diff.