Merge tag 'v3.2.1'

Mike Barnes 2020-11-15 22:32:36 +11:00
commit 0c115ff63f
690 changed files with 20992 additions and 7108 deletions

View file

@@ -12,6 +12,7 @@ require_relative 'mastodon/domains_cli'
require_relative 'mastodon/preview_cards_cli'
require_relative 'mastodon/cache_cli'
require_relative 'mastodon/upgrade_cli'
require_relative 'mastodon/email_domain_blocks_cli'
require_relative 'mastodon/version'
module Mastodon
@@ -53,6 +54,9 @@ module Mastodon
desc 'upgrade SUBCOMMAND ...ARGS', 'Various version upgrade utilities'
subcommand 'upgrade', Mastodon::UpgradeCLI
desc 'email_domain_blocks SUBCOMMAND ...ARGS', 'Manage e-mail domain blocks'
subcommand 'email_domain_blocks', Mastodon::EmailDomainBlocksCLI
option :dry_run, type: :boolean
desc 'self-destruct', 'Erase the server from the federation'
long_desc <<~LONG_DESC

View file

@@ -7,6 +7,7 @@ ActiveRecord::Base.logger = dev_null
ActiveJob::Base.logger = dev_null
HttpLog.configuration.logger = dev_null
Paperclip.options[:log] = false
Chewy.logger = dev_null
module Mastodon
module CLIHelper

View file

@@ -16,22 +16,22 @@ module Mastodon
option :concurrency, type: :numeric, default: 5, aliases: [:c]
option :verbose, type: :boolean, aliases: [:v]
option :dry_run, type: :boolean
option :whitelist_mode, type: :boolean
option :limited_federation_mode, type: :boolean
desc 'purge [DOMAIN...]', 'Remove accounts from a DOMAIN without a trace'
long_desc <<-LONG_DESC
Remove all accounts from a given DOMAIN without leaving behind any
records. Unlike a suspension, if the DOMAIN still exists in the wild,
it means the accounts could return if they are resolved again.
When the --whitelist-mode option is given, instead of purging accounts
from a single domain, all accounts from domains that are not whitelisted
When the --limited-federation-mode option is given, instead of purging accounts
from a single domain, all accounts from domains that have not been explicitly allowed
are removed from the database.
LONG_DESC
def purge(*domains)
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
scope = begin
if options[:whitelist_mode]
if options[:limited_federation_mode]
Account.remote.where.not(domain: DomainAllow.pluck(:domain))
elsif !domains.empty?
Account.remote.where(domain: domains)
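In practice the renamed flag is exercised through Mastodon's tootctl wrapper; hypothetical invocations (using the --dry-run option defined above to preview):

bin/tootctl domains purge example.com --dry-run
bin/tootctl domains purge --limited-federation-mode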

View file

@@ -0,0 +1,138 @@
# frozen_string_literal: true
require 'concurrent'
require_relative '../../config/boot'
require_relative '../../config/environment'
require_relative 'cli_helper'
module Mastodon
class EmailDomainBlocksCLI < Thor
include CLIHelper
def self.exit_on_failure?
true
end
desc 'list', 'List blocked e-mail domains'
def list
EmailDomainBlock.where(parent_id: nil).order(id: 'DESC').find_each do |entry|
say(entry.domain.to_s, :white)
EmailDomainBlock.where(parent_id: entry.id).order(id: 'DESC').find_each do |child|
say(" #{child.domain}", :cyan)
end
end
end
option :with_dns_records, type: :boolean
desc 'add DOMAIN...', 'Block e-mail domain(s)'
long_desc <<-LONG_DESC
Blocking an e-mail domain prevents users from signing up
with e-mail addresses from that domain. You can provide one or
multiple domains to the command.
When the --with-dns-records option is given, an attempt to resolve the
given domains' DNS records will be made and the results (A, AAAA and MX) will
also be blocked. This can be helpful if you are blocking an e-mail server that
has many different domains pointing to it as it allows you to essentially block
it at the root.
LONG_DESC
def add(*domains)
if domains.empty?
say('No domain(s) given', :red)
exit(1)
end
skipped = 0
processed = 0
domains.each do |domain|
if EmailDomainBlock.where(domain: domain).exists?
say("#{domain} is already blocked.", :yellow)
skipped += 1
next
end
email_domain_block = EmailDomainBlock.new(domain: domain, with_dns_records: options[:with_dns_records] || false)
email_domain_block.save!
processed += 1
next unless email_domain_block.with_dns_records?
hostnames = []
ips = []
Resolv::DNS.open do |dns|
dns.timeouts = 1
hostnames = dns.getresources(email_domain_block.domain, Resolv::DNS::Resource::IN::MX).to_a.map { |e| e.exchange.to_s }
([email_domain_block.domain] + hostnames).uniq.each do |hostname|
ips.concat(dns.getresources(hostname, Resolv::DNS::Resource::IN::A).to_a.map { |e| e.address.to_s })
ips.concat(dns.getresources(hostname, Resolv::DNS::Resource::IN::AAAA).to_a.map { |e| e.address.to_s })
end
end
(hostnames + ips).uniq.each do |hostname|
another_email_domain_block = EmailDomainBlock.new(domain: hostname, parent: email_domain_block)
if EmailDomainBlock.where(domain: hostname).exists?
say("#{hostname} is already blocked.", :yellow)
skipped += 1
next
end
another_email_domain_block.save!
processed += 1
end
end
say("Added #{processed}, skipped #{skipped}", color(processed, 0))
end
desc 'remove DOMAIN...', 'Remove e-mail domain blocks'
def remove(*domains)
if domains.empty?
say('No domain(s) given', :red)
exit(1)
end
skipped = 0
processed = 0
failed = 0
domains.each do |domain|
entry = EmailDomainBlock.find_by(domain: domain)
if entry.nil?
say("#{domain} is not yet blocked.", :yellow)
skipped += 1
next
end
children_count = EmailDomainBlock.where(parent_id: entry.id).count
result = entry.destroy
if result
processed += 1 + children_count
else
say("#{domain} could not be unblocked.", :red)
failed += 1
end
end
say("Removed #{processed}, skipped #{skipped}, failed #{failed}", color(processed, failed))
end
private
def color(processed, failed)
if !processed.zero? && failed.zero?
:green
elsif failed.zero?
:yellow
else
:red
end
end
end
end
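Taken together, the new CLI surfaces as subcommands of tootctl; hypothetical invocations (spam.example is a placeholder domain):

bin/tootctl email_domain_blocks list
bin/tootctl email_domain_blocks add spam.example --with-dns-records
bin/tootctl email_domain_blocks remove spam.example

With --with-dns-records, the MX/A/AAAA results resolved above are stored as child blocks (their parent_id pointing at the original entry), which is why list prints them indented beneath their parent.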

View file

@@ -31,10 +31,11 @@ module Mastodon
processed, aggregate = parallelize_with_progress(MediaAttachment.cached.where.not(remote_url: '').where('created_at < ?', time_ago)) do |media_attachment|
next if media_attachment.file.blank?
size = media_attachment.file_file_size
size = (media_attachment.file_file_size || 0) + (media_attachment.thumbnail_file_size || 0)
unless options[:dry_run]
media_attachment.file.destroy
media_attachment.thumbnail.destroy
media_attachment.save
end
@@ -88,6 +89,11 @@ module Mastodon
path_segments = object.key.split('/')
path_segments.delete('cache')
unless [7, 10].include?(path_segments.size)
progress.log(pastel.yellow("Unrecognized file found: #{object.key}"))
next
end
model_name = path_segments.first.classify
attachment_name = path_segments[1].singularize
record_id = path_segments[2..-2].join.to_i
@@ -127,6 +133,11 @@ module Mastodon
path_segments = key.split(File::SEPARATOR)
path_segments.delete('cache')
unless [7, 10].include?(path_segments.size)
progress.log(pastel.yellow("Unrecognized file found: #{key}"))
next
end
model_name = path_segments.first.classify
record_id = path_segments[2..-2].join.to_i
attachment_name = path_segments[1].singularize
@@ -217,11 +228,12 @@ module Mastodon
next if media_attachment.remote_url.blank? || (!options[:force] && media_attachment.file_file_name.present?)
unless options[:dry_run]
media_attachment.file_remote_url = media_attachment.remote_url
media_attachment.reset_file!
media_attachment.reset_thumbnail!
media_attachment.save
end
media_attachment.file_file_size
media_attachment.file_file_size + (media_attachment.thumbnail_file_size || 0)
end
say("Downloaded #{processed} media attachments (approx. #{number_to_human_size(aggregate)})#{dry_run}", :green, true)
@@ -229,7 +241,7 @@ module Mastodon
desc 'usage', 'Calculate disk space consumed by Mastodon'
def usage
say("Attachments:\t#{number_to_human_size(MediaAttachment.sum(:file_file_size))} (#{number_to_human_size(MediaAttachment.where(account: Account.local).sum(:file_file_size))} local)")
say("Attachments:\t#{number_to_human_size(MediaAttachment.sum(Arel.sql('COALESCE(file_file_size, 0) + COALESCE(thumbnail_file_size, 0)')))} (#{number_to_human_size(MediaAttachment.where(account: Account.local).sum(Arel.sql('COALESCE(file_file_size, 0) + COALESCE(thumbnail_file_size, 0)')))} local)")
say("Custom emoji:\t#{number_to_human_size(CustomEmoji.sum(:image_file_size))} (#{number_to_human_size(CustomEmoji.local.sum(:image_file_size))} local)")
say("Preview cards:\t#{number_to_human_size(PreviewCard.sum(:image_file_size))}")
say("Avatars:\t#{number_to_human_size(Account.sum(:avatar_file_size))} (#{number_to_human_size(Account.local.sum(:avatar_file_size))} local)")
@@ -246,6 +258,11 @@ module Mastodon
path_segments = path.split('/')[2..-1]
path_segments.delete('cache')
unless [7, 10].include?(path_segments.size)
say('Not a media URL', :red)
exit(1)
end
model_name = path_segments.first.classify
record_id = path_segments[2..-2].join.to_i
@@ -294,6 +311,8 @@ module Mastodon
segments = object.key.split('/')
segments.delete('cache')
next unless [7, 10].include?(segments.size)
model_name = segments.first.classify
record_id = segments[2..-2].join.to_i
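The [7, 10] size checks added in these hunks guard against foreign files in the storage bucket. Attachment keys follow Paperclip's model/attachment/:id_partition/style/filename layout; a sketch of the two accepted shapes (hypothetical keys, assuming 9-digit IDs partitioned into three triplets and longer snowflake IDs into six):

media_attachments/files/000/000/001/original/image.png                # 7 segments
media_attachments/files/109/348/298/123/456/789/original/image.png   # 10 segments

Anything else is logged as unrecognized and skipped rather than parsed into a bogus model name and record ID.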

View file

@@ -6,8 +6,19 @@ require_relative 'cli_helper'
module Mastodon
class SearchCLI < Thor
option :processes, default: 2, aliases: [:p]
desc 'deploy', 'Create or update an ElasticSearch index and populate it'
include CLIHelper
# Indices are sorted by amount of data to be expected in each, so that
# smaller indices can go online sooner
INDICES = [
AccountsIndex,
TagsIndex,
StatusesIndex,
].freeze
option :concurrency, type: :numeric, default: 2, aliases: [:c], desc: 'Workload will be split between this number of threads'
option :only, type: :array, enum: %w(accounts tags statuses), desc: 'Only process these indices'
desc 'deploy', 'Create or upgrade ElasticSearch indices and populate them'
long_desc <<~LONG_DESC
If ElasticSearch is empty, this command will create the necessary indices
and then import data from the database into those indices.
@@ -15,27 +26,126 @@ module Mastodon
This command will also upgrade indices if the underlying schema has been
changed since the last run.
With the --processes option, parallelize execution of the command. The
default is 2. If "auto" is specified, the number is automatically
derived from available CPUs.
Even if creating or upgrading indices is not necessary, data from the
database will be imported into the indices.
LONG_DESC
def deploy
processed = Chewy::RakeHelper.upgrade(parallel: processes)
Chewy::RakeHelper.sync(except: processed, parallel: processes)
end
private
def processes
return true if options[:processes] == 'auto'
num = options[:processes].to_i
if num < 2
nil
else
num
if options[:concurrency] < 1
say('Cannot run with this concurrency setting, must be at least 1', :red)
exit(1)
end
indices = begin
if options[:only]
options[:only].map { |str| "#{str.camelize}Index".constantize }
else
INDICES
end
end
progress = ProgressBar.create(total: nil, format: '%t%c/%u |%b%i| %e (%r docs/s)', autofinish: false)
# First, ensure all indices are created and have the correct
# structure, so that live data can already be written
indices.select { |index| index.specification.changed? }.each do |index|
progress.title = "Upgrading #{index} "
index.purge
index.specification.lock!
end
ActiveRecord::Base.configurations[Rails.env]['pool'] = options[:concurrency] + 1
pool = Concurrent::FixedThreadPool.new(options[:concurrency])
added = Concurrent::AtomicFixnum.new(0)
removed = Concurrent::AtomicFixnum.new(0)
progress.title = 'Estimating workload '
# Estimate the amount of data that has to be imported first
indices.each do |index|
index.types.each do |type|
progress.total = (progress.total || 0) + type.adapter.default_scope.count
end
end
# Now import all the actual data. Mind that unlike chewy:sync, we don't
# fetch and compare all record IDs from the database and the index to
# find out which to add and which to remove from the index. Because with
# potentially millions of rows, the memory footprint of such a calculation
# is uneconomical. So we only ever add.
indices.each do |index|
progress.title = "Importing #{index} "
batch_size = 1_000
slice_size = (batch_size / options[:concurrency]).ceil
index.types.each do |type|
type.adapter.default_scope.reorder(nil).find_in_batches(batch_size: batch_size) do |batch|
futures = []
batch.each_slice(slice_size) do |records|
futures << Concurrent::Future.execute(executor: pool) do
begin
if !progress.total.nil? && progress.progress + records.size > progress.total
# The number of items has changed between start and now,
# since there is no good way to predict the final count from
# here, just change the progress bar to an indeterminate one
progress.total = nil
end
grouped_records = nil
bulk_body = nil
index_count = 0
delete_count = 0
ActiveRecord::Base.connection_pool.with_connection do
grouped_records = type.adapter.send(:grouped_objects, records)
bulk_body = Chewy::Type::Import::BulkBuilder.new(type, grouped_records).bulk_body
end
index_count = grouped_records[:index].size if grouped_records.key?(:index)
delete_count = grouped_records[:delete].size if grouped_records.key?(:delete)
# The following is an optimization for statuses specifically, since
# we want to de-index statuses that cannot be searched by anybody,
# but can't use Chewy's delete_if logic because it doesn't use
# crutches and our searchable_by logic depends on them
if type == StatusesIndex::Status
bulk_body.map! do |entry|
if entry[:index] && entry.dig(:index, :data, 'searchable_by').blank?
index_count -= 1
delete_count += 1
{ delete: entry[:index].except(:data) }
else
entry
end
end
end
Chewy::Type::Import::BulkRequest.new(type).perform(bulk_body)
progress.progress += records.size
added.increment(index_count)
removed.increment(delete_count)
sleep 1
rescue => e
progress.log pastel.red("Error importing #{index}: #{e}")
end
end
end
futures.map(&:value)
end
end
end
progress.title = ''
progress.stop
say("Indexed #{added.value} records, de-indexed #{removed.value}", :green, true)
end
end
end
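A hypothetical invocation of the reworked command (flag names as defined above):

bin/tootctl search deploy --concurrency 4 --only statuses tags

The --only values are camelized and constantized ('statuses' becomes StatusesIndex), and the Active Record pool is grown to concurrency + 1 so each worker thread can check out its own database connection alongside the main thread.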

View file

@@ -41,23 +41,32 @@ module Mastodon
klass.find_each do |record|
attachment_names.each do |attachment_name|
attachment = record.public_send(attachment_name)
upgraded = false
next if attachment.blank? || attachment.storage_schema_version >= CURRENT_STORAGE_SCHEMA_VERSION
attachment.styles.each_key do |style|
case Paperclip::Attachment.default_options[:storage]
when :s3
upgrade_storage_s3(progress, attachment, style)
when :fog
upgrade_storage_fog(progress, attachment, style)
when :filesystem
upgrade_storage_filesystem(progress, attachment, style)
styles = attachment.styles.keys
styles << :original unless styles.include?(:original)
styles.each do |style|
success = begin
case Paperclip::Attachment.default_options[:storage]
when :s3
upgrade_storage_s3(progress, attachment, style)
when :fog
upgrade_storage_fog(progress, attachment, style)
when :filesystem
upgrade_storage_filesystem(progress, attachment, style)
end
end
upgraded = true if style == :original && success
progress.increment
end
attachment.instance_write(:storage_schema_version, CURRENT_STORAGE_SCHEMA_VERSION)
attachment.instance_write(:storage_schema_version, CURRENT_STORAGE_SCHEMA_VERSION) if upgraded
end
if record.changed?
@@ -78,18 +87,20 @@ module Mastodon
def upgrade_storage_s3(progress, attachment, style)
previous_storage_schema_version = attachment.storage_schema_version
object = attachment.s3_object(style)
success = true
attachment.instance_write(:storage_schema_version, CURRENT_STORAGE_SCHEMA_VERSION)
upgraded_path = attachment.path(style)
new_object = attachment.s3_object(style)
if upgraded_path != object.key && object.exists?
progress.log("Moving #{object.key} to #{upgraded_path}") if options[:verbose]
if new_object.key != object.key && object.exists?
progress.log("Moving #{object.key} to #{new_object.key}") if options[:verbose]
begin
object.move_to(upgraded_path) unless dry_run?
object.move_to(new_object, acl: attachment.s3_permissions(style)) unless dry_run?
rescue => e
progress.log(pastel.red("Error processing #{object.key}: #{e}"))
success = false
end
end
@@ -97,6 +108,7 @@ module Mastodon
# previous version at the end. The upgrade will be recorded after
# all styles are updated
attachment.instance_write(:storage_schema_version, previous_storage_schema_version)
success
end
def upgrade_storage_fog(_progress, _attachment, _style)
@@ -107,6 +119,7 @@ module Mastodon
def upgrade_storage_filesystem(progress, attachment, style)
previous_storage_schema_version = attachment.storage_schema_version
previous_path = attachment.path(style)
success = true
attachment.instance_write(:storage_schema_version, CURRENT_STORAGE_SCHEMA_VERSION)
@@ -128,6 +141,7 @@ module Mastodon
end
rescue => e
progress.log(pastel.red("Error processing #{previous_path}: #{e}"))
success = false
unless dry_run?
begin
@@ -143,6 +157,7 @@ module Mastodon
# previous version at the end. The upgrade will be recorded after
# all styles are updated
attachment.instance_write(:storage_schema_version, previous_storage_schema_version)
success
end
end
end

View file

@@ -9,11 +9,11 @@ module Mastodon
end
def minor
1
2
end
def patch
5
1
end
def flags

View file

@@ -7,7 +7,7 @@ module Paperclip
# usage, and we still want to generate thumbnails straight
# away, it's the only style we need to exclude
def process_style?(style_name, style_args)
if style_name == :original && instance.respond_to?(:delay_processing?) && instance.delay_processing?
if style_name == :original && instance.respond_to?(:delay_processing_for_attachment?) && instance.delay_processing_for_attachment?(name)
false
else
style_args.empty? || style_args.include?(style_name)

View file

@@ -3,7 +3,7 @@
module Paperclip
class BlurhashTranscoder < Paperclip::Processor
def make
return @file unless options[:style] == :small
return @file unless options[:style] == :small || options[:blurhash]
pixels = convert(':source RGB:-', source: File.expand_path(@file.path)).unpack('C*')
geometry = options.fetch(:file_geometry_parser).from_file(@file)

View file

@@ -0,0 +1,191 @@
# frozen_string_literal: true
require 'mime/types/columnar'
module Paperclip
class ColorExtractor < Paperclip::Processor
MIN_CONTRAST = 3.0
ACCENT_MIN_CONTRAST = 2.0
FREQUENCY_THRESHOLD = 0.01
def make
depth = 8
# Determine background palette by getting colors close to the image's edge only
background_palette = palette_from_histogram(convert(':source -alpha set -gravity Center -region 75%x75% -fill None -colorize 100% -alpha transparent +region -format %c -colors :quantity -depth :depth histogram:info:', source: File.expand_path(@file.path), quantity: 10, depth: depth), 10)
# Determine foreground palette from the whole image
foreground_palette = palette_from_histogram(convert(':source -format %c -colors :quantity -depth :depth histogram:info:', source: File.expand_path(@file.path), quantity: 10, depth: depth), 10)
background_color = background_palette.first || foreground_palette.first
foreground_colors = []
return @file if background_color.nil?
max_distance = 0
max_distance_color = nil
foreground_palette.each do |color|
distance = ColorDiff.between(background_color, color)
contrast = w3c_contrast(background_color, color)
if distance > max_distance && contrast >= ACCENT_MIN_CONTRAST
max_distance = distance
max_distance_color = color
end
end
foreground_colors << max_distance_color unless max_distance_color.nil?
max_distance = 0
max_distance_color = nil
foreground_palette.each do |color|
distance = ColorDiff.between(background_color, color)
contrast = w3c_contrast(background_color, color)
if distance > max_distance && contrast >= MIN_CONTRAST && !foreground_colors.include?(color)
max_distance = distance
max_distance_color = color
end
end
foreground_colors << max_distance_color unless max_distance_color.nil?
# If we don't have enough colors for accent and foreground, generate
# new ones by manipulating the background color
(2 - foreground_colors.size).times do |i|
foreground_colors << lighten_or_darken(background_color, 35 + (15 * i))
end
# We want the color with the highest contrast to background to be the foreground one,
# and the one with the highest saturation to be the accent one
foreground_color = foreground_colors.max_by { |rgb| w3c_contrast(background_color, rgb) }
accent_color = foreground_colors.max_by { |rgb| rgb_to_hsl(rgb.r, rgb.g, rgb.b)[1] }
meta = {
colors: {
background: rgb_to_hex(background_color),
foreground: rgb_to_hex(foreground_color),
accent: rgb_to_hex(accent_color),
},
}
attachment.instance.file.instance_write(:meta, (attachment.instance.file.instance_read(:meta) || {}).merge(meta))
@file
end
private
def w3c_contrast(color1, color2)
luminance1 = color1.to_xyz.y * 0.01 + 0.05
luminance2 = color2.to_xyz.y * 0.01 + 0.05
if luminance1 > luminance2
luminance1 / luminance2
else
luminance2 / luminance1
end
end
# rubocop:disable Style/MethodParameterName
def rgb_to_hsl(r, g, b)
r /= 255.0
g /= 255.0
b /= 255.0
max = [r, g, b].max
min = [r, g, b].min
h = (max + min) / 2.0
s = (max + min) / 2.0
l = (max + min) / 2.0
if max == min
h = 0
s = 0 # achromatic
else
d = max - min
s = l >= 0.5 ? d / (2.0 - max - min) : d / (max + min)
case max
when r
h = (g - b) / d + (g < b ? 6.0 : 0)
when g
h = (b - r) / d + 2.0
when b
h = (r - g) / d + 4.0
end
h /= 6.0
end
[(h * 360).round, (s * 100).round, (l * 100).round]
end
def hue_to_rgb(p, q, t)
t += 1 if t.negative?
t -= 1 if t > 1
return (p + (q - p) * 6 * t) if t < 1 / 6.0
return q if t < 1 / 2.0
return (p + (q - p) * (2 / 3.0 - t) * 6) if t < 2 / 3.0
p
end
def hsl_to_rgb(h, s, l)
h /= 360.0
s /= 100.0
l /= 100.0
r = 0.0
g = 0.0
b = 0.0
if s == 0.0
r = l.to_f
g = l.to_f
b = l.to_f # achromatic
else
q = l < 0.5 ? l * (1 + s) : l + s - l * s
p = 2 * l - q
r = hue_to_rgb(p, q, h + 1 / 3.0)
g = hue_to_rgb(p, q, h)
b = hue_to_rgb(p, q, h - 1 / 3.0)
end
[(r * 255).round, (g * 255).round, (b * 255).round]
end
# rubocop:enable Style/MethodParameterName
def lighten_or_darken(color, by)
hue, saturation, light = rgb_to_hsl(color.r, color.g, color.b)
light = begin
if light < 50
[100, light + by].min
else
[0, light - by].max
end
end
ColorDiff::Color::RGB.new(*hsl_to_rgb(hue, saturation, light))
end
def palette_from_histogram(result, quantity)
frequencies = result.scan(/([0-9]+)\:/).flatten.map(&:to_f)
hex_values = result.scan(/\#([0-9A-Fa-f]{6,8})/).flatten
total_frequencies = frequencies.reduce(&:+).to_f
frequencies.map.with_index { |f, i| [f / total_frequencies, hex_values[i]] }
.sort_by { |r| -r[0] }
.reject { |r| r[1].size == 8 && r[1].end_with?('00') }
.map { |r| ColorDiff::Color::RGB.new(*r[1][0..5].scan(/../).map { |c| c.to_i(16) }) }
.slice(0, quantity)
end
def rgb_to_hex(rgb)
'#%02x%02x%02x' % [rgb.r, rgb.g, rgb.b]
end
end
end
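For orientation, palette_from_histogram parses ImageMagick's histogram:info: output, which looks roughly like this (assumed format):

   1911: (223,223,223) #DFDFDF srgb(223,223,223)
    482: (0,0,0,0) #00000000 none

The frequencies are normalized, fully transparent entries (8-digit hex ending in 00) are rejected, and the remaining colors are returned as ColorDiff RGB values, most frequent first. w3c_contrast, for its part, computes the WCAG contrast ratio (Y_lighter + 0.05) / (Y_darker + 0.05), with Y taken from the XYZ color space and rescaled from 0-100 down to 0-1.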

View file

@@ -6,7 +6,7 @@ class GifReader
EXTENSION_LABELS = [0xf9, 0x01, 0xff].freeze
GIF_HEADERS = %w(GIF87a GIF89a).freeze
class GifReaderException; end
class GifReaderException < StandardError; end
class UnknownImageType < GifReaderException; end

View file

@@ -0,0 +1,54 @@
# frozen_string_literal: true
require 'mime/types/columnar'
module Paperclip
class ImageExtractor < Paperclip::Processor
def make
return @file unless options[:style] == :original
image = extract_image_from_file!
unless image.nil?
begin
attachment.instance.thumbnail = image if image.size.positive?
ensure
# Paperclip does not automatically delete the source file of
# a new attachment while working on copies of it, so we need
# to make sure it's cleaned up
begin
image.close(true)
rescue Errno::ENOENT
nil
end
end
end
@file
end
private
def extract_image_from_file!
::Av.logger = Paperclip.logger
cli = ::Av.cli
dst = Tempfile.new([File.basename(@file.path, '.*'), '.png'])
dst.binmode
cli.add_source(@file.path)
cli.add_destination(dst.path)
cli.add_output_param loglevel: 'fatal'
begin
cli.run
rescue Cocaine::ExitStatusError, ::Av::CommandError
dst.close(true)
return nil
end
dst
end
end
end

View file

@@ -0,0 +1,35 @@
# frozen_string_literal: true
module Paperclip
module MediaTypeSpoofDetectorExtensions
def mapping_override_mismatch?
!Array(mapped_content_type).include?(calculated_content_type) && !Array(mapped_content_type).include?(type_from_mime_magic)
end
def calculated_media_type_from_mime_magic
@calculated_media_type_from_mime_magic ||= type_from_mime_magic.split('/').first
end
def calculated_type_mismatch?
!media_types_from_name.include?(calculated_media_type) && !media_types_from_name.include?(calculated_media_type_from_mime_magic)
end
def type_from_mime_magic
@type_from_mime_magic ||= begin
begin
File.open(@file.path) do |file|
MimeMagic.by_magic(file)&.type || ''
end
rescue Errno::ENOENT
''
end
end
end
def type_from_file_command
@type_from_file_command ||= FileCommandContentTypeDetector.new(@file.path).detect
end
end
end
Paperclip::MediaTypeSpoofDetector.prepend(Paperclip::MediaTypeSpoofDetectorExtensions)

View file

@@ -0,0 +1,55 @@
# frozen_string_literal: true
module Paperclip
class ResponseWithLimitAdapter < AbstractAdapter
def self.register
Paperclip.io_adapters.register self do |target|
target.is_a?(ResponseWithLimit)
end
end
def initialize(target, options = {})
super
cache_current_values
end
private
def cache_current_values
@original_filename = filename_from_content_disposition || filename_from_path || 'data'
@size = @target.response.content_length
@tempfile = copy_to_tempfile(@target)
@content_type = ContentTypeDetector.new(@tempfile.path).detect
end
def copy_to_tempfile(source)
bytes_read = 0
source.response.body.each do |chunk|
bytes_read += chunk.bytesize
destination.write(chunk)
chunk.clear
raise Mastodon::LengthValidationError if bytes_read > source.limit
end
destination.rewind
destination
rescue Mastodon::LengthValidationError
destination.close(true)
raise
ensure
source.response.connection.close
end
def filename_from_content_disposition
disposition = @target.response.headers['content-disposition']
disposition&.match(/filename="([^"]*)"/)&.captures&.first
end
def filename_from_path
@target.response.uri.path.split('/').last
end
end
end
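The adapter expects a small value object pairing an HTTP response with a byte limit; a minimal sketch of the assumed shape (the real class lives elsewhere in the codebase):

class ResponseWithLimit
  attr_reader :response, :limit

  def initialize(response, limit)
    @response = response # streaming HTTP response (http.rb)
    @limit    = limit    # maximum number of bytes to read
  end
end

copy_to_tempfile then streams the body chunk by chunk and aborts with Mastodon::LengthValidationError as soon as the limit is exceeded, instead of buffering an unbounded download.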

View file

@@ -0,0 +1,14 @@
# frozen_string_literal: true
module Paperclip
module TranscoderExtensions
# Prevent the transcoder from modifying our meta hash
def initialize(file, options = {}, attachment = nil)
meta_value = attachment&.instance_read(:meta)
super
attachment&.instance_write(:meta, meta_value)
end
end
end
Paperclip::Transcoder.prepend(Paperclip::TranscoderExtensions)

View file

@@ -5,13 +5,15 @@ require 'mime/types/columnar'
module Paperclip
class TypeCorrector < Paperclip::Processor
def make
target_extension = options[:format]
extension = File.extname(attachment.instance.file_file_name)
return @file unless options[:format]
target_extension = '.' + options[:format]
extension = File.extname(attachment.instance_read(:file_name))
return @file unless options[:style] == :original && target_extension && extension != target_extension
attachment.instance.file_content_type = options[:content_type] || attachment.instance.file_content_type
attachment.instance.file_file_name = File.basename(attachment.instance.file_file_name, '.*') + '.' + target_extension
attachment.instance_write(:content_type, options[:content_type] || attachment.instance_read(:content_type))
attachment.instance_write(:file_name, File.basename(attachment.instance_read(:file_name), '.*') + target_extension)
@file
end

View file

@@ -0,0 +1,12 @@
# frozen_string_literal: true
class Redis
module NamespaceExtensions
def exists?(*args, &block)
call_with_namespace('exists?', *args, &block)
end
end
end
Redis::Namespace::COMMANDS['exists?'] = [:first]
Redis::Namespace.prepend(Redis::NamespaceExtensions)
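Context for this shim: redis-rb 4.2 introduced exists? as the boolean form of exists, but Redis::Namespace did not yet know the new command. Registering it with [:first] tells the namespace to prefix the first argument, so a call like the following checks the namespaced key:

redis = Redis::Namespace.new('mastodon', redis: Redis.new)
redis.exists?('feed:home:1') # queries "mastodon:feed:home:1"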

View file

@@ -1,5 +1,35 @@
# frozen_string_literal: true
def gen_border(codepoint)
input = Rails.root.join('public', 'emoji', "#{codepoint}.svg")
dest = Rails.root.join('public', 'emoji', "#{codepoint}_border.svg")
doc = File.open(input) { |f| Nokogiri::XML(f) }
svg = doc.at_css('svg')
if svg.key?('viewBox')
view_box = svg['viewBox'].split(' ').map(&:to_i)
view_box[0] -= 2
view_box[1] -= 2
view_box[2] += 4
view_box[3] += 4
svg['viewBox'] = view_box.join(' ')
end
g = Nokogiri::XML::Node.new 'g', doc
doc.css('svg > *').each do |elem|
border_elem = elem.dup
border_elem.delete('fill')
border_elem['stroke'] = 'white'
border_elem['stroke-linejoin'] = 'round'
border_elem['stroke-width'] = '4px'
g.add_child(border_elem)
end
svg.prepend_child(g)
File.write(dest, doc.to_xml)
puts "Wrote bordered #{codepoint}.svg to #{dest}!"
end
def codepoints_to_filename(codepoints)
codepoints.downcase.gsub(/\A[0]+/, '').tr(' ', '-')
end
@@ -23,8 +53,10 @@ namespace :emojis do
HTTP.get(source).to_s.split("\n").each do |line|
next if line.start_with? '#'
parts = line.split(';').map(&:strip)
next if parts.size < 2
codes << [parts[0], parts[1].start_with?('fully-qualified')]
end
@@ -55,4 +87,16 @@ namespace :emojis do
File.write(dest, Oj.dump(map))
puts "Wrote emojo to destination! (#{dest})"
end
desc 'Generate emoji variants with white borders'
task :generate_borders do
src = Rails.root.join('app', 'javascript', 'mastodon', 'features', 'emoji', 'emoji_map.json')
emojis = '🎱🐜⚫🖤⬛◼️◾◼️✒️▪️💣🎳📷📸♣️🕶️✴️🔌💂‍♀️📽️🍳🦍💂🔪🕳️🕹️🕋🖊️🖋️💂‍♂️🎤🎓🎥🎼♠️🎩🦃📼📹🎮🐃🏴👽⚾🐔☁️💨🕊️👀🍥👻🐐❕❔⛸️🌩️🔊🔇📃🌧️🐏🍚🍙🐓🐑💀☠️🌨️🔉🔈💬💭🏐🏳️⚪⬜◽◻️▫️'
map = Oj.load(File.read(src))
emojis.each_grapheme_cluster do |emoji|
gen_border map[emoji]
end
end
end
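The task is then run from the application root; a hypothetical invocation:

bundle exec rake emojis:generate_borders

It reads the emoji map shipped with the web client, looks up the codepoints for each emoji in the hard-coded list of dark or monochrome glyphs, and writes a codepoint_border.svg next to each source SVG in public/emoji.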

View file

@@ -1,6 +1,5 @@
# frozen_string_literal: true
require 'tty-command'
require 'tty-prompt'
namespace :mastodon do
@@ -334,8 +333,6 @@ namespace :mastodon do
prompt.say 'This configuration will be written to .env.production'
if prompt.yes?('Save configuration?')
cmd = TTY::Command.new(printer: :quiet)
env_contents = env.each_pair.map do |key, value|
if value.is_a?(String) && value =~ /[\s\#\\"]/
if value =~ /[']/
@@ -367,7 +364,7 @@ namespace :mastodon do
prompt.say 'Running `RAILS_ENV=production rails db:setup` ...'
prompt.say "\n\n"
if cmd.run!({ RAILS_ENV: 'production', SAFETY_ASSURED: 1 }, :rails, 'db:setup').failure?
if !system(env.transform_values(&:to_s).merge({ 'RAILS_ENV' => 'production', 'SAFETY_ASSURED' => '1' }), 'rails db:setup')
prompt.error 'That failed! Perhaps your configuration is not right'
else
prompt.ok 'Done!'
@@ -382,7 +379,7 @@ namespace :mastodon do
prompt.say 'Running `RAILS_ENV=production rails assets:precompile` ...'
prompt.say "\n\n"
if cmd.run!({ RAILS_ENV: 'production' }, :rails, 'assets:precompile').failure?
if !system(env.transform_values(&:to_s).merge({ 'RAILS_ENV' => 'production' }), 'rails assets:precompile')
prompt.error 'That failed! Maybe you need swap space?'
else
prompt.say 'Done!'
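The switch from TTY::Command to Kernel#system keeps the child process attached to the user's terminal. When system is given a leading hash, it is merged into the child's environment, but keys and values must be Strings, hence env.transform_values(&:to_s). A minimal sketch:

env = { 'RAILS_ENV' => 'production', 'SAFETY_ASSURED' => '1' }
success = system(env, 'rails db:setup') # true if the command exits with status 0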