# frozen_string_literal: true
require 'csv'
class ImportService < BaseService
ROWS_PROCESSING_LIMIT = 20_000
def call ( import )
@import = import
@account = @import . account
case @import . type
when 'following'
import_follows!
when 'blocking'
import_blocks!
when 'muting'
import_mutes!
when 'domain_blocking'
import_domain_blocks!
2020-11-20 03:48:13 +11:00
when 'bookmarks'
import_bookmarks!
2019-02-03 13:59:51 +11:00
end
end
private
def import_follows!
2019-04-04 03:17:43 +11:00
parse_import_data! ( [ 'Account address' ] )
2022-09-21 07:51:21 +10:00
import_relationships! ( 'follow' , 'unfollow' , @account . following , ROWS_PROCESSING_LIMIT , reblogs : { header : 'Show boosts' , default : true } , notify : { header : 'Notify on new posts' , default : false } , languages : { header : 'Languages' , default : nil } )
2019-02-03 13:59:51 +11:00
end
def import_blocks!
2019-04-04 03:17:43 +11:00
parse_import_data! ( [ 'Account address' ] )
2019-02-03 13:59:51 +11:00
import_relationships! ( 'block' , 'unblock' , @account . blocking , ROWS_PROCESSING_LIMIT )
end
def import_mutes!
2019-04-04 03:17:43 +11:00
parse_import_data! ( [ 'Account address' ] )
2020-09-19 01:26:45 +10:00
import_relationships! ( 'mute' , 'unmute' , @account . muting , ROWS_PROCESSING_LIMIT , notifications : { header : 'Hide notifications' , default : true } )
2019-02-03 13:59:51 +11:00
end
def import_domain_blocks!
2019-04-04 03:17:43 +11:00
parse_import_data! ( [ '#domain' ] )
items = @data . take ( ROWS_PROCESSING_LIMIT ) . map { | row | row [ '#domain' ] . strip }
2019-02-03 13:59:51 +11:00
if @import . overwrite?
2021-03-24 20:44:31 +11:00
presence_hash = items . index_with ( true )
2019-02-03 13:59:51 +11:00
@account . domain_blocks . find_each do | domain_block |
if presence_hash [ domain_block . domain ]
items . delete ( domain_block . domain )
else
@account . unblock_domain! ( domain_block . domain )
end
end
end
items . each do | domain |
@account . block_domain! ( domain )
end
AfterAccountDomainBlockWorker . push_bulk ( items ) do | domain |
[ @account . id , domain ]
end
end
2019-04-08 15:28:27 +10:00
def import_relationships! ( action , undo_action , overwrite_scope , limit , extra_fields = { } )
2020-03-31 05:32:34 +11:00
local_domain_suffix = " @ #{ Rails . configuration . x . local_domain } "
2023-03-01 00:59:19 +11:00
items = @data . take ( limit ) . map { | row | [ row [ 'Account address' ] & . strip & . delete_suffix ( local_domain_suffix ) , extra_fields . to_h { | key , field_settings | [ key , row [ field_settings [ :header ] ] & . strip || field_settings [ :default ] ] } ] } . reject { | ( id , _ ) | id . blank? }
2019-02-03 13:59:51 +11:00
if @import . overwrite?
2019-04-04 03:17:43 +11:00
presence_hash = items . each_with_object ( { } ) { | ( id , extra ) , mapping | mapping [ id ] = [ true , extra ] }
2019-02-03 13:59:51 +11:00
overwrite_scope . find_each do | target_account |
if presence_hash [ target_account . acct ]
items . delete ( target_account . acct )
2019-04-04 03:17:43 +11:00
extra = presence_hash [ target_account . acct ] [ 1 ]
2022-01-28 10:43:56 +11:00
Import :: RelationshipWorker . perform_async ( @account . id , target_account . acct , action , extra . stringify_keys )
2019-02-03 13:59:51 +11:00
else
Import :: RelationshipWorker . perform_async ( @account . id , target_account . acct , undo_action )
end
end
end
2020-06-09 18:26:58 +10:00
head_items = items . uniq { | acct , _ | acct . split ( '@' ) [ 1 ] }
tail_items = items - head_items
2020-12-18 19:18:31 +11:00
2020-06-09 18:26:58 +10:00
Import :: RelationshipWorker . push_bulk ( head_items + tail_items ) do | acct , extra |
2022-01-28 10:43:56 +11:00
[ @account . id , acct , action , extra . stringify_keys ]
2019-02-03 13:59:51 +11:00
end
end
2020-11-20 03:48:13 +11:00
def import_bookmarks!
parse_import_data! ( [ '#uri' ] )
items = @data . take ( ROWS_PROCESSING_LIMIT ) . map { | row | row [ '#uri' ] . strip }
if @import . overwrite?
2021-03-24 20:44:31 +11:00
presence_hash = items . index_with ( true )
2020-11-20 03:48:13 +11:00
@account . bookmarks . find_each do | bookmark |
if presence_hash [ bookmark . status . uri ]
items . delete ( bookmark . status . uri )
else
bookmark . destroy!
end
end
end
2021-01-10 10:32:01 +11:00
statuses = items . filter_map do | uri |
2020-11-20 03:48:13 +11:00
status = ActivityPub :: TagManager . instance . uri_to_resource ( uri , Status )
next if status . nil? && ActivityPub :: TagManager . instance . local_uri? ( uri )
status || ActivityPub :: FetchRemoteStatusService . new . call ( uri )
2022-11-03 02:38:23 +11:00
rescue HTTP :: Error , OpenSSL :: SSL :: SSLError , Mastodon :: UnexpectedResponseError
nil
2023-02-20 21:01:20 +11:00
rescue = > e
2022-11-03 02:38:23 +11:00
Rails . logger . warn " Unexpected error when importing bookmark: #{ e } "
nil
2021-01-10 10:32:01 +11:00
end
2020-11-20 03:48:13 +11:00
account_ids = statuses . map ( & :account_id )
2023-03-21 20:32:58 +11:00
preloaded_relations = @account . relations_map ( account_ids , skip_blocking_and_muting : true )
2020-11-20 03:48:13 +11:00
statuses . keep_if { | status | StatusPolicy . new ( @account , status , preloaded_relations ) . show? }
statuses . each do | status |
@account . bookmarks . find_or_create_by! ( account : @account , status : status )
end
end
2019-04-04 03:17:43 +11:00
def parse_import_data! ( default_headers )
data = CSV . parse ( import_data , headers : true )
data = CSV . parse ( import_data , headers : default_headers ) unless data . headers & . first & . strip & . include? ( ' ' )
2023-04-30 22:07:21 +10:00
@data = data . compact_blank
2019-04-04 03:17:43 +11:00
end
2019-02-03 13:59:51 +11:00
def import_data
2022-11-14 15:52:13 +11:00
Paperclip . io_adapters . for ( @import . data ) . read . force_encoding ( Encoding :: UTF_8 )
2019-02-03 13:59:51 +11:00
end
end