# frozen_string_literal: true

require 'csv'

# NOTE: This is a deprecated service, only kept to not break ongoing imports
# on upgrade. See `BulkImportService` for its replacement.
# Processes an uploaded CSV import (follows, blocks, mutes, domain blocks,
# bookmarks) on behalf of the importing account, enqueueing background
# workers for the actual side effects.
class ImportService < BaseService
  # Hard cap on how many CSV rows a single import will process.
  ROWS_PROCESSING_LIMIT = 20_000

  # Entry point. Dispatches on the import's declared type.
  #
  # @param import [Import] the persisted import record (carries the CSV
  #   attachment, the owning account, the type and the overwrite flag)
  def call(import)
    @import  = import
    @account = @import.account

    case @import.type
    when 'following'
      import_follows!
    when 'blocking'
      import_blocks!
    when 'muting'
      import_mutes!
    when 'domain_blocking'
      import_domain_blocks!
    when 'bookmarks'
      import_bookmarks!
    end
  end

  private

  def import_follows!
    parse_import_data!(['Account address'])
    import_relationships!('follow', 'unfollow', @account.following, ROWS_PROCESSING_LIMIT, reblogs: { header: 'Show boosts', default: true }, notify: { header: 'Notify on new posts', default: false }, languages: { header: 'Languages', default: nil })
  end

  def import_blocks!
    parse_import_data!(['Account address'])
    import_relationships!('block', 'unblock', @account.blocking, ROWS_PROCESSING_LIMIT)
  end

  def import_mutes!
    parse_import_data!(['Account address'])
    import_relationships!('mute', 'unmute', @account.muting, ROWS_PROCESSING_LIMIT, notifications: { header: 'Hide notifications', default: true })
  end

  def import_domain_blocks!
    parse_import_data!(['#domain'])

    items = @data.take(ROWS_PROCESSING_LIMIT).map { |row| row['#domain'].strip }

    if @import.overwrite?
      presence_hash = items.index_with(true)

      # In overwrite mode, drop domains already blocked from the work list
      # and lift blocks that are absent from the uploaded file.
      @account.domain_blocks.find_each do |domain_block|
        if presence_hash[domain_block.domain]
          items.delete(domain_block.domain)
        else
          @account.unblock_domain!(domain_block.domain)
        end
      end
    end

    items.each do |domain|
      @account.block_domain!(domain)
    end

    AfterAccountDomainBlockWorker.push_bulk(items) do |domain|
      [@account.id, domain]
    end
  end

  # Shared machinery for follow/block/mute imports.
  #
  # @param action [String] worker action for rows present in the file
  # @param undo_action [String] worker action for existing relationships
  #   missing from the file (only used in overwrite mode)
  # @param overwrite_scope [ActiveRecord::Relation] existing relationships
  #   of the importing account to reconcile against
  # @param limit [Integer] maximum number of rows to process
  # @param extra_fields [Hash{Symbol=>Hash}] per-row optional columns,
  #   mapping attribute name to { header:, default: }
  def import_relationships!(action, undo_action, overwrite_scope, limit, extra_fields = {})
    local_domain_suffix = "@#{Rails.configuration.x.local_domain}"

    # Normalize each row to [acct, extra_attributes], treating local-domain
    # addresses as bare usernames and skipping blank addresses.
    items = @data.take(limit).map { |row| [row['Account address']&.strip&.delete_suffix(local_domain_suffix), extra_fields.to_h { |key, field_settings| [key, row[field_settings[:header]]&.strip || field_settings[:default]] }] }.reject { |(id, _)| id.blank? }

    if @import.overwrite?
      presence_hash = items.each_with_object({}) { |(id, extra), mapping| mapping[id] = [true, extra] }

      overwrite_scope.find_each do |target_account|
        if presence_hash[target_account.acct]
          # Already in the file: update in place (extra fields may differ)
          # and remove from the bulk list below.
          items.delete(target_account.acct)
          extra = presence_hash[target_account.acct][1]
          Import::RelationshipWorker.perform_async(@account.id, target_account.acct, action, extra.stringify_keys)
        else
          Import::RelationshipWorker.perform_async(@account.id, target_account.acct, undo_action)
        end
      end
    end

    # Process one account per remote domain first so domains are discovered
    # early, then the remainder.
    head_items = items.uniq { |acct, _| acct.split('@')[1] }
    tail_items = items - head_items

    Import::RelationshipWorker.push_bulk(head_items + tail_items) do |acct, extra|
      [@account.id, acct, action, extra.stringify_keys]
    end
  end

  def import_bookmarks!
    parse_import_data!(['#uri'])

    items = @data.take(ROWS_PROCESSING_LIMIT).map { |row| row['#uri'].strip }

    if @import.overwrite?
      presence_hash = items.index_with(true)

      @account.bookmarks.find_each do |bookmark|
        if presence_hash[bookmark.status.uri]
          items.delete(bookmark.status.uri)
        else
          bookmark.destroy!
        end
      end
    end

    # Resolve each URI to a Status, fetching remote ones; network and
    # resolution failures skip the row rather than aborting the import.
    statuses = items.filter_map do |uri|
      status = ActivityPub::TagManager.instance.uri_to_resource(uri, Status)
      next if status.nil? && ActivityPub::TagManager.instance.local_uri?(uri)

      status || ActivityPub::FetchRemoteStatusService.new.call(uri)
    rescue HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::UnexpectedResponseError
      nil
    rescue => e
      Rails.logger.warn "Unexpected error when importing bookmark: #{e}"
      nil
    end

    account_ids         = statuses.map(&:account_id)
    preloaded_relations = @account.relations_map(account_ids, skip_blocking_and_muting: true)

    # Only bookmark statuses the importing account is allowed to see.
    statuses.keep_if { |status| StatusPolicy.new(@account, status, preloaded_relations).show? }

    statuses.each do |status|
      @account.bookmarks.find_or_create_by!(account: @account, status: status)
    end
  end

  # Parses the attached CSV into @data. If the first header cell contains
  # no space, the file is assumed to be headerless and is re-parsed with
  # the given default headers. Blank rows are dropped.
  def parse_import_data!(default_headers)
    data = CSV.parse(import_data, headers: true)
    data = CSV.parse(import_data, headers: default_headers) unless data.headers&.first&.strip&.include?(' ')

    @data = data.compact_blank
  end

  # Reads the raw attachment bytes, forcing UTF-8 so CSV parsing does not
  # choke on the default binary encoding of the Paperclip adapter.
  def import_data
    Paperclip.io_adapters.for(@import.data).read.force_encoding(Encoding::UTF_8)
  end
end