2016-11-16 02:56:29 +11:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2016-02-24 22:57:29 +11:00
|
|
|
class ProcessFeedService < BaseService
|
2016-02-21 08:53:20 +11:00
|
|
|
# Parses an Atom feed payload and processes it on behalf of the given
# remote account: queues a profile refresh for the author, then handles
# every entry in the feed.
#
# @param body [String] raw Atom XML
# @param account [Account] remote account the feed belongs to
# @return [Array] compacted per-entry results from ProcessEntry
def call(body, account)
  document = Nokogiri::XML(body)
  document.encoding = 'utf-8'

  update_author(body, account)
  process_entries(document, account)
end
|
2016-02-21 08:53:20 +11:00
|
|
|
|
2016-03-25 12:13:30 +11:00
|
|
|
private
|
2016-02-29 00:26:26 +11:00
|
|
|
|
2017-04-08 21:26:03 +10:00
|
|
|
# Queues a background refresh of the remote account's profile using the
# raw feed body (forced to UTF-8 before being handed to the worker).
def update_author(body, account)
  payload = body.force_encoding('UTF-8')
  RemoteProfileUpdateWorker.perform_async(account.id, payload, true)
end
|
2016-02-24 11:28:53 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Processes every <entry> element of the feed in reverse document order
# (oldest first), so later entries can reference statuses created from
# earlier ones. Returns the compacted results.
def process_entries(xml, account)
  entries = xml.xpath('//xmlns:entry', xmlns: TagManager::XMLNS)
  entries.reverse_each.map { |entry| ProcessEntry.new.call(entry, account) }.compact
end
|
2016-03-16 20:46:15 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
class ProcessEntry
|
|
|
|
# Processes a single Atom entry for the given account.
#
# @return [Status, nil] the created status for :post/:share, nil for
#   :delete, unsupported entries, or validation failures
def call(xml, account)
  @account = account
  @xml = xml

  return if skip_unsupported_type?

  case verb
  when :post, :share
    create_status
  when :delete
    delete_status
  end
rescue ActiveRecord::RecordInvalid => e
  # A single invalid entry must not abort the rest of the feed.
  Rails.logger.debug "Nothing was saved for #{id} because: #{e}"
  nil
end
|
2016-02-25 10:17:01 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
private
|
2016-09-10 04:04:34 +10:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Creates a Status record from the current entry (@xml).
#
# For :share verbs, the embedded activity:object is resolved and
# attached as the reblog target; if that original cannot be resolved,
# the freshly created wrapper status is destroyed again.
#
# Returns the saved Status, the pre-existing Status when the entry was
# already known, or nil when nothing could be created.
def create_status
  Rails.logger.debug "Creating remote status #{id}"

  status, just_created = status_from_xml(@xml)

  return if status.nil?
  # Already in the database — no notifications or distribution needed.
  return status unless just_created

  if verb == :share
    original_status, = status_from_xml(@xml.at_xpath('.//activity:object', activity: TagManager::AS_XMLNS))
    status.reblog = original_status

    if original_status.nil?
      # Do not keep a reblog wrapper whose target is unknown.
      status.destroy
      return nil
    elsif original_status.reblog?
      # Collapse reblogs-of-reblogs onto the ultimate original.
      status.reblog = original_status.reblog
    end
  end

  status.save!

  notify_about_mentions!(status) unless status.reblog?
  notify_about_reblog!(status) if status.reblog? && status.reblog.account.local?

  Rails.logger.debug "Queuing remote status #{status.id} (#{id}) for distribution"

  DistributionWorker.perform_async(status.id)

  status
end
|
2016-02-29 07:22:56 +11:00
|
|
|
|
2017-03-14 02:34:15 +11:00
|
|
|
# Delivers a notification for every mention of a local account on the
# given status; remote mentions are skipped.
def notify_about_mentions!(status)
  status.mentions.includes(:account).each do |mention|
    recipient = mention.account
    NotifyService.new.call(recipient, mention) if recipient.local?
  end
end
|
|
|
|
|
|
|
|
# Notifies the author of the reblogged status about the reblog.
def notify_about_reblog!(status)
  recipient = status.reblog.account
  NotifyService.new.call(recipient, status)
end
|
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Removes the status referenced by this entry's id, if we know about it.
# Always returns nil.
def delete_status
  Rails.logger.debug "Deleting remote status #{id}"

  status = Status.find_by(uri: id)
  RemoveStatusService.new.call(status) if status
  nil
end
|
2016-10-15 05:14:53 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# True when the entry's verb/object-type combination is not one this
# processor handles.
def skip_unsupported_type?
  supported_verb = %i(post share delete).include?(verb)
  supported_type = %i(activity note comment).include?(type)

  !(supported_verb && supported_type)
end
|
2016-02-29 07:22:56 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Materializes a Status from an Atom entry fragment.
#
# Returns a two-element array [status, just_created]:
# * [existing_status, false] when the entry is already in the database
# * [nil, false] when the author cannot be resolved or is suspended
# * [new_status, true] when a new record was created
def status_from_xml(entry)
  # Return early if status already exists in db
  status = find_status(id(entry))

  return [status, false] unless status.nil?

  # If status embeds an author, find that author
  # If that author cannot be found, don't record the status (do not misattribute)
  if account?(entry)
    begin
      account = find_or_resolve_account(acct(entry))
      return [nil, false] if account.nil?
    rescue Goldfinger::Error
      # Webfinger lookup failed — skip rather than misattribute.
      return [nil, false]
    end
  else
    # No embedded author: attribute to the feed's own account.
    account = @account
  end

  return [nil, false] if account.suspended?

  status = Status.create!(
    uri: id(entry),
    url: url(entry),
    account: account,
    text: content(entry),
    spoiler_text: content_warning(entry),
    created_at: published(entry),
    reply: thread?(entry),
    visibility: visibility_scope(entry)
  )

  if thread?(entry)
    Rails.logger.debug "Trying to attach #{status.id} (#{id(entry)}) to #{thread(entry).first}"
    # May be nil now; ThreadResolveWorker attaches the parent later.
    status.thread = find_or_resolve_status(status, *thread(entry))
  end

  mentions_from_xml(status, entry)
  hashtags_from_xml(status, entry)
  media_from_xml(status, entry)

  [status, true]
end
|
2016-02-29 07:22:56 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Looks up (or fetches via Webfinger) the account for the given
# username@domain string. May raise Goldfinger::Error — callers rescue it.
def find_or_resolve_account(acct)
  resolver = FollowRemoteAccountService.new
  resolver.call(acct)
end
|
2016-03-19 10:41:29 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Finds the parent status by URI. When it is unknown, schedules
# asynchronous thread resolution for it and returns nil for now.
def find_or_resolve_status(parent, uri, url)
  found = find_status(uri)

  ThreadResolveWorker.perform_async(parent.id, url) unless found

  found
end
|
2016-09-27 00:42:38 +10:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Resolves a status URI to a Status record. Local tag URIs are decoded
# to a database id; remote URIs are looked up by their stored uri.
def find_status(uri)
  unless TagManager.instance.local_id?(uri)
    return Status.find_by(uri: uri)
  end

  local_id = TagManager.instance.unique_tag_to_local_id(uri, 'Status')
  Status.find(local_id)
end
|
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Creates Mention records for every link[rel="mentioned"] on the entry.
# Group and collection mentions are ignored, and each account is
# mentioned at most once even if linked multiple times.
def mentions_from_xml(parent, xml)
  seen_account_ids = []
  ignored_types = [TagManager::TYPES[:group], TagManager::TYPES[:collection]]

  xml.xpath('./xmlns:link[@rel="mentioned"]', xmlns: TagManager::XMLNS).each do |link|
    next if ignored_types.include?(link['ostatus:object-type'])

    href = link['href']
    parsed = Addressable::URI.parse(href)

    mentioned_account =
      if TagManager.instance.web_domain?(parsed.host)
        Account.find_local(parsed.path.gsub('/users/', ''))
      else
        Account.find_by(url: href) || FetchRemoteAccountService.new.call(href)
      end

    next if mentioned_account.nil? || seen_account_ids.include?(mentioned_account.id)

    mentioned_account.mentions.where(status: parent).first_or_create(status: parent)

    # So we can skip duplicate mentions
    seen_account_ids << mentioned_account.id
  end
end
|
2016-02-24 11:28:53 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Extracts hashtag terms from the entry's <category> elements and
# attaches them to the status via ProcessHashtagsService.
def hashtags_from_xml(parent, xml)
  categories = xml.xpath('./xmlns:category', xmlns: TagManager::XMLNS)
  tags = categories.map { |category| category['term'] }.select(&:present?)

  ProcessHashtagsService.new.call(parent, tags)
end
|
2016-02-21 08:53:20 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Creates MediaAttachment records for every link[rel="enclosure"] on
# the entry, then downloads the files unless the account's domain is
# blocked with reject_media.
#
# Fixes: URI.parse raises URI::InvalidURIError on malformed hrefs, and
# a URI such as "http:foo" has a nil host, so the previous
# `parsed_url.host.empty?` check raised NoMethodError — either error
# aborted processing of the whole entry. Both cases now skip just the
# offending enclosure.
def media_from_xml(parent, xml)
  do_not_download = DomainBlock.find_by(domain: parent.account.domain)&.reject_media?

  xml.xpath('./xmlns:link[@rel="enclosure"]', xmlns: TagManager::XMLNS).each do |link|
    next unless link['href']

    media = MediaAttachment.where(status: parent, remote_url: link['href']).first_or_initialize(account: parent.account, status: parent, remote_url: link['href'])

    begin
      parsed_url = URI.parse(link['href'])
    rescue URI::InvalidURIError
      next
    end

    # Only fetch media over plain HTTP(S) from a concrete host.
    next if !%w(http https).include?(parsed_url.scheme) || parsed_url.host.nil? || parsed_url.host.empty?

    media.save

    next if do_not_download

    begin
      media.file_remote_url = link['href']
      media.save
    rescue OpenURI::HTTPError, Paperclip::Errors::NotIdentifiedByImageMagickError
      # Download/processing failures skip this attachment only.
      next
    end
  end
end
|
2016-02-25 03:23:59 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# The entry's Atom <id>, used as the status URI.
def id(xml = @xml)
  node = xml.at_xpath('./xmlns:id', xmlns: TagManager::XMLNS)
  node.content
end
|
2016-02-24 11:28:53 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# The entry's activity verb as a symbol, defaulting to :post when the
# <activity:verb> element is absent. An unrecognized verb string yields
# nil (TagManager::VERBS.key lookup miss), matching the original
# behavior. The previous version relied on a bare rescue catching the
# NoMethodError from a nil node — exception-driven control flow
# replaced with safe navigation.
def verb(xml = @xml)
  raw = xml.at_xpath('./activity:verb', activity: TagManager::AS_XMLNS)&.content
  return :post if raw.nil?

  TagManager::VERBS.key(raw)
end
|
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# The entry's activity object-type as a symbol, defaulting to :activity
# when the <activity:object-type> element is absent. An unrecognized
# type string yields nil (TagManager::TYPES.key lookup miss), matching
# the original behavior. Replaces the bare rescue that was used as
# control flow for the missing-element case.
def type(xml = @xml)
  raw = xml.at_xpath('./activity:object-type', activity: TagManager::AS_XMLNS)&.content
  return :activity if raw.nil?

  TagManager::TYPES.key(raw)
end
|
2016-02-24 11:28:53 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# The entry's alternate (HTML) link href, or nil when absent.
def url(xml = @xml)
  node = xml.at_xpath('./xmlns:link[@rel="alternate"]', xmlns: TagManager::XMLNS)
  node['href'] unless node.nil?
end
|
2016-02-29 07:22:56 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# The entry's <content> text (status body).
def content(xml = @xml)
  node = xml.at_xpath('./xmlns:content', xmlns: TagManager::XMLNS)
  node.content
end
|
2016-02-25 03:23:59 +11:00
|
|
|
|
2017-01-25 10:49:08 +11:00
|
|
|
# The entry's <summary> (used as the spoiler text), or '' when absent.
def content_warning(xml = @xml)
  summary = xml.at_xpath('./xmlns:summary', xmlns: TagManager::XMLNS)
  summary&.content || ''
end
|
|
|
|
|
2017-02-12 01:10:22 +11:00
|
|
|
# The entry's <mastodon:scope> as a symbol, defaulting to :public.
def visibility_scope(xml = @xml)
  scope = xml.at_xpath('./mastodon:scope', mastodon: TagManager::MTDN_XMLNS)&.content
  scope&.to_sym || :public
end
|
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# The entry's <published> timestamp string.
def published(xml = @xml)
  node = xml.at_xpath('./xmlns:published', xmlns: TagManager::XMLNS)
  node.content
end
|
2016-02-25 03:23:59 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Whether the entry is a reply (has a thr:in-reply-to element).
def thread?(xml = @xml)
  node = xml.at_xpath('./thr:in-reply-to', thr: TagManager::THR_XMLNS)
  !node.nil?
end
|
2016-02-24 11:28:53 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# The [uri, url] pair of the status this entry replies to. Only valid
# when thread? is true.
def thread(xml = @xml)
  node = xml.at_xpath('./thr:in-reply-to', thr: TagManager::THR_XMLNS)
  [node['ref'], node['href']]
end
|
2016-02-24 13:05:40 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Whether the entry embeds its own <author> element.
def account?(xml = @xml)
  author = xml.at_xpath('./xmlns:author', xmlns: TagManager::XMLNS)
  !author.nil?
end
|
2016-02-25 10:17:01 +11:00
|
|
|
|
2016-11-08 11:32:34 +11:00
|
|
|
# Builds a username@domain string from the entry's embedded <author>,
# deriving the domain from the author's URI host.
def acct(xml = @xml)
  username = xml.at_xpath('./xmlns:author/xmlns:name', xmlns: TagManager::XMLNS).content
  author_uri = xml.at_xpath('./xmlns:author/xmlns:uri', xmlns: TagManager::XMLNS).content

  domain = Addressable::URI.parse(author_uri).host

  "#{username}@#{domain}"
end
|
2016-09-20 08:39:03 +10:00
|
|
|
end
|
2016-02-21 08:53:20 +11:00
|
|
|
end
|