class FollowRemoteAccountService < BaseService
  include OStatus2::MagicKey

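  # XML namespace used by DFRN (Friendica) feeds, which expose the feed owner
  # in a <dfrn:owner> element instead of the standard Atom <author>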
  DFRN_NS = 'http://purl.org/macgirvin/dfrn/1.0'.freeze

  # Find or create a local account for a remote user.
  #
  # When creating, look up the user's WebFinger record and fetch all
  # important information from their feed.
  #
  # @param [String] uri User URI in the form of username@domain
  # @return [Account]
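  #
  # A minimal usage sketch (illustrative; the handle and the returned record
  # shown below are hypothetical):
  #
  #   FollowRemoteAccountService.new.call('alice@example.com')
  #   # => #<Account username: "alice", domain: "example.com">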
  def call(uri)
    username, domain = uri.split('@')

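    # A handle on the local domain already has a local account, and accounts
    # from blocked domains are not fetched at all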
    return Account.find_local(username) if TagManager.instance.local_domain?(domain)
    return nil if DomainBlock.blocked?(domain)

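    # Reuse the remote account if we have already fetched it before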
    account = Account.find_remote(username, domain)

    return account unless account.nil?

    Rails.logger.debug "Creating new remote account for #{uri}"

    account = Account.new(username: username, domain: domain)

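    # WebFinger gives us the profile's Atom feed, Salmon endpoint,
    # profile page and magic public key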
    data = Goldfinger.finger("acct:#{uri}")

    account.remote_url  = data.link('http://schemas.google.com/g/2010#updates-from').href
    account.salmon_url  = data.link('salmon').href
    account.url         = data.link('http://webfinger.net/rel/profile-page').href
    account.public_key  = magic_key_to_pem(data.link('magic-public-key').href)
    account.private_key = nil

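    # The Atom feed provides the account's canonical URI, its PubSubHubbub hub
    # and the remaining profile data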
    xml  = get_feed(account.remote_url)
    hubs = get_hubs(xml)

    account.uri     = get_account_uri(xml)
    account.hub_url = hubs.first.attribute('href').value

    get_profile(xml, account)
    account.save!

    return account
  end

  private

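  # Fetch and parse the account's Atom feed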
  def get_feed(url)
    response = http_client.get(Addressable::URI.parse(url))
    Nokogiri::XML(response)
  end

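  # Find the PubSubHubbub hubs advertised by the feed, failing if there are none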
  def get_hubs(xml)
    hubs = xml.xpath('//xmlns:link[@rel="hub"]')
    raise Goldfinger::Error, 'No PubSubHubbub hubs found' if hubs.empty? || hubs.first.attribute('href').nil?
    hubs
  end

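  # Read the author URI from the Atom <author> element, falling back to the
  # <dfrn:owner> element used by Friendica feeds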
  def get_account_uri(xml)
    author_uri = xml.at_xpath('/xmlns:feed/xmlns:author/xmlns:uri')

    if author_uri.nil?
      owner = xml.at_xpath('/xmlns:feed').at_xpath('./dfrn:owner', dfrn: DFRN_NS)
      author_uri = owner.at_xpath('./xmlns:uri') unless owner.nil?
    end

    raise Goldfinger::Error, 'Author URI could not be found' if author_uri.nil?
    author_uri.content
  end

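  # Populate the account's profile fields from the feed's author (or DFRN owner) element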
  def get_profile(xml, account)
    author = xml.at_xpath('/xmlns:feed/xmlns:author') || xml.at_xpath('/xmlns:feed').at_xpath('./dfrn:owner', dfrn: DFRN_NS)
    update_remote_profile_service.call(author, account)
  end

  def update_remote_profile_service
    @update_remote_profile_service ||= UpdateRemoteProfileService.new
  end

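  # HTTP client with explicit per-operation timeouts for talking to remote servers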
  def http_client
    HTTP.timeout(:per_operation, write: 20, connect: 20, read: 50)
  end
end