# frozen_string_literal: true

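# Extracts entities (mentions, hashtags, and URLs) together with their
# character indices from status text. Builds on Twitter::TwitterText::Extractor
# while using the application's own mention and hashtag regular expressions.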
module Extractor
  MAX_DOMAIN_LENGTH = 253

  extend Twitter::TwitterText::Extractor

  module_function

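  # Returns every recognised entity in +text+ (URLs, hashtags, mentions, and
  # extra URIs) as an array of hashes with :indices character ranges, after
  # removing overlapping matches. Yields each entity when a block is given.
  #
  # Illustrative example (exact indices depend on the regular expressions):
  #   Extractor.extract_entities_with_indices('Hi @alice #ruby')
  #   # => [{ hashtag: 'ruby', indices: [...] }, { screen_name: 'alice', indices: [...] }, ...]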
  def extract_entities_with_indices(text, options = {}, &block)
    entities = begin
      extract_urls_with_indices(text, options) +
        extract_hashtags_with_indices(text, check_url_overlap: false) +
        extract_mentions_or_lists_with_indices(text) +
        extract_extra_uris_with_indices(text)
    end

    return [] if entities.empty?

    entities = remove_overlapping_entities(entities)
    entities.each(&block) if block
    entities
  end

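  # Finds @mentions (local and remote) via Account::MENTION_RE, returning
  # hashes with :screen_name and :indices. Yields each mention when a block
  # is given. (The "_or_lists" suffix mirrors the twitter-text API; lists are
  # not extracted here.)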
  def extract_mentions_or_lists_with_indices(text)
    return [] unless text && Twitter::TwitterText::Regex[:at_signs].match?(text)

    possible_entries = []

    text.scan(Account::MENTION_RE) do |screen_name, _|
      match_data = $LAST_MATCH_INFO
      after = ::Regexp.last_match.post_match

      unless Twitter::TwitterText::Regex[:end_mention_match].match?(after)
        _, domain = screen_name.split('@')

        # Skip remote mentions whose domain part exceeds the maximum domain name length
        next if domain.present? && domain.length > MAX_DOMAIN_LENGTH

        start_position = match_data.char_begin(1) - 1
        end_position = match_data.char_end(1)

        possible_entries << {
          screen_name: screen_name,
          indices: [start_position, end_position],
        }
      end
    end

    if block_given?
      possible_entries.each do |mention|
        yield mention[:screen_name], mention[:indices].first, mention[:indices].last
      end
    end

    possible_entries
  end

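  # Finds #hashtags via Tag::HASHTAG_RE, returning hashes with :hashtag and
  # :indices. Yields each hashtag when a block is given; the _options argument
  # is accepted for twitter-text API compatibility but ignored.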
  def extract_hashtags_with_indices(text, _options = {})
    return [] unless text&.index('#')

    possible_entries = []

    text.scan(Tag::HASHTAG_RE) do |hash_text, _|
      match_data = $LAST_MATCH_INFO
      start_position = match_data.char_begin(1) - 1
      end_position = match_data.char_end(1)
      after = ::Regexp.last_match.post_match

      if %r{\A://}.match?(after)
        # The text right after the match starts with "://", so the captured tag
        # swallowed the scheme of a following URL; trim the trailing
        # "http"/"https" and shorten the end index accordingly.
        hash_text.match(/(.+)(https?\Z)/) do |matched|
          hash_text = matched[1]
          end_position -= matched[2].codepoint_length
        end
      end

      possible_entries << {
        hashtag: hash_text,
        indices: [start_position, end_position],
      }
    end

    if block_given?
      possible_entries.each do |tag|
        yield tag[:hashtag], tag[:indices].first, tag[:indices].last
      end
    end

    possible_entries
  end

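  # Cashtags are not supported; always returns an empty array.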
  def extract_cashtags_with_indices(_text)
    []
  end

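  # Finds additional URIs matched by the :valid_extended_uri pattern, beyond
  # those caught by extract_urls_with_indices, returning hashes with :url and
  # :indices. Yields each match when a block is given.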
  def extract_extra_uris_with_indices(text)
    return [] unless text&.index(':')

    possible_entries = []

    text.scan(Twitter::TwitterText::Regex[:valid_extended_uri]) do
      valid_uri_match_data = $LAST_MATCH_INFO

      start_position = valid_uri_match_data.char_begin(3)
      end_position = valid_uri_match_data.char_end(3)

      possible_entries << {
        url: valid_uri_match_data[3],
        indices: [start_position, end_position],
      }
    end

    if block_given?
      possible_entries.each do |url|
        yield url[:url], url[:indices].first, url[:indices].last
      end
    end

    possible_entries
  end
end