gearheads / mastodon (archived)

Autofix Rubocop Style/RedundantBegin (#23703)

gh/dev

Nick Schonning, 2023-02-18 17:09:40 -05:00, committed by GitHub
Parent: 167709f6b0
Commit: 2177daeae9
No known key found for this signature in database (GPG Key ID: 4AEE18F83AFDEB23)

69 changed files with 458 additions and 695 deletions
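Style/RedundantBegin flags begin...end blocks that add nothing because the surrounding construct already provides an implicit begin. The most common shape in this commit is a conditional wrapped in a begin purely to feed an assignment; a minimal sketch of the rewrite the autocorrection applies, with illustrative names rather than code taken from the diff:

# Before: the begin/end wrapper adds no behaviour around the if expression.
def redis_info
  @redis_info ||= begin
    if namespaced?
      redis.redis.info
    else
      redis.info
    end
  end
end

# After rubocop --autocorrect: the if expression is assigned directly.
def redis_info
  @redis_info ||= if namespaced?
                    redis.redis.info
                  else
                    redis.info
                  end
end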

.rubocop_todo.yml

@@ -2958,79 +2958,6 @@ Style/RedundantArgument:
- 'app/helpers/application_helper.rb'
- 'lib/tasks/emojis.rake'

# Offense count: 83
# This cop supports safe autocorrection (--autocorrect).
Style/RedundantBegin:
Exclude:
- 'app/controllers/admin/dashboard_controller.rb'
- 'app/controllers/api/v1/announcements_controller.rb'
- 'app/controllers/api/v1/trends/links_controller.rb'
- 'app/controllers/api/v1/trends/statuses_controller.rb'
- 'app/controllers/api/v1/trends/tags_controller.rb'
- 'app/controllers/concerns/rate_limit_headers.rb'
- 'app/controllers/concerns/two_factor_authentication_concern.rb'
- 'app/helpers/admin/dashboard_helper.rb'
- 'app/helpers/admin/trends/statuses_helper.rb'
- 'app/helpers/branding_helper.rb'
- 'app/helpers/domain_control_helper.rb'
- 'app/helpers/formatting_helper.rb'
- 'app/helpers/instance_helper.rb'
- 'app/helpers/jsonld_helper.rb'
- 'app/lib/activity_tracker.rb'
- 'app/lib/activitypub/activity/create.rb'
- 'app/lib/activitypub/forwarder.rb'
- 'app/lib/admin/metrics/dimension/software_versions_dimension.rb'
- 'app/lib/admin/metrics/dimension/space_usage_dimension.rb'
- 'app/lib/extractor.rb'
- 'app/lib/importer/statuses_index_importer.rb'
- 'app/lib/link_details_extractor.rb'
- 'app/lib/request.rb'
- 'app/models/account.rb'
- 'app/models/account/field.rb'
- 'app/models/admin/account_action.rb'
- 'app/models/announcement.rb'
- 'app/models/concerns/account_merging.rb'
- 'app/models/concerns/pam_authenticable.rb'
- 'app/models/email_domain_block.rb'
- 'app/models/form/admin_settings.rb'
- 'app/models/form/custom_emoji_batch.rb'
- 'app/models/notification.rb'
- 'app/models/remote_follow.rb'
- 'app/models/status.rb'
- 'app/models/status_edit.rb'
- 'app/models/trends/links.rb'
- 'app/models/trends/statuses.rb'
- 'app/models/trends/tag_filter.rb'
- 'app/models/trends/tags.rb'
- 'app/models/web/push_subscription.rb'
- 'app/presenters/tag_relationships_presenter.rb'
- 'app/services/account_search_service.rb'
- 'app/services/activitypub/fetch_featured_tags_collection_service.rb'
- 'app/services/activitypub/fetch_remote_status_service.rb'
- 'app/services/fetch_link_card_service.rb'
- 'app/services/process_mentions_service.rb'
- 'app/services/reblog_service.rb'
- 'app/services/resolve_account_service.rb'
- 'app/validators/domain_validator.rb'
- 'app/validators/existing_username_validator.rb'
- 'app/validators/import_validator.rb'
- 'app/workers/backup_worker.rb'
- 'app/workers/post_process_media_worker.rb'
- 'app/workers/scheduler/follow_recommendations_scheduler.rb'
- 'db/migrate/20180528141303_fix_accounts_unique_index.rb'
- 'db/migrate/20180812173710_copy_status_stats.rb'
- 'db/migrate/20181116173541_copy_account_stats.rb'
- 'lib/mastodon/accounts_cli.rb'
- 'lib/mastodon/cli_helper.rb'
- 'lib/mastodon/ip_blocks_cli.rb'
- 'lib/mastodon/maintenance_cli.rb'
- 'lib/mastodon/media_cli.rb'
- 'lib/mastodon/search_cli.rb'
- 'lib/mastodon/upgrade_cli.rb'
- 'lib/paperclip/color_extractor.rb'
- 'lib/sanitize_ext/sanitize_config.rb'
- 'lib/tasks/db.rake'
# Offense count: 16
# This cop supports safe autocorrection (--autocorrect).
Style/RedundantRegexpCharacterClass:
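The other shape the cop reports is an explicit begin that exists only so a rescue or ensure has something to attach to, inside a method body or a do...end block, both of which already behave as an implicit begin (method bodies always, blocks since Ruby 2.6). A hedged sketch with an invented ExampleRecord model; after autocorrecting, rubocop --auto-gen-config is the usual way to regenerate .rubocop_todo.yml so the exclusion list above can be dropped:

# Before: the begin wraps the whole method body just to host the rescue.
def perform(record_id)
  begin
    record = ExampleRecord.find(record_id)
    record.process!
  rescue ActiveRecord::RecordNotFound
    true
  end
end

# After: the rescue attaches to the method body directly; the same works
# inside do...end blocks on Ruby 2.6 and later.
def perform(record_id)
  record = ExampleRecord.find(record_id)
  record.process!
rescue ActiveRecord::RecordNotFound
  true
end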

app/controllers/admin/dashboard_controller.rb

@@ -18,13 +18,11 @@ module Admin
     private

     def redis_info
-      @redis_info ||= begin
-        if redis.is_a?(Redis::Namespace)
-          redis.redis.info
-        else
-          redis.info
-        end
-      end
+      @redis_info ||= if redis.is_a?(Redis::Namespace)
+                        redis.redis.info
+                      else
+                        redis.info
+                      end
     end
   end
 end

app/controllers/api/v1/announcements_controller.rb

@@ -18,9 +18,7 @@ class Api::V1::AnnouncementsController < Api::BaseController
   private

   def set_announcements
-    @announcements = begin
-      Announcement.published.chronological
-    end
+    @announcements = Announcement.published.chronological
   end

   def set_announcement

app/controllers/api/v1/trends/links_controller.rb

@@ -18,13 +18,11 @@ class Api::V1::Trends::LinksController < Api::BaseController
   end

   def set_links
-    @links = begin
-      if enabled?
-        links_from_trends.offset(offset_param).limit(limit_param(DEFAULT_LINKS_LIMIT))
-      else
-        []
-      end
-    end
+    @links = if enabled?
+               links_from_trends.offset(offset_param).limit(limit_param(DEFAULT_LINKS_LIMIT))
+             else
+               []
+             end
   end

   def links_from_trends

app/controllers/api/v1/trends/statuses_controller.rb

@@ -16,13 +16,11 @@ class Api::V1::Trends::StatusesController < Api::BaseController
   end

   def set_statuses
-    @statuses = begin
-      if enabled?
-        cache_collection(statuses_from_trends.offset(offset_param).limit(limit_param(DEFAULT_STATUSES_LIMIT)), Status)
-      else
-        []
-      end
-    end
+    @statuses = if enabled?
+                  cache_collection(statuses_from_trends.offset(offset_param).limit(limit_param(DEFAULT_STATUSES_LIMIT)), Status)
+                else
+                  []
+                end
   end

   def statuses_from_trends

app/controllers/api/v1/trends/tags_controller.rb

@@ -18,13 +18,11 @@ class Api::V1::Trends::TagsController < Api::BaseController
   end

   def set_tags
-    @tags = begin
-      if enabled?
-        tags_from_trends.offset(offset_param).limit(limit_param(DEFAULT_TAGS_LIMIT))
-      else
-        []
-      end
-    end
+    @tags = if enabled?
+              tags_from_trends.offset(offset_param).limit(limit_param(DEFAULT_TAGS_LIMIT))
+            else
+              []
+            end
   end

   def tags_from_trends

app/controllers/concerns/rate_limit_headers.rb

@@ -6,13 +6,11 @@ module RateLimitHeaders
   class_methods do
     def override_rate_limit_headers(method_name, options = {})
       around_action(only: method_name, if: :current_account) do |_controller, block|
-        begin
-          block.call
-        ensure
-          rate_limiter = RateLimiter.new(current_account, options)
-          rate_limit_headers = rate_limiter.to_headers
-          response.headers.merge!(rate_limit_headers) unless response.headers['X-RateLimit-Remaining'].present? && rate_limit_headers['X-RateLimit-Remaining'].to_i > response.headers['X-RateLimit-Remaining'].to_i
-        end
+        block.call
+      ensure
+        rate_limiter = RateLimiter.new(current_account, options)
+        rate_limit_headers = rate_limiter.to_headers
+        response.headers.merge!(rate_limit_headers) unless response.headers['X-RateLimit-Remaining'].present? && rate_limit_headers['X-RateLimit-Remaining'].to_i > response.headers['X-RateLimit-Remaining'].to_i
       end
     end
   end

app/controllers/concerns/two_factor_authentication_concern.rb

@@ -79,13 +79,11 @@ module TwoFactorAuthenticationConcern
     @body_classes = 'lighter'
     @webauthn_enabled = user.webauthn_enabled?

-    @scheme_type = begin
-      if user.webauthn_enabled? && user_params[:otp_attempt].blank?
-        'webauthn'
-      else
-        'totp'
-      end
-    end
+    @scheme_type = if user.webauthn_enabled? && user_params[:otp_attempt].blank?
+                     'webauthn'
+                   else
+                     'totp'
+                   end

     set_locale { render :two_factor }
   end

app/helpers/admin/dashboard_helper.rb

@@ -19,19 +19,17 @@ module Admin::DashboardHelper
   end

   def relevant_account_timestamp(account)
-    timestamp, exact = begin
-      if account.user_current_sign_in_at && account.user_current_sign_in_at < 24.hours.ago
-        [account.user_current_sign_in_at, true]
-      elsif account.user_current_sign_in_at
-        [account.user_current_sign_in_at, false]
-      elsif account.user_pending?
-        [account.user_created_at, true]
-      elsif account.last_status_at.present?
-        [account.last_status_at, true]
-      else
-        [nil, false]
-      end
-    end
+    timestamp, exact = if account.user_current_sign_in_at && account.user_current_sign_in_at < 24.hours.ago
+                         [account.user_current_sign_in_at, true]
+                       elsif account.user_current_sign_in_at
+                         [account.user_current_sign_in_at, false]
+                       elsif account.user_pending?
+                         [account.user_created_at, true]
+                       elsif account.last_status_at.present?
+                         [account.last_status_at, true]
+                       else
+                         [nil, false]
+                       end

     return '-' if timestamp.nil?
     return t('generic.today') unless exact

app/helpers/admin/trends/statuses_helper.rb

@@ -2,13 +2,11 @@
 module Admin::Trends::StatusesHelper
   def one_line_preview(status)
-    text = begin
-      if status.local?
-        status.text.split("\n").first
-      else
-        Nokogiri::HTML(status.text).css('html > body > *').first&.text
-      end
-    end
+    text = if status.local?
+             status.text.split("\n").first
+           else
+             Nokogiri::HTML(status.text).css('html > body > *').first&.text
+           end

     return '' if text.blank?

app/helpers/branding_helper.rb

@@ -23,14 +23,12 @@ module BrandingHelper
   end

   def render_symbol(version = :icon)
-    path = begin
-      case version
-      when :icon
-        'logo-symbol-icon.svg'
-      when :wordmark
-        'logo-symbol-wordmark.svg'
-      end
-    end
+    path = case version
+           when :icon
+             'logo-symbol-icon.svg'
+           when :wordmark
+             'logo-symbol-wordmark.svg'
+           end

     render(file: Rails.root.join('app', 'javascript', 'images', path)).html_safe # rubocop:disable Rails/OutputSafety
   end

app/helpers/domain_control_helper.rb

@@ -4,13 +4,11 @@ module DomainControlHelper
   def domain_not_allowed?(uri_or_domain)
     return if uri_or_domain.blank?

-    domain = begin
-      if uri_or_domain.include?('://')
-        Addressable::URI.parse(uri_or_domain).host
-      else
-        uri_or_domain
-      end
-    end
+    domain = if uri_or_domain.include?('://')
+               Addressable::URI.parse(uri_or_domain).host
+             else
+               uri_or_domain
+             end

     if whitelist_mode?
       !DomainAllow.allowed?(domain)

app/helpers/formatting_helper.rb

@@ -21,30 +21,26 @@ module FormattingHelper
   def rss_status_content_format(status)
     html = status_content_format(status)

-    before_html = begin
-      if status.spoiler_text?
-        tag.p do
-          tag.strong do
-            I18n.t('rss.content_warning', locale: available_locale_or_nil(status.language) || I18n.default_locale)
-          end
+    before_html = if status.spoiler_text?
+      tag.p do
+        tag.strong do
+          I18n.t('rss.content_warning', locale: available_locale_or_nil(status.language) || I18n.default_locale)
+        end

         status.spoiler_text
       end + tag.hr
     end
-    end

-    after_html = begin
-      if status.preloadable_poll
-        tag.p do
-          safe_join(
-            status.preloadable_poll.options.map do |o|
-              tag.send(status.preloadable_poll.multiple? ? 'checkbox' : 'radio', o, disabled: true)
-            end,
-            tag.br
-          )
-        end
+    after_html = if status.preloadable_poll
+      tag.p do
+        safe_join(
+          status.preloadable_poll.options.map do |o|
+            tag.send(status.preloadable_poll.multiple? ? 'checkbox' : 'radio', o, disabled: true)
+          end,
+          tag.br
+        )
+      end
     end
-    end

     prerender_custom_emojis(
       safe_join([before_html, html, after_html]),

app/helpers/instance_helper.rb

@@ -10,13 +10,11 @@ module InstanceHelper
   end

   def description_for_sign_up
-    prefix = begin
-      if @invite.present?
-        I18n.t('auth.description.prefix_invited_by_user', name: @invite.user.account.username)
-      else
-        I18n.t('auth.description.prefix_sign_up')
-      end
-    end
+    prefix = if @invite.present?
+               I18n.t('auth.description.prefix_invited_by_user', name: @invite.user.account.username)
+             else
+               I18n.t('auth.description.prefix_sign_up')
+             end

     safe_join([prefix, I18n.t('auth.description.suffix')], ' ')
   end

app/helpers/jsonld_helper.rb

@@ -26,15 +26,13 @@ module JsonLdHelper
   # The url attribute can be a string, an array of strings, or an array of objects.
   # The objects could include a mimeType. Not-included mimeType means it's text/html.
   def url_to_href(value, preferred_type = nil)
-    single_value = begin
-      if value.is_a?(Array) && !value.first.is_a?(String)
-        value.find { |link| preferred_type.nil? || ((link['mimeType'].presence || 'text/html') == preferred_type) }
-      elsif value.is_a?(Array)
-        value.first
-      else
-        value
-      end
-    end
+    single_value = if value.is_a?(Array) && !value.first.is_a?(String)
+                     value.find { |link| preferred_type.nil? || ((link['mimeType'].presence || 'text/html') == preferred_type) }
+                   elsif value.is_a?(Array)
+                     value.first
+                   else
+                     value
+                   end

     if single_value.nil? || single_value.is_a?(String)
       single_value
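The comment above url_to_href describes three accepted shapes for an ActivityPub url attribute. A hedged illustration of calls the helper is meant to handle; the URLs are invented, and the href extraction for object entries happens in the part of the method not shown in this hunk:

# A bare string is returned as-is.
url_to_href('https://example.com/@alice/1')

# For an array of strings, the first entry is used.
url_to_href(['https://example.com/@alice/1', 'https://example.com/users/alice/statuses/1'])

# For an array of objects, the entry whose mimeType matches the preferred type
# wins, and an object without a mimeType counts as text/html.
url_to_href(
  [
    { 'href' => 'https://example.com/@alice/1' },
    { 'href' => 'https://example.com/media/1.mp4', 'mimeType' => 'video/mp4' },
  ],
  'video/mp4'
)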

app/lib/activity_tracker.rb

@@ -27,14 +27,12 @@ class ActivityTracker
     (start_at.to_date...end_at.to_date).map do |date|
       key = key_at(date.to_time(:utc))

-      value = begin
-        case @type
-        when :basic
-          redis.get(key).to_i
-        when :unique
-          redis.pfcount(key)
-        end
-      end
+      value = case @type
+              when :basic
+                redis.get(key).to_i
+              when :unique
+                redis.pfcount(key)
+              end

       [date, value]
     end

app/lib/activitypub/activity/create.rb

@@ -108,26 +108,24 @@ class ActivityPub::Activity::Create < ActivityPub::Activity
   def process_status_params
     @status_parser = ActivityPub::Parser::StatusParser.new(@json, followers_collection: @account.followers_url)

-    @params = begin
-      {
+    @params = {
       uri: @status_parser.uri,
       url: @status_parser.url || @status_parser.uri,
       account: @account,
       text: converted_object_type? ? converted_text : (@status_parser.text || ''),
       language: @status_parser.language,
       spoiler_text: converted_object_type? ? '' : (@status_parser.spoiler_text || ''),
       created_at: @status_parser.created_at,
       edited_at: @status_parser.edited_at && @status_parser.edited_at != @status_parser.created_at ? @status_parser.edited_at : nil,
       override_timestamps: @options[:override_timestamps],
       reply: @status_parser.reply,
       sensitive: @account.sensitized? || @status_parser.sensitive || false,
       visibility: @status_parser.visibility,
       thread: replied_to_status,
       conversation: conversation_from_uri(@object['conversation']),
       media_attachment_ids: process_attachments.take(4).map(&:id),
       poll: process_poll,
     }
-    end
   end

   def process_audience

app/lib/activitypub/forwarder.rb

@@ -28,13 +28,11 @@ class ActivityPub::Forwarder
   end

   def signature_account_id
-    @signature_account_id ||= begin
-      if in_reply_to_local?
-        in_reply_to.account_id
-      else
-        reblogged_by_account_ids.first
-      end
-    end
+    @signature_account_id ||= if in_reply_to_local?
+                                in_reply_to.account_id
+                              else
+                                reblogged_by_account_ids.first
+                              end
   end

   def inboxes

app/lib/admin/metrics/dimension/software_versions_dimension.rb

@@ -58,12 +58,10 @@ class Admin::Metrics::Dimension::SoftwareVersionsDimension < Admin::Metrics::Dim
   end

   def redis_info
-    @redis_info ||= begin
-      if redis.is_a?(Redis::Namespace)
-        redis.redis.info
-      else
-        redis.info
-      end
-    end
+    @redis_info ||= if redis.is_a?(Redis::Namespace)
+                      redis.redis.info
+                    else
+                      redis.info
+                    end
   end
 end

app/lib/admin/metrics/dimension/space_usage_dimension.rb

@@ -59,12 +59,10 @@ class Admin::Metrics::Dimension::SpaceUsageDimension < Admin::Metrics::Dimension
   end

   def redis_info
-    @redis_info ||= begin
-      if redis.is_a?(Redis::Namespace)
-        redis.redis.info
-      else
-        redis.info
-      end
-    end
+    @redis_info ||= if redis.is_a?(Redis::Namespace)
+                      redis.redis.info
+                    else
+                      redis.info
+                    end
   end
 end

app/lib/extractor.rb

@@ -8,12 +8,10 @@ module Extractor
   module_function

   def extract_entities_with_indices(text, options = {}, &block)
-    entities = begin
-      extract_urls_with_indices(text, options) +
-        extract_hashtags_with_indices(text, check_url_overlap: false) +
-        extract_mentions_or_lists_with_indices(text) +
-        extract_extra_uris_with_indices(text)
-    end
+    entities = extract_urls_with_indices(text, options) +
+               extract_hashtags_with_indices(text, check_url_overlap: false) +
+               extract_mentions_or_lists_with_indices(text) +
+               extract_extra_uris_with_indices(text)

     return [] if entities.empty?

app/lib/importer/statuses_index_importer.rb

@@ -24,13 +24,11 @@ class Importer::StatusesIndexImporter < Importer::BaseImporter
       # is called before rendering the data and we need to filter based
      # on the results of the filter, so this filtering happens here instead
      bulk.map! do |entry|
-        new_entry = begin
-          if entry[:index] && entry.dig(:index, :data, 'searchable_by').blank?
-            { delete: entry[:index].except(:data) }
-          else
-            entry
-          end
-        end
+        new_entry = if entry[:index] && entry.dig(:index, :data, 'searchable_by').blank?
+                      { delete: entry[:index].except(:data) }
+                    else
+                      entry
+                    end

         if new_entry[:index]
           indexed += 1

app/lib/link_details_extractor.rb

@@ -232,26 +232,24 @@ class LinkDetailsExtractor
   end

   def structured_data
-    @structured_data ||= begin
-      # Some publications have more than one JSON-LD definition on the page,
-      # and some of those definitions aren't valid JSON either, so we have
-      # to loop through here until we find something that is the right type
-      # and doesn't break
-      document.xpath('//script[@type="application/ld+json"]').filter_map do |element|
-        json_ld = element.content&.gsub(CDATA_JUNK_PATTERN, '')
+    # Some publications have more than one JSON-LD definition on the page,
+    # and some of those definitions aren't valid JSON either, so we have
+    # to loop through here until we find something that is the right type
+    # and doesn't break
+    @structured_data ||= document.xpath('//script[@type="application/ld+json"]').filter_map do |element|
+      json_ld = element.content&.gsub(CDATA_JUNK_PATTERN, '')

       next if json_ld.blank?

       structured_data = StructuredData.new(html_entities.decode(json_ld))

       next unless structured_data.valid?

       structured_data
     rescue Oj::ParseError, EncodingError
       Rails.logger.debug { "Invalid JSON-LD in #{@original_url}" }
       next
     end.first
-    end
   end

   def document
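The comment block explains why structured_data loops over every application/ld+json script instead of trusting the first one: pages often carry several definitions and some are not valid JSON. Because the rescue sits directly inside the filter_map block, a parse failure only skips that one candidate. A hedged, self-contained sketch of the same idea using plain JSON parsing and invented payloads:

require 'json'

candidates = [
  'not json at all',
  '{"@type":"BreadcrumbList"}',
  '{"@type":"NewsArticle","headline":"Example headline"}',
]

article = candidates.filter_map do |raw|
  data = JSON.parse(raw)
  next unless data['@type'] == 'NewsArticle'

  data
rescue JSON::ParserError
  next # invalid JSON: drop this candidate and keep looking
end.first

puts article['headline'] # => Example headline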

app/lib/request.rb

@@ -215,26 +215,24 @@ class Request
     addr_by_socket = {}

     addresses.each do |address|
-      begin
-        check_private_address(address, host)
+      check_private_address(address, host)

       sock     = ::Socket.new(address.is_a?(Resolv::IPv6) ? ::Socket::AF_INET6 : ::Socket::AF_INET, ::Socket::SOCK_STREAM, 0)
       sockaddr = ::Socket.pack_sockaddr_in(port, address.to_s)

       sock.setsockopt(::Socket::IPPROTO_TCP, ::Socket::TCP_NODELAY, 1)

       sock.connect_nonblock(sockaddr)

       # If that hasn't raised an exception, we somehow managed to connect
       # immediately, close pending sockets and return immediately
       socks.each(&:close)
       return sock
     rescue IO::WaitWritable
       socks << sock
       addr_by_socket[sock] = sockaddr
     rescue => e
       outer_e = e
-      end
     end

     until socks.empty?

@@ -279,9 +277,7 @@ class Request
     end

     def private_address_exceptions
-      @private_address_exceptions = begin
-        (ENV['ALLOWED_PRIVATE_ADDRESSES'] || '').split(',').map { |addr| IPAddr.new(addr) }
-      end
+      @private_address_exceptions = (ENV['ALLOWED_PRIVATE_ADDRESSES'] || '').split(',').map { |addr| IPAddr.new(addr) }
     end
   end
 end
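The comments in the first hunk describe the unusual success path of connect_nonblock: it normally raises an error tagged with IO::WaitWritable while the connection is still in progress, and only returns without raising when the socket connected immediately. A hedged, standalone sketch of that idiom, reduced to a single placeholder address:

require 'socket'

addr = Socket.pack_sockaddr_in(443, '192.0.2.10') # placeholder address
sock = Socket.new(Socket::AF_INET, Socket::SOCK_STREAM, 0)

begin
  sock.connect_nonblock(addr)
  # No exception was raised: the kernel finished the handshake immediately.
  puts 'connected without waiting'
rescue IO::WaitWritable
  # Usual case: wait until the socket becomes writable, then re-check.
  IO.select(nil, [sock], nil, 5) or raise 'connection timed out'
  begin
    sock.connect_nonblock(addr)
  rescue Errno::EISCONN
    puts 'connected after waiting'
  end
end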

app/models/account.rb

@@ -459,13 +459,12 @@ class Account < ApplicationRecord
       return [] if text.blank?

       text.scan(MENTION_RE).map { |match| match.first.split('@', 2) }.uniq.filter_map do |(username, domain)|
-        domain = begin
-          if TagManager.instance.local_domain?(domain)
-            nil
-          else
-            TagManager.instance.normalize_domain(domain)
-          end
-        end
+        domain = if TagManager.instance.local_domain?(domain)
+                   nil
+                 else
+                   TagManager.instance.normalize_domain(domain)
+                 end

         EntityCache.instance.mention(username, domain)
       end
     end

app/models/account/field.rb

@@ -25,13 +25,11 @@ class Account::Field < ActiveModelSerializers::Model
   end

   def value_for_verification
-    @value_for_verification ||= begin
-      if account.local?
-        value
-      else
-        extract_url_from_html
-      end
-    end
+    @value_for_verification ||= if account.local?
+                                  value
+                                else
+                                  extract_url_from_html
+                                end
   end

   def verifiable?

app/models/admin/account_action.rb

@@ -166,13 +166,11 @@ class Admin::AccountAction
   end

   def reports
-    @reports ||= begin
-      if type == 'none'
-        with_report? ? [report] : []
-      else
-        Report.where(target_account: target_account).unresolved
-      end
-    end
+    @reports ||= if type == 'none'
+                   with_report? ? [report] : []
+                 else
+                   Report.where(target_account: target_account).unresolved
+                 end
   end

   def warning_preset

app/models/announcement.rb

@@ -54,13 +54,11 @@ class Announcement < ApplicationRecord
   end

   def statuses
-    @statuses ||= begin
-      if status_ids.nil?
-        []
-      else
-        Status.where(id: status_ids, visibility: [:public, :unlisted])
-      end
-    end
+    @statuses ||= if status_ids.nil?
+                    []
+                  else
+                    Status.where(id: status_ids, visibility: [:public, :unlisted])
+                  end
   end

   def tags

app/models/concerns/account_merging.rb

@@ -21,11 +21,9 @@ module AccountMerging
     owned_classes.each do |klass|
       klass.where(account_id: other_account.id).find_each do |record|
-        begin
-          record.update_attribute(:account_id, id)
-        rescue ActiveRecord::RecordNotUnique
-          next
-        end
+        record.update_attribute(:account_id, id)
+      rescue ActiveRecord::RecordNotUnique
+        next
       end
     end

@@ -36,11 +34,9 @@ module AccountMerging
     target_classes.each do |klass|
       klass.where(target_account_id: other_account.id).find_each do |record|
-        begin
-          record.update_attribute(:target_account_id, id)
-        rescue ActiveRecord::RecordNotUnique
-          next
-        end
+        record.update_attribute(:target_account_id, id)
+      rescue ActiveRecord::RecordNotUnique
+        next
       end
     end

app/models/concerns/pam_authenticable.rb

@@ -42,13 +42,11 @@ module PamAuthenticable
     def self.pam_get_user(attributes = {})
      return nil unless attributes[:email]

-      resource = begin
-        if Devise.check_at_sign && !attributes[:email].index('@')
-          joins(:account).find_by(accounts: { username: attributes[:email] })
-        else
-          find_by(email: attributes[:email])
-        end
-      end
+      resource = if Devise.check_at_sign && !attributes[:email].index('@')
+                   joins(:account).find_by(accounts: { username: attributes[:email] })
+                 else
+                   find_by(email: attributes[:email])
+                 end

      if resource.nil?
        resource = new(email: attributes[:email], agreement: true)

app/models/email_domain_block.rb

@@ -69,13 +69,11 @@ class EmailDomainBlock < ApplicationRecord
   def extract_uris(domain_or_domains)
     Array(domain_or_domains).map do |str|
-      domain = begin
-        if str.include?('@')
-          str.split('@', 2).last
-        else
-          str
-        end
-      end
+      domain = if str.include?('@')
+                 str.split('@', 2).last
+               else
+                 str
+               end

       Addressable::URI.new.tap { |u| u.host = domain.strip } if domain.present?
     rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError

app/models/form/admin_settings.rb

@@ -76,13 +76,11 @@ class Form::AdminSettings
     define_method(key) do
       return instance_variable_get("@#{key}") if instance_variable_defined?("@#{key}")

-      stored_value = begin
-        if UPLOAD_KEYS.include?(key)
-          SiteUpload.where(var: key).first_or_initialize(var: key)
-        else
-          Setting.public_send(key)
-        end
-      end
+      stored_value = if UPLOAD_KEYS.include?(key)
+                       SiteUpload.where(var: key).first_or_initialize(var: key)
+                     else
+                       Setting.public_send(key)
+                     end

       instance_variable_set("@#{key}", stored_value)
     end

app/models/form/custom_emoji_batch.rb

@@ -36,13 +36,11 @@ class Form::CustomEmojiBatch
   def update!
     custom_emojis.each { |custom_emoji| authorize(custom_emoji, :update?) }

-    category = begin
-      if category_id.present?
-        CustomEmojiCategory.find(category_id)
-      elsif category_name.present?
-        CustomEmojiCategory.find_or_create_by!(name: category_name)
-      end
-    end
+    category = if category_id.present?
+                 CustomEmojiCategory.find(category_id)
+               elsif category_name.present?
+                 CustomEmojiCategory.find_or_create_by!(name: category_name)
+               end

     custom_emojis.each do |custom_emoji|
       custom_emoji.update(category_id: category&.id)

app/models/notification.rb

@@ -87,13 +87,11 @@ class Notification < ApplicationRecord
   class << self
     def browserable(types: [], exclude_types: [], from_account_id: nil)
-      requested_types = begin
-        if types.empty?
-          TYPES
-        else
-          types.map(&:to_sym) & TYPES
-        end
-      end
+      requested_types = if types.empty?
+                          TYPES
+                        else
+                          types.map(&:to_sym) & TYPES
+                        end

       requested_types -= exclude_types.map(&:to_sym)

app/models/remote_follow.rb

@@ -36,13 +36,11 @@ class RemoteFollow
     username, domain = value.strip.gsub(/\A@/, '').split('@')

-    domain = begin
-      if TagManager.instance.local_domain?(domain)
-        nil
-      else
-        TagManager.instance.normalize_domain(domain)
-      end
-    end
+    domain = if TagManager.instance.local_domain?(domain)
+               nil
+             else
+               TagManager.instance.normalize_domain(domain)
+             end

     [username, domain].compact.join('@')
   rescue Addressable::URI::InvalidURIError

app/models/status.rb

@@ -368,13 +368,12 @@ class Status < ApplicationRecord
       return [] if text.blank?

       text.scan(FetchLinkCardService::URL_PATTERN).map(&:second).uniq.filter_map do |url|
-        status = begin
-          if TagManager.instance.local_url?(url)
-            ActivityPub::TagManager.instance.uri_to_resource(url, Status)
-          else
-            EntityCache.instance.status(url)
-          end
-        end
+        status = if TagManager.instance.local_url?(url)
+                   ActivityPub::TagManager.instance.uri_to_resource(url, Status)
+                 else
+                   EntityCache.instance.status(url)
+                 end

         status&.distributable? ? status : nil
       end
     end

app/models/status_edit.rb

@@ -51,14 +51,12 @@ class StatusEdit < ApplicationRecord
   def ordered_media_attachments
     return @ordered_media_attachments if defined?(@ordered_media_attachments)

-    @ordered_media_attachments = begin
-      if ordered_media_attachment_ids.nil?
-        []
-      else
-        map = status.media_attachments.index_by(&:id)
-        ordered_media_attachment_ids.map.with_index { |media_attachment_id, index| PreservedMediaAttachment.new(media_attachment: map[media_attachment_id], description: media_descriptions[index]) }
-      end
-    end
+    @ordered_media_attachments = if ordered_media_attachment_ids.nil?
+                                   []
+                                 else
+                                   map = status.media_attachments.index_by(&:id)
+                                   ordered_media_attachment_ids.map.with_index { |media_attachment_id, index| PreservedMediaAttachment.new(media_attachment: map[media_attachment_id], description: media_descriptions[index]) }
+                                 end
   end

   def proper

app/models/trends/links.rb

@@ -113,13 +113,11 @@ class Trends::Links < Trends::Base
       max_score = preview_card.max_score
       max_score = 0 if max_time.nil? || max_time < (at_time - options[:max_score_cooldown])

-      score = begin
-        if expected > observed || observed < options[:threshold]
-          0
-        else
-          ((observed - expected)**2) / expected
-        end
-      end
+      score = if expected > observed || observed < options[:threshold]
+                0
+              else
+                ((observed - expected)**2) / expected
+              end

       if score > max_score
         max_score = score

@@ -129,13 +127,11 @@ class Trends::Links < Trends::Base
         preview_card.update_columns(max_score: max_score, max_score_at: max_time)
       end

-      decaying_score = begin
-        if max_score.zero? || !valid_locale?(preview_card.language)
-          0
-        else
-          max_score * (0.5**((at_time.to_f - max_time.to_f) / options[:max_score_halflife].to_f))
-        end
-      end
+      decaying_score = if max_score.zero? || !valid_locale?(preview_card.language)
+                         0
+                       else
+                         max_score * (0.5**((at_time.to_f - max_time.to_f) / options[:max_score_halflife].to_f))
+                       end

       [decaying_score, preview_card]
     end

app/models/trends/statuses.rb

@@ -99,21 +99,17 @@ class Trends::Statuses < Trends::Base
       expected = 1.0
      observed = (status.reblogs_count + status.favourites_count).to_f

-      score = begin
-        if expected > observed || observed < options[:threshold]
-          0
-        else
-          ((observed - expected)**2) / expected
-        end
-      end
+      score = if expected > observed || observed < options[:threshold]
+                0
+              else
+                ((observed - expected)**2) / expected
+              end

-      decaying_score = begin
-        if score.zero? || !eligible?(status)
-          0
-        else
-          score * (0.5**((at_time.to_f - status.created_at.to_f) / options[:score_halflife].to_f))
-        end
-      end
+      decaying_score = if score.zero? || !eligible?(status)
+                         0
+                       else
+                         score * (0.5**((at_time.to_f - status.created_at.to_f) / options[:score_halflife].to_f))
+                       end

       [decaying_score, status]
     end
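Both trend classes above compute a chi-squared style surprise score, (observed - expected)^2 / expected, and then decay it exponentially by half-lives, score * 0.5 ** (age / halflife). A small arithmetic illustration with made-up numbers (the real thresholds and half-lives come from options):

# Illustrative values only.
expected = 1.0
observed = 9.0                                    # reblogs + favourites
score    = ((observed - expected)**2) / expected  # => 64.0

halflife = 2 * 60 * 60                            # two hours, in seconds
age      = 4 * 60 * 60                            # the status is four hours old
decayed  = score * (0.5**(age.to_f / halflife))   # => 16.0 after two halvings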

app/models/trends/tag_filter.rb

@@ -13,13 +13,11 @@ class Trends::TagFilter
   end

   def results
-    scope = begin
-      if params[:status] == 'pending_review'
-        Tag.unscoped
-      else
-        trending_scope
-      end
-    end
+    scope = if params[:status] == 'pending_review'
+              Tag.unscoped
+            else
+              trending_scope
+            end

     params.each do |key, value|
       next if key.to_s == 'page'

app/models/trends/tags.rb

@@ -63,13 +63,11 @@ class Trends::Tags < Trends::Base
       max_score = tag.max_score
      max_score = 0 if max_time.nil? || max_time < (at_time - options[:max_score_cooldown])

-      score = begin
-        if expected > observed || observed < options[:threshold]
-          0
-        else
-          ((observed - expected)**2) / expected
-        end
-      end
+      score = if expected > observed || observed < options[:threshold]
+                0
+              else
+                ((observed - expected)**2) / expected
+              end

       if score > max_score
         max_score = score

app/models/web/push_subscription.rb

@@ -53,25 +53,21 @@ class Web::PushSubscription < ApplicationRecord
   def associated_user
     return @associated_user if defined?(@associated_user)

-    @associated_user = begin
-      if user_id.nil?
-        session_activation.user
-      else
-        user
-      end
-    end
+    @associated_user = if user_id.nil?
+                         session_activation.user
+                       else
+                         user
+                       end
   end

   def associated_access_token
     return @associated_access_token if defined?(@associated_access_token)

-    @associated_access_token = begin
-      if access_token_id.nil?
-        find_or_create_access_token.token
-      else
-        access_token.token
-      end
-    end
+    @associated_access_token = if access_token_id.nil?
+                                 find_or_create_access_token.token
+                               else
+                                 access_token.token
+                               end
   end

   class << self

app/presenters/tag_relationships_presenter.rb

@@ -4,12 +4,10 @@ class TagRelationshipsPresenter
   attr_reader :following_map

   def initialize(tags, current_account_id = nil, **options)
-    @following_map = begin
-      if current_account_id.nil?
-        {}
-      else
-        TagFollow.select(:tag_id).where(tag_id: tags.map(&:id), account_id: current_account_id).each_with_object({}) { |f, h| h[f.tag_id] = true }.merge(options[:following_map] || {})
-      end
-    end
+    @following_map = if current_account_id.nil?
+                       {}
+                     else
+                       TagFollow.select(:tag_id).where(tag_id: tags.map(&:id), account_id: current_account_id).each_with_object({}) { |f, h| h[f.tag_id] = true }.merge(options[:following_map] || {})
+                     end
   end
 end

app/services/account_search_service.rb

@@ -32,15 +32,13 @@ class AccountSearchService < BaseService
     return @exact_match if defined?(@exact_match)

-    match = begin
-      if options[:resolve]
-        ResolveAccountService.new.call(query)
-      elsif domain_is_local?
-        Account.find_local(query_username)
-      else
-        Account.find_remote(query_username, query_domain)
-      end
-    end
+    match = if options[:resolve]
+              ResolveAccountService.new.call(query)
+            elsif domain_is_local?
+              Account.find_local(query_username)
+            else
+              Account.find_remote(query_username, query_domain)
+            end

     match = nil if !match.nil? && !account.nil? && options[:following] && !account.following?(match)

app/services/activitypub/fetch_featured_tags_collection_service.rb

@@ -22,14 +22,12 @@ class ActivityPub::FetchFeaturedTagsCollectionService < BaseService
     collection = fetch_collection(collection['first']) if collection['first'].present?

     while collection.is_a?(Hash)
-      items = begin
-        case collection['type']
-        when 'Collection', 'CollectionPage'
-          collection['items']
-        when 'OrderedCollection', 'OrderedCollectionPage'
-          collection['orderedItems']
-        end
-      end
+      items = case collection['type']
+              when 'Collection', 'CollectionPage'
+                collection['items']
+              when 'OrderedCollection', 'OrderedCollectionPage'
+                collection['orderedItems']
+              end

       break if items.blank?

app/services/activitypub/fetch_remote_status_service.rb

@@ -9,13 +9,11 @@ class ActivityPub::FetchRemoteStatusService < BaseService
   # Should be called when uri has already been checked for locality
   def call(uri, id: true, prefetched_body: nil, on_behalf_of: nil, expected_actor_uri: nil, request_id: nil)
     @request_id = request_id || "#{Time.now.utc.to_i}-status-#{uri}"
-    @json = begin
-      if prefetched_body.nil?
-        fetch_resource(uri, id, on_behalf_of)
-      else
-        body_to_json(prefetched_body, compare_id: id ? uri : nil)
-      end
-    end
+    @json = if prefetched_body.nil?
+              fetch_resource(uri, id, on_behalf_of)
+            else
+              body_to_json(prefetched_body, compare_id: id ? uri : nil)
+            end

     return unless supported_context?

app/services/fetch_link_card_service.rb

@@ -69,16 +69,14 @@ class FetchLinkCardService < BaseService
   end

   def parse_urls
-    urls = begin
-      if @status.local?
-        @status.text.scan(URL_PATTERN).map { |array| Addressable::URI.parse(array[1]).normalize }
-      else
-        document = Nokogiri::HTML(@status.text)
-        links = document.css('a')
+    urls = if @status.local?
+             @status.text.scan(URL_PATTERN).map { |array| Addressable::URI.parse(array[1]).normalize }
+           else
+             document = Nokogiri::HTML(@status.text)
+             links = document.css('a')

-        links.filter_map { |a| Addressable::URI.parse(a['href']) unless skip_link?(a) }.filter_map(&:normalize)
-      end
-    end
+             links.filter_map { |a| Addressable::URI.parse(a['href']) unless skip_link?(a) }.filter_map(&:normalize)
+           end

     urls.reject { |uri| bad_url?(uri) }.first
   end

app/services/process_mentions_service.rb

@@ -28,13 +28,11 @@ class ProcessMentionsService < BaseService
     @status.text = @status.text.gsub(Account::MENTION_RE) do |match|
       username, domain = Regexp.last_match(1).split('@')

-      domain = begin
-        if TagManager.instance.local_domain?(domain)
-          nil
-        else
-          TagManager.instance.normalize_domain(domain)
-        end
-      end
+      domain = if TagManager.instance.local_domain?(domain)
+                 nil
+               else
+                 TagManager.instance.normalize_domain(domain)
+               end

       mentioned_account = Account.find_remote(username, domain)

app/services/reblog_service.rb

@@ -20,13 +20,11 @@ class ReblogService < BaseService
     return reblog unless reblog.nil?

-    visibility = begin
-      if reblogged_status.hidden?
-        reblogged_status.visibility
-      else
-        options[:visibility] || account.user&.setting_default_privacy
-      end
-    end
+    visibility = if reblogged_status.hidden?
+                   reblogged_status.visibility
+                 else
+                   options[:visibility] || account.user&.setting_default_privacy
+                 end

     reblog = account.statuses.create!(reblog: reblogged_status, text: '', visibility: visibility, rate_limit: options[:with_rate_limit])

app/services/resolve_account_service.rb

@@ -71,13 +71,11 @@ class ResolveAccountService < BaseService
       @username, @domain = uri.strip.gsub(/\A@/, '').split('@')
     end

-    @domain = begin
-      if TagManager.instance.local_domain?(@domain)
-        nil
-      else
-        TagManager.instance.normalize_domain(@domain)
-      end
-    end
+    @domain = if TagManager.instance.local_domain?(@domain)
+                nil
+              else
+                TagManager.instance.normalize_domain(@domain)
+              end

     @uri = [@username, @domain].compact.join('@')
   end

app/validators/domain_validator.rb

@@ -4,13 +4,11 @@ class DomainValidator < ActiveModel::EachValidator
   def validate_each(record, attribute, value)
     return if value.blank?

-    domain = begin
-      if options[:acct]
-        value.split('@').last
-      else
-        value
-      end
-    end
+    domain = if options[:acct]
+               value.split('@').last
+             else
+               value
+             end

     record.errors.add(attribute, I18n.t('domain_validator.invalid_domain')) unless compliant?(domain)
   end

app/validators/existing_username_validator.rb

@@ -4,16 +4,14 @@ class ExistingUsernameValidator < ActiveModel::EachValidator
   def validate_each(record, attribute, value)
     return if value.blank?

-    usernames_and_domains = begin
-      value.split(',').map do |str|
-        username, domain = str.strip.gsub(/\A@/, '').split('@', 2)
-        domain = nil if TagManager.instance.local_domain?(domain)
+    usernames_and_domains = value.split(',').map do |str|
+      username, domain = str.strip.gsub(/\A@/, '').split('@', 2)
+      domain = nil if TagManager.instance.local_domain?(domain)

       next if username.blank?

       [str, username, domain]
     end.compact
-    end

     usernames_with_no_accounts = usernames_and_domains.filter_map do |(str, username, domain)|
       str unless Account.find_remote(username, domain)

app/validators/import_validator.rb

@@ -35,13 +35,11 @@ class ImportValidator < ActiveModel::Validator
   def validate_following_import(import, row_count)
     base_limit = FollowLimitValidator.limit_for_account(import.account)

-    limit = begin
-      if import.overwrite?
-        base_limit
-      else
-        base_limit - import.account.following_count
-      end
-    end
+    limit = if import.overwrite?
+              base_limit
+            else
+              base_limit - import.account.following_count
+            end

     import.errors.add(:data, I18n.t('users.follow_limit_reached', limit: base_limit)) if row_count > limit
   end

app/workers/backup_worker.rb

@@ -9,12 +9,10 @@ class BackupWorker
     backup_id = msg['args'].first

     ActiveRecord::Base.connection_pool.with_connection do
-      begin
-        backup = Backup.find(backup_id)
-        backup.destroy
-      rescue ActiveRecord::RecordNotFound
-        true
-      end
+      backup = Backup.find(backup_id)
+      backup.destroy
+    rescue ActiveRecord::RecordNotFound
+      true
     end
   end

app/workers/post_process_media_worker.rb

@@ -9,13 +9,11 @@ class PostProcessMediaWorker
     media_attachment_id = msg['args'].first

     ActiveRecord::Base.connection_pool.with_connection do
-      begin
-        media_attachment = MediaAttachment.find(media_attachment_id)
-        media_attachment.processing = :failed
-        media_attachment.save
-      rescue ActiveRecord::RecordNotFound
-        true
-      end
+      media_attachment = MediaAttachment.find(media_attachment_id)
+      media_attachment.processing = :failed
+      media_attachment.save
+    rescue ActiveRecord::RecordNotFound
+      true
     end

     Sidekiq.logger.error("Processing media attachment #{media_attachment_id} failed with #{msg['error_message']}")

app/workers/scheduler/follow_recommendations_scheduler.rb

@@ -19,13 +19,11 @@ class Scheduler::FollowRecommendationsScheduler
     fallback_recommendations = FollowRecommendation.order(rank: :desc).limit(SET_SIZE)

     Trends.available_locales.each do |locale|
-      recommendations = begin
-        if AccountSummary.safe.filtered.localized(locale).exists? # We can skip the work if no accounts with that language exist
-          FollowRecommendation.localized(locale).order(rank: :desc).limit(SET_SIZE).map { |recommendation| [recommendation.account_id, recommendation.rank] }
-        else
-          []
-        end
-      end
+      recommendations = if AccountSummary.safe.filtered.localized(locale).exists? # We can skip the work if no accounts with that language exist
+                          FollowRecommendation.localized(locale).order(rank: :desc).limit(SET_SIZE).map { |recommendation| [recommendation.account_id, recommendation.rank] }
+                        else
+                          []
+                        end

       # Use language-agnostic results if there are not enough language-specific ones
       missing = SET_SIZE - recommendations.size

db/migrate/20180528141303_fix_accounts_unique_index.rb

@@ -106,21 +106,17 @@ class FixAccountsUniqueIndex < ActiveRecord::Migration[5.2]
     # to check for (and skip past) uniqueness errors
     [Favourite, Follow, FollowRequest, Block, Mute].each do |klass|
       klass.where(account_id: duplicate_account.id).find_each do |record|
-        begin
-          record.update_attribute(:account_id, main_account.id)
-        rescue ActiveRecord::RecordNotUnique
-          next
-        end
+        record.update_attribute(:account_id, main_account.id)
+      rescue ActiveRecord::RecordNotUnique
+        next
       end
     end

     [Follow, FollowRequest, Block, Mute].each do |klass|
       klass.where(target_account_id: duplicate_account.id).find_each do |record|
-        begin
-          record.update_attribute(:target_account_id, main_account.id)
-        rescue ActiveRecord::RecordNotUnique
-          next
-        end
+        record.update_attribute(:target_account_id, main_account.id)
+      rescue ActiveRecord::RecordNotUnique
+        next
      end
    end
  end
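This migration (and the maintenance CLI further down) moves rows one at a time so that a single uniqueness collision can be skipped instead of aborting the whole run; update_attribute writes the one column without running validations, and the begin-less rescue inside the block is valid on Ruby 2.6 and later. A minimal hedged sketch of the same pattern, assuming a unique index on (account_id, status_id) for favourites:

Favourite.where(account_id: duplicate_account.id).find_each do |favourite|
  favourite.update_attribute(:account_id, main_account.id)
rescue ActiveRecord::RecordNotUnique
  next # the canonical account already has an identical favourite
end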

db/migrate/20180812173710_copy_status_stats.rb

@@ -43,12 +43,10 @@ class CopyStatusStats < ActiveRecord::Migration[5.2]
     # We cannot use bulk INSERT or overarching transactions here because of possible
     # uniqueness violations that we need to skip over
     Status.unscoped.select('id, reblogs_count, favourites_count, created_at, updated_at').find_each do |status|
-      begin
-        params = [[nil, status.id], [nil, status.reblogs_count], [nil, status.favourites_count], [nil, status.created_at], [nil, status.updated_at]]
-        exec_insert('INSERT INTO status_stats (status_id, reblogs_count, favourites_count, created_at, updated_at) VALUES ($1, $2, $3, $4, $5)', nil, params)
-      rescue ActiveRecord::RecordNotUnique
-        next
-      end
+      params = [[nil, status.id], [nil, status.reblogs_count], [nil, status.favourites_count], [nil, status.created_at], [nil, status.updated_at]]
+      exec_insert('INSERT INTO status_stats (status_id, reblogs_count, favourites_count, created_at, updated_at) VALUES ($1, $2, $3, $4, $5)', nil, params)
+    rescue ActiveRecord::RecordNotUnique
+      next
     end
   end
 end

db/migrate/20181116173541_copy_account_stats.rb

@@ -43,12 +43,10 @@ class CopyAccountStats < ActiveRecord::Migration[5.2]
     # We cannot use bulk INSERT or overarching transactions here because of possible
     # uniqueness violations that we need to skip over
     Account.unscoped.select('id, statuses_count, following_count, followers_count, created_at, updated_at').find_each do |account|
-      begin
-        params = [[nil, account.id], [nil, account[:statuses_count]], [nil, account[:following_count]], [nil, account[:followers_count]], [nil, account.created_at], [nil, account.updated_at]]
-        exec_insert('INSERT INTO account_stats (account_id, statuses_count, following_count, followers_count, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6)', nil, params)
-      rescue ActiveRecord::RecordNotUnique
-        next
-      end
+      params = [[nil, account.id], [nil, account[:statuses_count]], [nil, account[:following_count]], [nil, account[:followers_count]], [nil, account.created_at], [nil, account.updated_at]]
+      exec_insert('INSERT INTO account_stats (account_id, statuses_count, following_count, followers_count, created_at, updated_at) VALUES ($1, $2, $3, $4, $5, $6)', nil, params)
+    rescue ActiveRecord::RecordNotUnique
+      next
     end
   end
 end

lib/mastodon/accounts_cli.rb

@@ -490,14 +490,12 @@ module Mastodon
       scope = Account.where(id: ::Follow.where(account: account).select(:target_account_id))

       scope.find_each do |target_account|
-        begin
-          UnfollowService.new.call(account, target_account)
-        rescue => e
-          progress.log pastel.red("Error processing #{target_account.id}: #{e}")
-        ensure
-          progress.increment
-          processed += 1
-        end
+        UnfollowService.new.call(account, target_account)
+      rescue => e
+        progress.log pastel.red("Error processing #{target_account.id}: #{e}")
+      ensure
+        progress.increment
+        processed += 1
       end

       BootstrapTimelineWorker.perform_async(account.id)

@@ -507,14 +505,12 @@ module Mastodon
       scope = Account.where(id: ::Follow.where(target_account: account).select(:account_id))

       scope.find_each do |target_account|
-        begin
-          UnfollowService.new.call(target_account, account)
-        rescue => e
-          progress.log pastel.red("Error processing #{target_account.id}: #{e}")
-        ensure
-          progress.increment
-          processed += 1
-        end
+        UnfollowService.new.call(target_account, account)
+      rescue => e
+        progress.log pastel.red("Error processing #{target_account.id}: #{e}")
+      ensure
+        progress.increment
+        processed += 1
       end
     end

lib/mastodon/cli_helper.rb

@@ -42,30 +42,28 @@ module Mastodon
       items.each do |item|
         futures << Concurrent::Future.execute(executor: pool) do
-          begin
-            if !progress.total.nil? && progress.progress + 1 > progress.total
-              # The number of items has changed between start and now,
-              # since there is no good way to predict the final count from
-              # here, just change the progress bar to an indeterminate one
-              progress.total = nil
-            end
-            progress.log("Processing #{item.id}") if options[:verbose]
-            result = ActiveRecord::Base.connection_pool.with_connection do
-              yield(item)
-            ensure
-              RedisConfiguration.pool.checkin if Thread.current[:redis]
-              Thread.current[:redis] = nil
-            end
-            aggregate.increment(result) if result.is_a?(Integer)
-          rescue => e
-            progress.log pastel.red("Error processing #{item.id}: #{e}")
-          ensure
-            progress.increment
-          end
+          if !progress.total.nil? && progress.progress + 1 > progress.total
+            # The number of items has changed between start and now,
+            # since there is no good way to predict the final count from
+            # here, just change the progress bar to an indeterminate one
+            progress.total = nil
+          end
+          progress.log("Processing #{item.id}") if options[:verbose]
+          result = ActiveRecord::Base.connection_pool.with_connection do
+            yield(item)
+          ensure
+            RedisConfiguration.pool.checkin if Thread.current[:redis]
+            Thread.current[:redis] = nil
+          end
+          aggregate.increment(result) if result.is_a?(Integer)
+        rescue => e
+          progress.log pastel.red("Error processing #{item.id}: #{e}")
+        ensure
+          progress.increment
         end
       end

lib/mastodon/ip_blocks_cli.rb

@@ -79,13 +79,11 @@ module Mastodon
     skipped = 0

     addresses.each do |address|
-      ip_blocks = begin
-        if options[:force]
-          IpBlock.where('ip >>= ?', address)
-        else
-          IpBlock.where('ip <<= ?', address)
-        end
-      end
+      ip_blocks = if options[:force]
+                    IpBlock.where('ip >>= ?', address)
+                  else
+                    IpBlock.where('ip <<= ?', address)
+                  end

       if ip_blocks.empty?
         say("#{address} is not yet blocked", :yellow)

lib/mastodon/maintenance_cli.rb

@@ -98,11 +98,9 @@ module Mastodon
       owned_classes.each do |klass|
         klass.where(account_id: other_account.id).find_each do |record|
-          begin
-            record.update_attribute(:account_id, id)
-          rescue ActiveRecord::RecordNotUnique
-            next
-          end
+          record.update_attribute(:account_id, id)
+        rescue ActiveRecord::RecordNotUnique
+          next
         end
       end

@@ -111,11 +109,9 @@ module Mastodon
       target_classes.each do |klass|
         klass.where(target_account_id: other_account.id).find_each do |record|
-          begin
-            record.update_attribute(:target_account_id, id)
-          rescue ActiveRecord::RecordNotUnique
-            next
-          end
+          record.update_attribute(:target_account_id, id)
+        rescue ActiveRecord::RecordNotUnique
+          next
         end
       end

@@ -601,11 +597,9 @@ module Mastodon
       owned_classes = [ConversationMute, AccountConversation]
       owned_classes.each do |klass|
         klass.where(conversation_id: duplicate_conv.id).find_each do |record|
-          begin
-            record.update_attribute(:account_id, main_conv.id)
-          rescue ActiveRecord::RecordNotUnique
-            next
-          end
+          record.update_attribute(:account_id, main_conv.id)
+        rescue ActiveRecord::RecordNotUnique
+          next
         end
       end
     end

@@ -629,47 +623,37 @@ module Mastodon
       owned_classes << Bookmark if ActiveRecord::Base.connection.table_exists?(:bookmarks)

       owned_classes.each do |klass|
         klass.where(status_id: duplicate_status.id).find_each do |record|
-          begin
-            record.update_attribute(:status_id, main_status.id)
-          rescue ActiveRecord::RecordNotUnique
-            next
-          end
+          record.update_attribute(:status_id, main_status.id)
+        rescue ActiveRecord::RecordNotUnique
+          next
         end
       end

       StatusPin.where(account_id: main_status.account_id, status_id: duplicate_status.id).find_each do |record|
-        begin
-          record.update_attribute(:status_id, main_status.id)
-        rescue ActiveRecord::RecordNotUnique
-          next
-        end
+        record.update_attribute(:status_id, main_status.id)
+      rescue ActiveRecord::RecordNotUnique
+        next
       end

       Status.where(in_reply_to_id: duplicate_status.id).find_each do |record|
-        begin
-          record.update_attribute(:in_reply_to_id, main_status.id)
-        rescue ActiveRecord::RecordNotUnique
-          next
-        end
+        record.update_attribute(:in_reply_to_id, main_status.id)
+      rescue ActiveRecord::RecordNotUnique
+        next
       end

       Status.where(reblog_of_id: duplicate_status.id).find_each do |record|
-        begin
-          record.update_attribute(:reblog_of_id, main_status.id)
-        rescue ActiveRecord::RecordNotUnique
-          next
-        end
+        record.update_attribute(:reblog_of_id, main_status.id)
+      rescue ActiveRecord::RecordNotUnique
+        next
      end
    end

    def merge_tags!(main_tag, duplicate_tag)
      [FeaturedTag].each do |klass|
        klass.where(tag_id: duplicate_tag.id).find_each do |record|
-          begin
-            record.update_attribute(:tag_id, main_tag.id)
-          rescue ActiveRecord::RecordNotUnique
-            next
-          end
+          record.update_attribute(:tag_id, main_tag.id)
+        rescue ActiveRecord::RecordNotUnique
+          next
        end
      end
    end

lib/mastodon/media_cli.rb

@@ -116,13 +116,11 @@ module Mastodon
         loop do
           objects = begin
-            begin
-              bucket.objects(start_after: last_key, prefix: prefix).limit(1000).map { |x| x }
-            rescue => e
-              progress.log(pastel.red("Error fetching list of files: #{e}"))
-              progress.log("If you want to continue from this point, add --start-after=#{last_key} to your command") if last_key
-              break
-            end
+            bucket.objects(start_after: last_key, prefix: prefix).limit(1000).map { |x| x }
+          rescue => e
+            progress.log(pastel.red("Error fetching list of files: #{e}"))
+            progress.log("If you want to continue from this point, add --start-after=#{last_key} to your command") if last_key
+            break
           end

           break if objects.empty?

lib/mastodon/search_cli.rb

@@ -43,13 +43,11 @@ module Mastodon
       exit(1)
     end

-    indices = begin
-      if options[:only]
-        options[:only].map { |str| "#{str.camelize}Index".constantize }
-      else
-        INDICES
-      end
-    end
+    indices = if options[:only]
+                options[:only].map { |str| "#{str.camelize}Index".constantize }
+              else
+                INDICES
+              end

     pool = Concurrent::FixedThreadPool.new(options[:concurrency], max_queue: options[:concurrency] * 10)
     importers = indices.index_with { |index| "Importer::#{index.name}Importer".constantize.new(batch_size: options[:batch_size], executor: pool) }

lib/mastodon/upgrade_cli.rb

@@ -50,16 +50,14 @@ module Mastodon
         styles << :original unless styles.include?(:original)

         styles.each do |style|
-          success = begin
-            case Paperclip::Attachment.default_options[:storage]
-            when :s3
-              upgrade_storage_s3(progress, attachment, style)
-            when :fog
-              upgrade_storage_fog(progress, attachment, style)
-            when :filesystem
-              upgrade_storage_filesystem(progress, attachment, style)
-            end
-          end
+          success = case Paperclip::Attachment.default_options[:storage]
+                    when :s3
+                      upgrade_storage_s3(progress, attachment, style)
+                    when :fog
+                      upgrade_storage_fog(progress, attachment, style)
+                    when :filesystem
+                      upgrade_storage_filesystem(progress, attachment, style)
+                    end

           upgraded = true if style == :original && success

lib/paperclip/color_extractor.rb

@@ -161,13 +161,11 @@ module Paperclip
     def lighten_or_darken(color, by)
       hue, saturation, light = rgb_to_hsl(color.r, color.g, color.b)

-      light = begin
-        if light < 50
-          [100, light + by].min
-        else
-          [0, light - by].max
-        end
-      end
+      light = if light < 50
+                [100, light + by].min
+              else
+                [0, light - by].max
+              end

       ColorDiff::Color::RGB.new(*hsl_to_rgb(hue, saturation, light))
     end

lib/sanitize_ext/sanitize_config.rb

@@ -41,13 +41,11 @@ class Sanitize
       current_node = env[:node]

-      scheme = begin
-        if current_node['href'] =~ Sanitize::REGEX_PROTOCOL
-          Regexp.last_match(1).downcase
-        else
-          :relative
-        end
-      end
+      scheme = if current_node['href'] =~ Sanitize::REGEX_PROTOCOL
+                 Regexp.last_match(1).downcase
+               else
+                 :relative
+               end

       current_node.replace(Nokogiri::XML::Text.new(current_node.text, current_node.document)) unless LINK_PROTOCOLS.include?(scheme)
     end

lib/tasks/db.rake

@@ -4,16 +4,14 @@ namespace :db do
   namespace :migrate do
     desc 'Setup the db or migrate depending on state of db'
     task setup: :environment do
-      begin
-        if ActiveRecord::Migrator.current_version.zero?
-          Rake::Task['db:migrate'].invoke
-          Rake::Task['db:seed'].invoke
-        end
-      rescue ActiveRecord::NoDatabaseError
-        Rake::Task['db:setup'].invoke
-      else
-        Rake::Task['db:migrate'].invoke
+      if ActiveRecord::Migrator.current_version.zero?
+        Rake::Task['db:migrate'].invoke
+        Rake::Task['db:seed'].invoke
       end
+    rescue ActiveRecord::NoDatabaseError
+      Rake::Task['db:setup'].invoke
+    else
+      Rake::Task['db:migrate'].invoke
     end
   end
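Rake task bodies are ordinary Ruby blocks, and since Ruby 2.6 a do...end block accepts rescue, else and ensure clauses directly, which is why the explicit begin in this task was redundant. A minimal runnable sketch of the same structure outside of Rake:

# No begin keyword is needed for rescue/else inside a do...end block (Ruby 2.6+).
[1, 0, 2].each do |divisor|
  result = 10 / divisor
rescue ZeroDivisionError
  puts 'skipping division by zero'
else
  puts "10 / #{divisor} = #{result}"
end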