gearheads/mastodon (archived)

Separate background jobs into different queues. ATTENTION: the new queue "pull" must be added to the Sidekiq invocation in your systemd file.

The pull queue will handle link crawling, thread resolving, and OStatus processing. Such tasks are more likely to hang for a longer time (due to network requests), so it is more sensible not to make the "in-house" tasks wait for them.
gh/stable
Eugen Rochko 2017-04-04 00:53:20 +02:00
parent 8232f76c48
commit f722bd2387
13 changed files with 16 additions and 10 deletions
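
Every worker diff below applies the same Sidekiq routing pattern: sidekiq_options queue: '...' sends that worker's jobs to a named queue, and a Sidekiq process only picks up jobs from the queues passed to it with -q. A minimal sketch of the pattern, for illustration only (the worker name and the network call are hypothetical, not part of this commit):

require 'sidekiq'
require 'net/http'

# Hypothetical worker: jobs enqueued here land on the "pull" queue and will
# only run in a Sidekiq process that was started with "-q pull".
class ExampleLinkFetchWorker
  include Sidekiq::Worker

  # Same option style as the workers below; retry: false mirrors
  # LinkCrawlWorker, where a failed fetch is not worth retrying.
  sidekiq_options queue: 'pull', retry: false

  def perform(url)
    # Slow, network-bound work that should not hold up the default queue.
    Net::HTTP.get(URI.parse(url))
  end
end

Enqueuing is unchanged (ExampleLinkFetchWorker.perform_async(url)); only the queue the job waits in differs, which is why docker-compose.yml and the systemd unit at the end of this diff add -q pull to the sidekiq command line.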

@@ -3,7 +3,7 @@
 class AfterRemoteFollowRequestWorker
   include Sidekiq::Worker
 
-  sidekiq_options retry: 5
+  sidekiq_options queue: 'pull', retry: 5
 
   def perform(follow_request_id)
     follow_request = FollowRequest.find(follow_request_id)

@@ -3,7 +3,7 @@
 class AfterRemoteFollowWorker
   include Sidekiq::Worker
 
-  sidekiq_options retry: 5
+  sidekiq_options queue: 'pull', retry: 5
 
   def perform(follow_id)
     follow = Follow.find(follow_id)

@@ -5,7 +5,7 @@ require 'csv'
 class ImportWorker
   include Sidekiq::Worker
 
-  sidekiq_options retry: false
+  sidekiq_options queue: 'pull', retry: false
 
   def perform(import_id)
     import = Import.find(import_id)

@@ -3,7 +3,7 @@
 class LinkCrawlWorker
   include Sidekiq::Worker
 
-  sidekiq_options retry: false
+  sidekiq_options queue: 'pull', retry: false
 
   def perform(status_id)
     FetchLinkCardService.new.call(Status.find(status_id))

@@ -3,6 +3,8 @@
 class MergeWorker
   include Sidekiq::Worker
 
+  sidekiq_options queue: 'pull'
+
   def perform(from_account_id, into_account_id)
     FeedManager.instance.merge_into_timeline(Account.find(from_account_id), Account.find(into_account_id))
   end

@@ -3,7 +3,7 @@
 class NotificationWorker
   include Sidekiq::Worker
 
-  sidekiq_options retry: 5
+  sidekiq_options queue: 'push', retry: 5
 
   def perform(xml, source_account_id, target_account_id)
     SendInteractionService.new.call(xml, Account.find(source_account_id), Account.find(target_account_id))

@@ -3,7 +3,7 @@
 class ProcessingWorker
   include Sidekiq::Worker
 
-  sidekiq_options backtrace: true
+  sidekiq_options queue: 'pull', backtrace: true
 
   def perform(account_id, body)
     ProcessFeedService.new.call(body, Account.find(account_id))

@@ -3,6 +3,8 @@
 class RegenerationWorker
   include Sidekiq::Worker
 
+  sidekiq_options queue: 'pull', backtrace: true
+
   def perform(account_id, timeline_type)
     PrecomputeFeedService.new.call(timeline_type, Account.find(account_id))
   end

@@ -3,7 +3,7 @@
 class SalmonWorker
   include Sidekiq::Worker
 
-  sidekiq_options backtrace: true
+  sidekiq_options queue: 'pull', backtrace: true
 
   def perform(account_id, body)
     ProcessInteractionService.new.call(body, Account.find(account_id))

@@ -3,7 +3,7 @@
 class ThreadResolveWorker
   include Sidekiq::Worker
 
-  sidekiq_options retry: false
+  sidekiq_options queue: 'pull', retry: false
 
   def perform(child_status_id, parent_url)
     child_status = Status.find(child_status_id)

@@ -3,6 +3,8 @@
 class UnmergeWorker
   include Sidekiq::Worker
 
+  sidekiq_options queue: 'pull'
+
   def perform(from_account_id, into_account_id)
     FeedManager.instance.unmerge_from_timeline(Account.find(from_account_id), Account.find(into_account_id))
   end

@@ -33,7 +33,7 @@ services:
     restart: always
     build: .
     env_file: .env.production
-    command: bundle exec sidekiq -q default -q mailers -q push
+    command: bundle exec sidekiq -q default -q mailers -q pull -q push
     depends_on:
       - db
       - redis

@@ -180,7 +180,7 @@ User=mastodon
 WorkingDirectory=/home/mastodon/live
 Environment="RAILS_ENV=production"
 Environment="DB_POOL=5"
-ExecStart=/home/mastodon/.rbenv/shims/bundle exec sidekiq -c 5 -q default -q mailers -q push
+ExecStart=/home/mastodon/.rbenv/shims/bundle exec sidekiq -c 5 -q default -q mailers -q pull -q push
 TimeoutSec=15
 Restart=always