IndexingScheduler: fetch and import in batches (#24285)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
parent 79f5b8f156
commit 6f484fbbd2
1 changed file with 10 additions and 6 deletions
@@ -6,17 +6,21 @@ class Scheduler::IndexingScheduler
 
   sidekiq_options retry: 0
 
+  IMPORT_BATCH_SIZE = 1000
+  SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
+
   def perform
     return unless Chewy.enabled?
 
     indexes.each do |type|
       with_redis do |redis|
-        ids = redis.smembers("chewy:queue:#{type.name}")
-
-        type.import!(ids)
-
-        redis.pipelined do |pipeline|
-          ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) }
+        redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE) do |ids|
+          redis.pipelined do
+            ids.each_slice(IMPORT_BATCH_SIZE) do |slice_ids|
+              type.import!(slice_ids)
+              redis.srem("chewy:queue:#{type.name}", slice_ids)
+            end
+          end
         end
       end
     end
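For context, the change replaces a single SMEMBERS call, which loads every queued id at once, with an incremental SSCAN walk that imports and trims the queue in bounded batches. Below is a minimal, self-contained sketch of that scan-and-batch idea, not the scheduler itself: it assumes the redis-rb gem and a Redis server on localhost, and the queue key, seeded ids, and the puts stand-in for type.import! are illustrative only. Since redis-rb's sscan_each yields one member at a time, the sketch chains each_slice on the returned enumerator rather than mirroring the diff line for line.

# Sketch of the scan-and-batch pattern; assumes redis-rb and a local Redis.
require "redis"

IMPORT_BATCH_SIZE = 1000
SCAN_BATCH_SIZE   = 10 * IMPORT_BATCH_SIZE

redis     = Redis.new
queue_key = "example:queue" # illustrative stand-in for "chewy:queue:#{type.name}"

# Seed the set with some fake ids so the loop below has work to do.
redis.sadd(queue_key, (1..25_000).to_a)

# SSCAN walks the set incrementally (COUNT is only a hint to the server),
# so the whole set is never materialized the way SMEMBERS would require.
# each_slice then groups the streamed members into import-sized chunks.
redis.sscan_each(queue_key, count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
  # In the scheduler this slot is taken by type.import!; here we just report.
  puts "importing #{ids.size} ids"

  # Remove the processed members so they are not picked up again.
  redis.srem(queue_key, ids)
end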