Fix Redis warnings regarding version 5.0.0

This commit is contained in:
Jeremy Kescher 2022-12-04 22:12:02 +01:00
parent 3520ebbd3b
commit 8a24bef2e3
No known key found for this signature in database
GPG key ID: 48DFE4BB15BA5940
11 changed files with 28 additions and 28 deletions

View file

@@ -10,7 +10,7 @@ class DeliveryFailureTracker
end end
def track_failure! def track_failure!
redis.sadd(exhausted_deliveries_key, today) redis.sadd?(exhausted_deliveries_key, today)
UnavailableDomain.create(domain: @host) if reached_failure_threshold? UnavailableDomain.create(domain: @host) if reached_failure_threshold?
end end

View file

@@ -322,24 +322,24 @@ class FeedManager
def clean_feeds!(type, ids) def clean_feeds!(type, ids)
reblogged_id_sets = {} reblogged_id_sets = {}
redis.pipelined do redis.pipelined do |pipeline|
ids.each do |feed_id| ids.each do |feed_id|
redis.del(key(type, feed_id)) pipeline.del(key(type, feed_id))
reblog_key = key(type, feed_id, 'reblogs') reblog_key = key(type, feed_id, 'reblogs')
# We collect a future for this: we don't block while getting # We collect a future for this: we don't block while getting
# it, but we can iterate over it later. # it, but we can iterate over it later.
reblogged_id_sets[feed_id] = redis.zrange(reblog_key, 0, -1) reblogged_id_sets[feed_id] = pipeline.zrange(reblog_key, 0, -1)
redis.del(reblog_key) pipeline.del(reblog_key)
end end
end end
# Remove all of the reblog tracking keys we just removed the # Remove all of the reblog tracking keys we just removed the
# references to. # references to.
redis.pipelined do redis.pipelined do |pipeline|
reblogged_id_sets.each do |feed_id, future| reblogged_id_sets.each do |feed_id, future|
future.value.each do |reblogged_id| future.value.each do |reblogged_id|
reblog_set_key = key(type, feed_id, "reblogs:#{reblogged_id}") reblog_set_key = key(type, feed_id, "reblogs:#{reblogged_id}")
redis.del(reblog_set_key) pipeline.del(reblog_set_key)
end end
end end
end end
@@ -519,7 +519,7 @@ class FeedManager
# REBLOG_FALLOFF most recent statuses, so we note that this # REBLOG_FALLOFF most recent statuses, so we note that this
# is an "extra" reblog, by storing it in reblog_set_key. # is an "extra" reblog, by storing it in reblog_set_key.
reblog_set_key = key(timeline_type, account_id, "reblogs:#{status.reblog_of_id}") reblog_set_key = key(timeline_type, account_id, "reblogs:#{status.reblog_of_id}")
redis.sadd(reblog_set_key, status.id) redis.sadd?(reblog_set_key, status.id)
return false return false
end end
else else
@@ -556,7 +556,7 @@ class FeedManager
# 2. Remove reblog from set of this status's reblogs. # 2. Remove reblog from set of this status's reblogs.
reblog_set_key = key(timeline_type, account_id, "reblogs:#{status.reblog_of_id}") reblog_set_key = key(timeline_type, account_id, "reblogs:#{status.reblog_of_id}")
redis.srem(reblog_set_key, status.id) redis.srem?(reblog_set_key, status.id)
redis.zrem(reblog_key, status.reblog_of_id) redis.zrem(reblog_key, status.reblog_of_id)
# 3. Re-insert another reblog or original into the feed if one # 3. Re-insert another reblog or original into the feed if one
# remains in the set. We could pick a random element, but this # remains in the set. We could pick a random element, but this

View file

@@ -42,6 +42,6 @@ class Vacuum::StatusesVacuum
end end
def remove_from_search_index(status_ids) def remove_from_search_index(status_ids)
with_redis { |redis| redis.sadd('chewy:queue:StatusesIndex', status_ids) } with_redis { |redis| redis.sadd?('chewy:queue:StatusesIndex', status_ids) }
end end
end end

View file

@@ -19,9 +19,9 @@ class FollowRecommendationSuppression < ApplicationRecord
private private
def remove_follow_recommendations def remove_follow_recommendations
redis.pipelined do redis.pipelined do |pipeline|
I18n.available_locales.each do |locale| I18n.available_locales.each do |locale|
redis.zrem("follow_recommendations:#{locale}", account_id) pipeline.zrem("follow_recommendations:#{locale}", account_id)
end end
end end
end end

View file

@@ -60,7 +60,7 @@ class Trends::Base
end end
def record_used_id(id, at_time = Time.now.utc) def record_used_id(id, at_time = Time.now.utc)
redis.sadd(used_key(at_time), id) redis.sadd?(used_key(at_time), id)
redis.expire(used_key(at_time), 1.day.seconds) redis.expire(used_key(at_time), 1.day.seconds)
end end

View file

@@ -48,9 +48,9 @@ class BatchedRemoveStatusService < BaseService
# Cannot be batched # Cannot be batched
@status_id_cutoff = Mastodon::Snowflake.id_at(2.weeks.ago) @status_id_cutoff = Mastodon::Snowflake.id_at(2.weeks.ago)
redis.pipelined do redis.pipelined do |pipeline|
statuses.each do |status| statuses.each do |status|
unpush_from_public_timelines(status) unpush_from_public_timelines(pipeline, status)
end end
end end
end end
@@ -73,22 +73,22 @@ class BatchedRemoveStatusService < BaseService
end end
end end
def unpush_from_public_timelines(status) def unpush_from_public_timelines(pipeline, status)
return unless status.public_visibility? && status.id > @status_id_cutoff return unless status.public_visibility? && status.id > @status_id_cutoff
payload = Oj.dump(event: :delete, payload: status.id.to_s) payload = Oj.dump(event: :delete, payload: status.id.to_s)
redis.publish('timeline:public', payload) pipeline.publish('timeline:public', payload)
redis.publish(status.local? ? 'timeline:public:local' : 'timeline:public:remote', payload) pipeline.publish(status.local? ? 'timeline:public:local' : 'timeline:public:remote', payload)
if status.media_attachments.any? if status.media_attachments.any?
redis.publish('timeline:public:media', payload) pipeline.publish('timeline:public:media', payload)
redis.publish(status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', payload) pipeline.publish(status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', payload)
end end
status.tags.map { |tag| tag.name.mb_chars.downcase }.each do |hashtag| status.tags.map { |tag| tag.name.mb_chars.downcase }.each do |hashtag|
redis.publish("timeline:hashtag:#{hashtag}", payload) pipeline.publish("timeline:hashtag:#{hashtag}", payload)
redis.publish("timeline:hashtag:#{hashtag}:local", payload) if status.local? pipeline.publish("timeline:hashtag:#{hashtag}:local", payload) if status.local?
end end
end end

View file

@@ -16,7 +16,7 @@ class Scheduler::IndexingScheduler
type.import!(ids) type.import!(ids)
redis.pipelined do |pipeline| redis.pipelined do |pipeline|
ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) } ids.each { |id| pipeline.srem?("chewy:queue:#{type.name}", id) }
end end
end end
end end

View file

@@ -17,7 +17,7 @@ module Chewy
RedisConfiguration.with do |redis| RedisConfiguration.with do |redis|
redis.pipelined do |pipeline| redis.pipelined do |pipeline|
@stash.each do |type, ids| @stash.each do |type, ids|
pipeline.sadd("chewy:queue:#{type.name}", ids) pipeline.sadd?("chewy:queue:#{type.name}", ids)
end end
end end
end end

View file

@@ -54,8 +54,8 @@ module Mastodon
def clear def clear
keys = redis.keys('feed:*') keys = redis.keys('feed:*')
redis.pipelined do redis.pipelined do |pipeline|
keys.each { |key| redis.del(key) } keys.each { |key| pipeline.del(key) }
end end
say('OK', :green) say('OK', :green)

View file

@@ -22,7 +22,7 @@ describe DeliveryFailureTracker
describe '#track_failure!' do describe '#track_failure!' do
it 'marks URL as unavailable after 7 days of being called' do it 'marks URL as unavailable after 7 days of being called' do
6.times { |i| redis.sadd('exhausted_deliveries:example.com', i) } 6.times { |i| redis.sadd?('exhausted_deliveries:example.com', i) }
subject.track_failure! subject.track_failure!
expect(subject.days).to eq 7 expect(subject.days).to eq 7

View file

@@ -11,7 +11,7 @@ RSpec.describe Vacuum::FeedsVacuum
redis.zadd(feed_key_for(inactive_user), 1, 1) redis.zadd(feed_key_for(inactive_user), 1, 1)
redis.zadd(feed_key_for(active_user), 1, 1) redis.zadd(feed_key_for(active_user), 1, 1)
redis.zadd(feed_key_for(inactive_user, 'reblogs'), 2, 2) redis.zadd(feed_key_for(inactive_user, 'reblogs'), 2, 2)
redis.sadd(feed_key_for(inactive_user, 'reblogs:2'), 3) redis.sadd?(feed_key_for(inactive_user, 'reblogs:2'), 3)
subject.perform subject.perform
end end