Rate Limit Configuration

require 'singleton'

class RateLimit
  include Singleton

  # A single Limiter per process keeps the in-process cache shared across calls.
  attr_reader :limiter

  def initialize
    @limiter = RateLimit::Limiter.new(
      apikey: "ACCT_ID|APIKEY",
      shared_cache: Rails.cache,       # cross-process cache for rate-limit state
      logger: Rails.logger,
      in_process_cache: ActiveSupport::Cache::MemoryStore.new(size: 1.megabytes) # caches feature flag values in process
    )
  end
end

The equivalent setup for the Java client, wired up with Guice:

public class MyModule extends AbstractModule {
  @Provides
  public ApiClient.Builder getApiClient(RedisClient redisClient, MetricRegistry metrics) {
    return new ApiClient.Builder()
        .setDistributedCache(new RedisWrapper(redisClient)) // shared cache, here backed by Redis
        .setFeatureFlagDistributedCacheExpirySecs(300)
        .setFeatureFlagRefetchBuffer(150)
        .setMetricRegistry(metrics);
  }
}

public class MyClass {
  private final ApiClient apiClient;

  @Inject
  public MyClass(ApiClient apiClient){
    this.apiClient = apiClient;
  }
}

To make RateLimit more efficient, you can specify two different caches. The shared_cache is used to track when a rate limit has been exceeded, so the client can skip the API call while the expiry time is still in the future. The in_process_cache holds feature flag values in process for 1 minute, which makes it safe to call @limiter.feature_is_on_for?("MyFeatures::BetaGroup") inside a tight loop.

class MyController < ApplicationController
  def index
    accounts = Account.all
    @results = []
    accounts.each do |account|
      if RateLimit.instance.limiter.feature_is_on_for?("App:UseNewQuery", account.id)
        @results.concat(Thing.new_faster_query(account))
      else
        @results.concat(Thing.query(account))
      end
    end
  end
end
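
The shared_cache also backs the rate-limit checks themselves: once a limit is known to be exceeded, the limiter stores the expiry there and skips the API until that time passes. Here is a minimal sketch of that pattern; the pass? method, EmailsController, and WelcomeMailer are illustrative assumptions rather than confirmed API, so check your client version for the actual rate-limit call.

class EmailsController < ApplicationController
  def create
    # Hypothetical pass?-style check; the real method name on the Limiter may differ.
    if RateLimit.instance.limiter.pass?("emails:welcome:#{params[:account_id]}")
      WelcomeMailer.welcome(params[:account_id]).deliver_later
      head :ok
    else
      # Limit exceeded: the expiry lives in shared_cache, so repeated calls skip the API.
      head :too_many_requests
    end
  end
end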

Next, let's read about using Ratelim.it for deduplication workflows.