Commit 7c3c7942 authored by Heinrich Lee Yu

Improve tracking of requests in rate limiter

This follows Rack::Attack's strategy for counting requests in Redis. This
ensures that we don't end up in a state where the counter never expires
and the user is always blocked. This could happen if the process gets
terminated between the INCR and EXPIRE calls.

With the new approach, the expiry is less important and is only used so
that we don't store useless keys in Redis.

Changelog: fixed
parent c1953919
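
For background, the fixed-window strategy referenced above can be sketched in a few lines of plain Ruby. This is an illustrative snippet using the `redis` gem, with made-up names (`count_hit`, the `rate:` key prefix); it is not GitLab's implementation. The feature flag definition and the actual change to `Gitlab::ApplicationRateLimiter` follow below.

require 'redis'

# Counts hits in fixed windows of `interval` seconds. The window number is part of
# the Redis key, so even if the EXPIRE below never runs (for example because the
# process dies right after INCR), the next window simply uses a new key and the
# counter can never get stuck at a blocking value.
def count_hit(redis, name, interval)
  period, elapsed = Time.now.to_i.divmod(interval)
  key = "rate:#{name}:#{period}"

  count, _ = redis.pipelined do |pipeline|
    pipeline.incr(key)
    # Expiry is only housekeeping: it keeps stale window keys from piling up.
    pipeline.expire(key, interval - elapsed + 1)
  end

  count
end

# Example: allow at most 5 hits per 60-second window.
redis = Redis.new
puts 'throttled' if count_hit(redis, 'search', 60) > 5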
---
name: rate_limiter_safe_increment
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/73343
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/285352
milestone: '14.5'
type: development
group: group::project management
default_enabled: false
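
For context, a `development` feature flag like this one is read at runtime through `Feature.enabled?`; with `default_enabled: :yaml` the check falls back to the `default_enabled` value declared in this file (false here) until the flag is toggled on. The guard added in the diff below is effectively:

# Mirrors the check introduced in Gitlab::ApplicationRateLimiter#increment below.
return safe_increment(key, scope) if Feature.enabled?(:rate_limiter_safe_increment, default_enabled: :yaml)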
@@ -66,7 +66,6 @@ module Gitlab
       # @param key [Symbol] Key attribute registered in `.rate_limits`
       # @option scope [Array<ActiveRecord>] Array of ActiveRecord models to scope throttling to a specific request (e.g. per user per project)
       # @option threshold [Integer] Optional threshold value to override default one registered in `.rate_limits`
-      # @option interval [Integer] Optional interval value to override default one registered in `.rate_limits`
       # @option users_allowlist [Array<String>] Optional list of usernames to exclude from the limit. This param will only be functional if Scope includes a current user.
       #
       # @return [Boolean] Whether or not a request should be throttled
@@ -77,7 +76,7 @@ module Gitlab
         threshold_value = options[:threshold] || threshold(key)

         threshold_value > 0 &&
-          increment(key, options[:scope], options[:interval]) > threshold_value
+          increment(key, options[:scope]) > threshold_value
       end

       # Increments the given cache key and increments the value by 1 with the
@@ -85,12 +84,13 @@ module Gitlab
       #
       # @param key [Symbol] Key attribute registered in `.rate_limits`
       # @option scope [Array<ActiveRecord>] Array of ActiveRecord models to scope throttling to a specific request (e.g. per user per project)
-      # @option interval [Integer] Optional interval value to override default one registered in `.rate_limits`
       #
       # @return [Integer] incremented value
-      def increment(key, scope, interval = nil)
+      def increment(key, scope)
+        return safe_increment(key, scope) if Feature.enabled?(:rate_limiter_safe_increment, default_enabled: :yaml)
+
         value = 0
-        interval_value = interval || interval(key)
+        interval_value = interval(key)

         ::Gitlab::Redis::RateLimiting.with do |redis|
           cache_key = action_key(key, scope)
@@ -101,6 +101,32 @@ module Gitlab
         value
       end

+      # Increments a cache key that is based on the current time and interval.
+      # So that when time passes to the next interval, the key changes and the count starts again from 0.
+      #
+      # Based on https://github.com/rack/rack-attack/blob/886ba3a18d13c6484cd511a4dc9b76c0d14e5e96/lib/rack/attack/cache.rb#L63-L68
+      #
+      # @param key [Symbol] Key attribute registered in `.rate_limits`
+      # @option scope [Array<ActiveRecord>] Array of ActiveRecord models to scope throttling to a specific request (e.g. per user per project)
+      #
+      # @return [Integer] incremented value
+      def safe_increment(key, scope)
+        interval_value = interval(key)
+
+        period_key, time_elapsed_in_period = Time.now.to_i.divmod(interval_value)
+        cache_key = "#{action_key(key, scope)}:#{period_key}"
+
+        # We add a 1 second buffer to avoid timing issues when we're at the end of a period
+        expiry = interval_value - time_elapsed_in_period + 1
+
+        ::Gitlab::Redis::RateLimiting.with do |redis|
+          redis.pipelined do
+            redis.incr(cache_key)
+            redis.expire(cache_key, expiry)
+          end.first
+        end
+      end
+
       # Logs request using provided logger
       #
       # @param request [Http::Request] - Web request to be logged
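To make the arithmetic in `safe_increment` concrete, here is a small worked example in plain Ruby (the timestamp is arbitrary, chosen only for illustration) for an action with a 120-second interval:

interval_value = 120
now = 1_700_000_050                      # an arbitrary Unix timestamp

period_key, time_elapsed_in_period = now.divmod(interval_value)
# => [14166667, 10]   10 seconds into the current 120-second window

expiry = interval_value - time_elapsed_in_period + 1
# => 111   the key outlives the window by 1 second, then cleans itself up

Every request in the same window increments the same "<action key>:14166667" Redis key; the first request of the next window uses ":14166668", so the count restarts from 0 even if the EXPIRE from the previous window was never executed.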
@@ -3,53 +3,29 @@
 require 'spec_helper'

 RSpec.describe Gitlab::ApplicationRateLimiter do
-  let(:redis) { double('redis') }
-  let(:user) { create(:user) }
-  let(:project) { create(:project) }
-  let(:rate_limits) do
-    {
-      test_action: {
-        threshold: 1,
-        interval: 2.minutes
-      }
-    }
-  end
-
-  let(:key) { rate_limits.keys[0] }
+  let_it_be(:user) { create(:user) }
+  let_it_be(:project) { create(:project) }

   subject { described_class }

-  before do
-    allow(Gitlab::Redis::RateLimiting).to receive(:with).and_yield(redis)
-    allow(described_class).to receive(:rate_limits).and_return(rate_limits)
-  end
-
-  shared_examples 'action rate limiter' do
-    it 'increases the throttle count and sets the expiration time' do
-      expect(redis).to receive(:incr).with(cache_key).and_return(1)
-      expect(redis).to receive(:expire).with(cache_key, 120)
-
-      expect(subject.throttled?(key, scope: scope)).to be_falsy
-    end
-
-    it 'returns true if the key is throttled' do
-      expect(redis).to receive(:incr).with(cache_key).and_return(2)
-      expect(redis).not_to receive(:expire)
-
-      expect(subject.throttled?(key, scope: scope)).to be_truthy
-    end
-
-    context 'when throttling is disabled' do
-      it 'returns false and does not set expiration time' do
-        expect(redis).not_to receive(:incr)
-        expect(redis).not_to receive(:expire)
-
-        expect(subject.throttled?(key, scope: scope, threshold: 0)).to be_falsy
-      end
-    end
-  end
-
   describe '.throttled?' do
+    let(:rate_limits) do
+      {
+        test_action: {
+          threshold: 1,
+          interval: 2.minutes
+        },
+        another_action: {
+          threshold: 2,
+          interval: 3.minutes
+        }
+      }
+    end
+
+    before do
+      allow(described_class).to receive(:rate_limits).and_return(rate_limits)
+    end
+
     context 'when the key is invalid' do
       context 'is provided as a Symbol' do
         context 'but is not defined in the rate_limits Hash' do
@@ -80,27 +56,116 @@ RSpec.describe Gitlab::ApplicationRateLimiter do
       end
     end

-    context 'when the key is an array of only ActiveRecord models' do
-      let(:scope) { [user, project] }
-
-      let(:cache_key) do
-        "application_rate_limiter:test_action:user:#{user.id}:project:#{project.id}"
-      end
-
-      it_behaves_like 'action rate limiter'
-    end
-
-    context 'when they key a combination of ActiveRecord models and strings' do
-      let(:project) { create(:project, :public, :repository) }
-      let(:commit) { project.repository.commit }
-      let(:path) { 'app/controllers/groups_controller.rb' }
-      let(:scope) { [project, commit, path] }
-
-      let(:cache_key) do
-        "application_rate_limiter:test_action:project:#{project.id}:commit:#{commit.sha}:#{path}"
-      end
-
-      it_behaves_like 'action rate limiter'
-    end
+    context 'when rate_limiter_safe_increment is disabled' do
+      let(:redis) { double('redis') }
+      let(:key) { rate_limits.keys[0] }
+
+      before do
+        allow(Gitlab::Redis::RateLimiting).to receive(:with).and_yield(redis)
+        stub_feature_flags(rate_limiter_safe_increment: false)
+      end
+
+      shared_examples 'action rate limiter' do
+        it 'increases the throttle count and sets the expiration time' do
+          expect(redis).to receive(:incr).with(cache_key).and_return(1)
+          expect(redis).to receive(:expire).with(cache_key, 120)
+
+          expect(subject.throttled?(key, scope: scope)).to be_falsy
+        end
+
+        it 'returns true if the key is throttled' do
+          expect(redis).to receive(:incr).with(cache_key).and_return(2)
+          expect(redis).not_to receive(:expire)
+
+          expect(subject.throttled?(key, scope: scope)).to be_truthy
+        end
+
+        context 'when throttling is disabled' do
+          it 'returns false and does not set expiration time' do
+            expect(redis).not_to receive(:incr)
+            expect(redis).not_to receive(:expire)
+
+            expect(subject.throttled?(key, scope: scope, threshold: 0)).to be_falsy
+          end
+        end
+      end
+
+      context 'when the key is an array of only ActiveRecord models' do
+        let(:scope) { [user, project] }
+
+        let(:cache_key) do
+          "application_rate_limiter:test_action:user:#{user.id}:project:#{project.id}"
+        end
+
+        it_behaves_like 'action rate limiter'
+      end
+
+      context 'when the key is a combination of ActiveRecord models and strings' do
+        let(:project) { create(:project, :public, :repository) }
+        let(:commit) { project.repository.commit }
+        let(:path) { 'app/controllers/groups_controller.rb' }
+        let(:scope) { [project, commit, path] }
+
+        let(:cache_key) do
+          "application_rate_limiter:test_action:project:#{project.id}:commit:#{commit.sha}:#{path}"
+        end
+
+        it_behaves_like 'action rate limiter'
+      end
+    end
+
+    context 'when rate_limiter_safe_increment is enabled', :clean_gitlab_redis_rate_limiting do
+      before do
+        stub_feature_flags(rate_limiter_safe_increment: true)
+      end
+
+      shared_examples 'throttles based on key and scope' do
+        let(:start_time) { Time.current.beginning_of_hour }
+
+        it 'returns true when threshold is exceeded' do
+          travel_to(start_time) do
+            expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+          end
+
+          travel_to(start_time + 1.minute) do
+            expect(subject.throttled?(:test_action, scope: scope)).to eq(true)
+
+            # Assert that it does not affect other actions or scope
+            expect(subject.throttled?(:another_action, scope: scope)).to eq(false)
+            expect(subject.throttled?(:test_action, scope: [user])).to eq(false)
+          end
+        end
+
+        it 'returns false when interval has elapsed' do
+          travel_to(start_time) do
+            expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+
+            # another_action has a threshold of 3 so we simulate 2 requests
+            expect(subject.throttled?(:another_action, scope: scope)).to eq(false)
+            expect(subject.throttled?(:another_action, scope: scope)).to eq(false)
+          end
+
+          travel_to(start_time + 2.minutes) do
+            expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+
+            # Assert that another_action has its own interval that hasn't elapsed
+            expect(subject.throttled?(:another_action, scope: scope)).to eq(true)
+          end
+        end
+      end
+
+      context 'when using ActiveRecord models as scope' do
+        let(:scope) { [user, project] }
+
+        it_behaves_like 'throttles based on key and scope'
+      end
+
+      context 'when using ActiveRecord models and strings as scope' do
+        let(:scope) { [project, 'app/controllers/groups_controller.rb'] }
+
+        it_behaves_like 'throttles based on key and scope'
+      end
+    end
   end

@@ -134,7 +199,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter do
     end

     context 'with a current_user' do
-      let(:current_user) { create(:user) }
+      let(:current_user) { user }

       let(:attributes) do
         base_attributes.merge({