Commit b0be4baf authored by Nick Thomas's avatar Nick Thomas

Merge branch 'bvl-gitaly-dynamic-deadline' into 'master'

Add dynamic deadlines to gitaly

See merge request gitlab-org/gitlab!21492
parents 500f8dc5 9e6c9ec4
......@@ -18,7 +18,7 @@ module RequiresWhitelistedMonitoringClient
# debugging purposes
return true if Rails.env.development? && request.local?
ip_whitelist.any? { |e| e.include?(Gitlab::RequestContext.client_ip) }
ip_whitelist.any? { |e| e.include?(Gitlab::RequestContext.instance.client_ip) }
end
def ip_whitelist
......
......@@ -169,7 +169,11 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_default,
presence: true,
numericality: { only_integer: true, greater_than_or_equal_to: 0 }
numericality: {
only_integer: true,
greater_than_or_equal_to: 0,
less_than_or_equal_to: Settings.gitlab.max_request_duration_seconds
}
validates :gitaly_timeout_medium,
presence: true,
......
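For context: with the shipped default of 57 for max_request_duration_seconds, this new upper bound means a gitaly_timeout_default that previously passed can now fail validation. A hypothetical Rails console session (names as in this diff; the message follows the standard Rails numericality wording):

setting = ApplicationSetting.new(gitaly_timeout_default: 60)
setting.valid?
setting.errors[:gitaly_timeout_default]
# => ["must be less than or equal to 57"]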
......@@ -8,6 +8,9 @@
.form-text.text-muted
Timeout for Gitaly calls from the GitLab application (in seconds). This timeout is not enforced
for git fetch/push operations or Sidekiq jobs.
This timeout should be less than the worker timeout. If a Gitaly call's timeout would exceed the
worker timeout, the time remaining until the worker timeout is used instead, so the worker does
not have to be terminated.
.form-group
= f.label :gitaly_timeout_fast, 'Fast Timeout Period', class: 'label-bold'
= f.number_field :gitaly_timeout_fast, class: 'form-control'
......
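To make the help text above concrete, here is a minimal standalone sketch (plain Ruby, hypothetical numbers) of the capping behaviour that the request_deadline helper further down in this diff implements: the closer of the Gitaly deadline and the request deadline wins.

# Hypothetical: a 55s Gitaly timeout requested when only 30s of the request budget remain.
now = Time.now.to_f
gitaly_deadline  = now + 55   # deadline implied by the configured Gitaly timeout
request_deadline = now + 30   # deadline implied by the worker/request budget
effective_deadline = [gitaly_deadline, request_deadline].min
effective_deadline == request_deadline # => true, so this call would be tagged "limited"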
---
title: Don't let Gitaly calls exceed a request time of 55 seconds
merge_request: 21492
author:
type: performance
......@@ -33,6 +33,9 @@ production: &base
host: localhost
port: 80 # Set to 443 if using HTTPS, see installation.md#using-https for additional HTTPS configuration details
https: false # Set to true if using HTTPS, see installation.md#using-https for additional HTTPS configuration details
# The maximum time unicorn/puma can spend on the request. This needs to be smaller than the worker timeout.
# Default is 95% of the worker timeout
max_request_duration: 57
# Uncomment this line below if your ssh host is different from HTTP/HTTPS one
# (you'd obviously need to replace ssh.host_example.com with your own host).
......
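The 57-second default follows from the "95% of the worker timeout" rule in the comment above, assuming the stock 60-second unicorn/puma worker timeout:

worker_timeout_seconds = 60                                   # assumed default worker timeout
max_request_duration   = (worker_timeout_seconds * 0.95).to_i # => 57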
......@@ -209,6 +209,7 @@ Settings.gitlab['content_security_policy'] ||= Gitlab::ContentSecurityPolicy::Co
Settings.gitlab['no_todos_messages'] ||= YAML.load_file(Rails.root.join('config', 'no_todos_messages.yml'))
Settings.gitlab['impersonation_enabled'] ||= true if Settings.gitlab['impersonation_enabled'].nil?
Settings.gitlab['usage_ping_enabled'] = true if Settings.gitlab['usage_ping_enabled'].nil?
Settings.gitlab['max_request_duration_seconds'] ||= 57
Gitlab.ee do
Settings.gitlab['mirror_max_delay'] ||= 300
......
......@@ -37,7 +37,7 @@ unless Gitlab::Runtime.sidekiq?
payload[:response] = event.payload[:response] if event.payload[:response]
payload[Labkit::Correlation::CorrelationId::LOG_KEY] = Labkit::Correlation::CorrelationId.current_id
if cpu_s = Gitlab::Metrics::System.thread_cpu_duration(::Gitlab::RequestContext.start_thread_cpu_time)
if cpu_s = Gitlab::Metrics::System.thread_cpu_duration(::Gitlab::RequestContext.instance.start_thread_cpu_time)
payload[:cpu_s] = cpu_s
end
......
Rails.application.configure do |config|
config.middleware.insert_after RequestStore::Middleware, Gitlab::RequestContext
config.middleware.insert_after RequestStore::Middleware, Gitlab::Middleware::RequestContext
end
......@@ -8,7 +8,7 @@ module Gitlab
class << self
def limit_user_id!(user_id)
if config.unique_ips_limit_enabled
ip = RequestContext.client_ip
ip = RequestContext.instance.client_ip
unique_ips = update_and_return_ips_count(user_id, ip)
raise TooManyIps.new(user_id, ip, unique_ips) if unique_ips > config.unique_ips_limit_per_user
......
......@@ -160,6 +160,7 @@ module Gitlab
def self.execute(storage, service, rpc, request, remote_storage:, timeout:)
enforce_gitaly_request_limits(:call)
Gitlab::RequestContext.instance.ensure_deadline_not_exceeded!
kwargs = request_kwargs(storage, timeout: timeout.to_f, remote_storage: remote_storage)
kwargs = yield(kwargs) if block_given?
......@@ -234,12 +235,28 @@ module Gitlab
metadata['gitaly-session-id'] = session_id
metadata.merge!(Feature::Gitaly.server_feature_flags)
result = { metadata: metadata }
deadline_info = request_deadline(timeout)
metadata.merge!(deadline_info.slice(:deadline_type))
result[:deadline] = real_time + timeout if timeout > 0
result
{ metadata: metadata, deadline: deadline_info[:deadline] }
end
def self.request_deadline(timeout)
# timeout being 0 means the request is allowed to run indefinitely.
# We can't allow that inside a request, but this won't count towards Gitaly
# error budgets
regular_deadline = real_time.to_i + timeout if timeout > 0
return { deadline: regular_deadline } if Sidekiq.server?
return { deadline: regular_deadline } unless Gitlab::RequestContext.instance.request_deadline
limited_deadline = [regular_deadline, Gitlab::RequestContext.instance.request_deadline].compact.min
limited = limited_deadline < regular_deadline
{ deadline: limited_deadline, deadline_type: limited ? "limited" : "regular" }
end
private_class_method :request_deadline
def self.session_id
Gitlab::SafeRequestStore[:gitaly_session_id] ||= SecureRandom.uuid
end
......
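For illustration, this is roughly how request_kwargs behaves after this change inside a web request with about 10 seconds of budget left, mirroring the values the specs further down use (a sketch; not runnable outside the Rails app):

# Hypothetical web request, ~10 seconds until the request deadline.
kwargs = Gitlab::GitalyClient.request_kwargs('default', timeout: 2)
kwargs[:deadline]                  # ~ real_time + 2, the Gitaly timeout is closer
kwargs[:metadata][:deadline_type]  # => "regular"

kwargs = Gitlab::GitalyClient.request_kwargs('default', timeout: 15)
kwargs[:deadline]                  # the request deadline, ~ real_time + 10
kwargs[:metadata][:deadline_type]  # => "limited"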
# frozen_string_literal: true
module Gitlab
module Middleware
class RequestContext
def initialize(app)
@app = app
end
def call(env)
# We should be using ActionDispatch::Request instead of
# Rack::Request to be consistent with Rails, but due to a Rails
# bug described in
# https://gitlab.com/gitlab-org/gitlab-foss/issues/58573#note_149799010
# hosts behind a load balancer will only see 127.0.0.1 for the
# load balancer's IP.
req = Rack::Request.new(env)
Gitlab::RequestContext.instance.client_ip = req.ip
Gitlab::RequestContext.instance.start_thread_cpu_time = Gitlab::Metrics::System.thread_cpu_time
Gitlab::RequestContext.instance.request_start_time = Gitlab::Metrics::System.real_time
@app.call(env)
end
end
end
end
......@@ -2,34 +2,37 @@
module Gitlab
class RequestContext
class << self
def client_ip
Gitlab::SafeRequestStore[:client_ip]
end
include Singleton
RequestDeadlineExceeded = Class.new(StandardError)
attr_accessor :client_ip, :start_thread_cpu_time, :request_start_time
def start_thread_cpu_time
Gitlab::SafeRequestStore[:start_thread_cpu_time]
class << self
def instance
Gitlab::SafeRequestStore[:request_context] ||= new
end
end
def initialize(app)
@app = app
def request_deadline
return unless request_start_time
return unless Feature.enabled?(:request_deadline)
@request_deadline ||= request_start_time + max_request_duration_seconds
end
def call(env)
# We should be using ActionDispatch::Request instead of
# Rack::Request to be consistent with Rails, but due to a Rails
# bug described in
# https://gitlab.com/gitlab-org/gitlab-foss/issues/58573#note_149799010
# hosts behind a load balancer will only see 127.0.0.1 for the
# load balancer's IP.
req = Rack::Request.new(env)
def ensure_deadline_not_exceeded!
return unless request_deadline
return if Gitlab::Metrics::System.real_time < request_deadline
Gitlab::SafeRequestStore[:client_ip] = req.ip
raise RequestDeadlineExceeded,
"Request takes longer than #{max_request_duration_seconds}"
end
Gitlab::SafeRequestStore[:start_thread_cpu_time] = Gitlab::Metrics::System.thread_cpu_time
private
@app.call(env)
def max_request_duration_seconds
Settings.gitlab.max_request_duration_seconds
end
end
end
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe HealthCheckController do
describe HealthCheckController, :request_store do
include StubENV
let(:xml_response) { Hash.from_xml(response.body)['hash'] }
......@@ -18,7 +18,7 @@ describe HealthCheckController do
describe 'GET #index' do
context 'when services are up but accessed from outside whitelisted ips' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(not_whitelisted_ip)
end
it 'returns a not found page' do
......@@ -48,7 +48,7 @@ describe HealthCheckController do
context 'when services are up and accessed from whitelisted ips' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
end
it 'supports successful plaintext response' do
......@@ -95,7 +95,7 @@ describe HealthCheckController do
before do
allow(HealthCheck::Utils).to receive(:process_checks).with(['standard']).and_return('The server is on fire')
allow(HealthCheck::Utils).to receive(:process_checks).with(['email']).and_return('Email is on fire')
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
end
it 'supports failure plaintext response' do
......
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe MetricsController do
describe MetricsController, :request_store do
include StubENV
let(:metrics_multiproc_dir) { @metrics_multiproc_dir }
......@@ -53,7 +53,7 @@ describe MetricsController do
context 'accessed from whitelisted ip' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(whitelisted_ip)
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(whitelisted_ip)
end
it_behaves_like 'endpoint providing metrics'
......@@ -61,7 +61,7 @@ describe MetricsController do
context 'accessed from ip in whitelisted range' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(ip_in_whitelisted_range)
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(ip_in_whitelisted_range)
end
it_behaves_like 'endpoint providing metrics'
......@@ -69,7 +69,7 @@ describe MetricsController do
context 'accessed from not whitelisted ip' do
before do
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(not_whitelisted_ip)
allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(not_whitelisted_ip)
end
it 'returns the expected error response' do
......
......@@ -229,6 +229,59 @@ describe Gitlab::GitalyClient do
end
end
end
context 'deadlines', :request_store do
let(:request_deadline) { real_time + 10.0 }
before do
allow(Gitlab::RequestContext.instance).to receive(:request_deadline).and_return(request_deadline)
end
it 'includes the deadline information' do
kword_args = described_class.request_kwargs('default', timeout: 2)
expect(kword_args[:deadline])
.to be_within(1).of(real_time + 2)
expect(kword_args[:metadata][:deadline_type]).to eq("regular")
end
it 'limits the deadline to the request deadline if that is closer', :aggregate_failures do
kword_args = described_class.request_kwargs('default', timeout: 15)
expect(kword_args[:deadline]).to eq(request_deadline)
expect(kword_args[:metadata][:deadline_type]).to eq("limited")
end
it 'does not limit calls in sidekiq' do
expect(Sidekiq).to receive(:server?).and_return(true)
kword_args = described_class.request_kwargs('default', timeout: 6.hours.to_i)
expect(kword_args[:deadline]).to be_within(1).of(real_time + 6.hours.to_i)
expect(kword_args[:metadata][:deadline_type]).to be_nil
end
it 'does not limit calls in sidekiq when allowed unlimited' do
expect(Sidekiq).to receive(:server?).and_return(true)
kword_args = described_class.request_kwargs('default', timeout: 0)
expect(kword_args[:deadline]).to be_nil
expect(kword_args[:metadata][:deadline_type]).to be_nil
end
it 'includes only the deadline specified by the timeout when there was no deadline' do
allow(Gitlab::RequestContext.instance).to receive(:request_deadline).and_return(nil)
kword_args = described_class.request_kwargs('default', timeout: 6.hours.to_i)
expect(kword_args[:deadline]).to be_within(1).of(Gitlab::Metrics::System.real_time + 6.hours.to_i)
expect(kword_args[:metadata][:deadline_type]).to be_nil
end
def real_time
Gitlab::Metrics::System.real_time
end
end
end
describe 'enforce_gitaly_request_limits?' do
......
# frozen_string_literal: true
require 'fast_spec_helper'
require 'rack'
require 'request_store'
require_relative '../../../support/helpers/next_instance_of'
describe Gitlab::Middleware::RequestContext do
include NextInstanceOf
let(:app) { -> (env) {} }
let(:env) { {} }
around do |example|
RequestStore.begin!
example.run
RequestStore.end!
RequestStore.clear!
end
describe '#call' do
context 'setting the client ip' do
subject { Gitlab::RequestContext.instance.client_ip }
context 'with X-Forwarded-For headers' do
let(:load_balancer_ip) { '1.2.3.4' }
let(:headers) do
{
'HTTP_X_FORWARDED_FOR' => "#{load_balancer_ip}, 127.0.0.1",
'REMOTE_ADDR' => '127.0.0.1'
}
end
let(:env) { Rack::MockRequest.env_for("/").merge(headers) }
it 'returns the load balancer IP' do
endpoint = proc do
[200, {}, ["Hello"]]
end
described_class.new(endpoint).call(env)
expect(subject).to eq(load_balancer_ip)
end
end
context 'request' do
let(:ip) { '192.168.1.11' }
before do
allow_next_instance_of(Rack::Request) do |instance|
allow(instance).to receive(:ip).and_return(ip)
end
described_class.new(app).call(env)
end
it { is_expected.to eq(ip) }
end
context 'before RequestContext middleware run' do
it { is_expected.to be_nil }
end
end
end
context 'setting the thread cpu time' do
it 'sets the `start_thread_cpu_time`' do
expect { described_class.new(app).call(env) }
.to change { Gitlab::RequestContext.instance.start_thread_cpu_time }.from(nil).to(Float)
end
end
context 'setting the request start time' do
it 'sets the `request_start_time`' do
expect { described_class.new(app).call(env) }
.to change { Gitlab::RequestContext.instance.request_start_time }.from(nil).to(Float)
end
end
end
......@@ -2,59 +2,44 @@
require 'spec_helper'
describe Gitlab::RequestContext do
describe '#client_ip' do
subject { described_class.client_ip }
describe Gitlab::RequestContext, :request_store do
subject { described_class.instance }
let(:app) { -> (env) {} }
let(:env) { Hash.new }
it { is_expected.to have_attributes(client_ip: nil, start_thread_cpu_time: nil, request_start_time: nil) }
context 'with X-Forwarded-For headers', :request_store do
let(:load_balancer_ip) { '1.2.3.4' }
let(:headers) do
{
'HTTP_X_FORWARDED_FOR' => "#{load_balancer_ip}, 127.0.0.1",
'REMOTE_ADDR' => '127.0.0.1'
}
end
describe '#request_deadline' do
let(:request_start_time) { 1575982156.206008 }
let(:env) { Rack::MockRequest.env_for("/").merge(headers) }
it "sets the time to #{Settings.gitlab.max_request_duration_seconds} seconds in the future" do
allow(subject).to receive(:request_start_time).and_return(request_start_time)
it 'returns the load balancer IP' do
client_ip = nil
endpoint = proc do
client_ip = Gitlab::SafeRequestStore[:client_ip]
[200, {}, ["Hello"]]
end
expect(subject.request_deadline).to eq(1575982156.206008 + Settings.gitlab.max_request_duration_seconds)
expect(subject.request_deadline).to be_a(Float)
end
described_class.new(endpoint).call(env)
it 'returns nil if there is no start time' do
allow(subject).to receive(:request_start_time).and_return(nil)
expect(client_ip).to eq(load_balancer_ip)
end
expect(subject.request_deadline).to be_nil
end
end
context 'when RequestStore::Middleware is used' do
around do |example|
RequestStore::Middleware.new(-> (env) { example.run }).call({})
end
describe '#ensure_deadline_not_exceeded!' do
it 'does not raise an error when there was no deadline' do
expect(subject).to receive(:request_deadline).and_return(nil)
expect { subject.ensure_deadline_not_exceeded! }.not_to raise_error
end
context 'request' do
let(:ip) { '192.168.1.11' }
it 'does not raise an error if the deadline is in the future' do
allow(subject).to receive(:request_deadline).and_return(Gitlab::Metrics::System.real_time + 10)
before do
allow_next_instance_of(Rack::Request) do |instance|
allow(instance).to receive(:ip).and_return(ip)
end
described_class.new(app).call(env)
end
expect { subject.ensure_deadline_not_exceeded! }.not_to raise_error
end
it { is_expected.to eq(ip) }
end
it 'raises an error when the deadline is in the past' do
allow(subject).to receive(:request_deadline).and_return(Gitlab::Metrics::System.real_time - 10)
context 'before RequestContext middleware run' do
it { is_expected.to be_nil }
end
expect { subject.ensure_deadline_not_exceeded! }.to raise_error(described_class::RequestDeadlineExceeded)
end
end
end
......@@ -319,6 +319,11 @@ describe ApplicationSetting do
end
context 'gitaly timeouts' do
it "validates that the default_timeout is lower than the max_request_duration" do
is_expected.to validate_numericality_of(:gitaly_timeout_default)
.is_less_than_or_equal_to(Settings.gitlab.max_request_duration_seconds)
end
[:gitaly_timeout_default, :gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name|
it do
is_expected.to validate_presence_of(timeout_name)
......
......@@ -2,6 +2,8 @@
shared_context 'unique ips sign in limit' do
include StubENV
let(:request_context) { Gitlab::RequestContext.instance }
before do
Gitlab::Redis::Cache.with(&:flushall)
Gitlab::Redis::Queues.with(&:flushall)
......@@ -15,10 +17,13 @@ shared_context 'unique ips sign in limit' do
unique_ips_limit_enabled: true,
unique_ips_limit_time_window: 10000
)
# Make sure we're working with the same request context everywhere
allow(Gitlab::RequestContext).to receive(:instance).and_return(request_context)
end
def change_ip(ip)
allow(Gitlab::RequestContext).to receive(:client_ip).and_return(ip)
allow(request_context).to receive(:client_ip).and_return(ip)
end
def request_from_ip(ip)
......