Commit 0a65e0da authored by Douwe Maan

Merge branch 'move-to-adaptive-scheduling-in-project-mirror-pull' into 'master'

Stop using sidekiq-cron for scheduling all project mirror pulls and move to adaptive scheduling

Closes gitlab-ce#29218

See merge request !1853
parents 27ded72a 7db0975a
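In short, the MR drops the fixed per-project sync_time cron buckets and instead keeps a per-mirror next_execution_timestamp that is pushed back with a duration- and retry-based backoff, while a scheduler that runs every minute fills a bounded capacity of concurrent pulls. A rough standalone sketch of that idea follows; the Mirror struct, constants and numbers here are illustrative only and are not GitLab's actual API.

    # Illustrative sketch of adaptive scheduling: pick due mirrors up to the
    # free capacity, and push busy or failing mirrors further into the future.
    Mirror = Struct.new(:id, :retry_count, :last_duration, :next_run_at)

    BASE_BACKOFF = 30 * 60      # seconds; assumed value, not GitLab's
    MAX_DELAY    = 5 * 60 * 60  # cap on how far a mirror can be pushed out
    MAX_CAPACITY = 30           # mirrors allowed to sync concurrently

    def pick_due_mirrors(mirrors, in_progress, now = Time.now)
      budget = MAX_CAPACITY - in_progress
      return [] if budget <= 0

      mirrors.select { |m| m.next_run_at <= now }
             .sort_by(&:next_run_at)
             .first(budget)
    end

    def reschedule(mirror, now = Time.now)
      retry_factor = [1, mirror.retry_count].max
      delay = [(BASE_BACKOFF + mirror.last_duration) * retry_factor, MAX_DELAY].min
      mirror.next_run_at = now + delay
    end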
@@ -56,6 +56,8 @@
       if (job.import_status === 'finished') {
         job_item.removeClass("active").addClass("success");
         return status_field.html('<span><i class="fa fa-check"></i> done</span>');
+      } else if (job.import_status === 'scheduled') {
+        return status_field.html("<i class='fa fa-spinner fa-spin'></i> scheduled");
       } else if (job.import_status === 'started') {
         return status_field.html("<i class='fa fa-spinner fa-spin'></i> started");
       } else {
...
@@ -172,10 +172,12 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
       :elasticsearch_search,
       :repository_size_limit,
       :shared_runners_minutes,
-      :minimum_mirror_sync_time,
       :geo_status_timeout,
       :elasticsearch_experimental_indexer,
-      :check_namespace_plan
+      :check_namespace_plan,
+      :mirror_max_delay,
+      :mirror_max_capacity,
+      :mirror_capacity_threshold
     ]
   end
 end
@@ -12,14 +12,7 @@ class Projects::ImportsController < Projects::ApplicationController
   def create
     if @project.update_attributes(import_params)
-      @project.reload
-      if @project.import_failed?
-        @project.import_retry
-      else
-        @project.import_start
-        @project.add_import_job
-      end
+      @project.reload.import_schedule
     end

     redirect_to namespace_project_import_path(@project.namespace, @project)
...
@@ -14,10 +14,10 @@ class Projects::MirrorsController < Projects::ApplicationController
   def update
     if @project.update_attributes(mirror_params)
       if @project.mirror?
-        @project.update_mirror
+        @project.force_import_job!
         flash[:notice] = "Mirroring settings were successfully updated. The project is being updated."
-      elsif @project.mirror_changed?
+      elsif project.previous_changes.has_key?('mirror')
         flash[:notice] = "Mirroring was successfully disabled."
       else
         flash[:notice] = "Mirroring settings were successfully updated."
@@ -34,9 +34,10 @@ class Projects::MirrorsController < Projects::ApplicationController
       @project.update_remote_mirrors
       flash[:notice] = "The remote repository is being updated..."
     else
-      @project.update_mirror
+      @project.force_import_job!
       flash[:notice] = "The repository is being updated..."
     end

     redirect_to_repository_settings(@project)
   end
@@ -48,7 +49,6 @@ class Projects::MirrorsController < Projects::ApplicationController
   def mirror_params
     params.require(:project).permit(:mirror, :import_url, :mirror_user_id,
-                                    :mirror_trigger_builds, :sync_time,
-                                    remote_mirrors_attributes: [:url, :id, :enabled])
+                                    :mirror_trigger_builds, remote_mirrors_attributes: [:url, :id, :enabled])
   end
 end
@@ -4,10 +4,4 @@ module MirrorHelper
     message << "<br>To discard the local changes and overwrite the branch with the upstream version, delete it here and choose 'Update Now' above." if can?(current_user, :push_code, @project)
     message
   end
-
-  def mirror_sync_time_options
-    Gitlab::Mirror::SYNC_TIME_OPTIONS.select do |key, value|
-      value >= current_application_settings.minimum_mirror_sync_time
-    end
-  end
 end
@@ -432,6 +432,12 @@ module ProjectsHelper
     end
   end

+  def can_force_update_mirror?(project)
+    return true unless project.mirror_last_update_at
+
+    Time.now - project.mirror_last_update_at >= 5.minutes
+  end
+
   def membership_locked?
     if @project.group && @project.group.membership_lock
       true
...
@@ -154,10 +154,6 @@ class ApplicationSetting < ActiveRecord::Base
             presence: true,
             numericality: { greater_than_or_equal_to: 0 }

-  validates :minimum_mirror_sync_time,
-            presence: true,
-            inclusion: { in: Gitlab::Mirror::SYNC_TIME_OPTIONS.values }
-
   validates_each :restricted_visibility_levels do |record, attr, value|
     value&.each do |level|
       unless Gitlab::VisibilityLevel.options.value?(level)
@@ -186,8 +182,6 @@ class ApplicationSetting < ActiveRecord::Base
   before_save :ensure_runners_registration_token
   before_save :ensure_health_check_access_token

-  after_update :update_mirror_cron_job, if: :minimum_mirror_sync_time_changed?
-
   after_commit do
     Rails.cache.write(CACHE_KEY, self)
   end
@@ -218,7 +212,7 @@ class ApplicationSetting < ActiveRecord::Base
     ApplicationSetting.define_attribute_methods
   end

-  def self.defaults_ce
+  def self.defaults
     {
       after_sign_up_text: nil,
       akismet_enabled: false,
@@ -269,20 +263,6 @@ class ApplicationSetting < ActiveRecord::Base
     }
   end

-  def self.defaults_ee
-    {
-      elasticsearch_url: ENV['ELASTIC_URL'] || 'http://localhost:9200',
-      elasticsearch_aws: false,
-      elasticsearch_aws_region: ENV['ELASTIC_REGION'] || 'us-east-1',
-      minimum_mirror_sync_time: Gitlab::Mirror::FIFTEEN,
-      repository_size_limit: 0
-    }
-  end
-
-  def self.defaults
-    defaults_ce.merge(defaults_ee)
-  end
-
   def self.create_from_defaults
     create(defaults)
   end
@@ -295,13 +275,6 @@ class ApplicationSetting < ActiveRecord::Base
     end
   end

-  def update_mirror_cron_job
-    Project.mirror.where('sync_time < ?', minimum_mirror_sync_time)
-      .update_all(sync_time: minimum_mirror_sync_time)
-
-    Gitlab::Mirror.configure_cron_job!
-  end
-
   def elasticsearch_url
     read_attribute(:elasticsearch_url).split(',').map(&:strip)
   end
...
@@ -7,12 +7,54 @@ module EE
     extend ActiveSupport::Concern

     prepended do
+      include IgnorableColumn
+
+      ignore_column :minimum_mirror_sync_time
+
       validates :shared_runners_minutes,
                 numericality: { greater_than_or_equal_to: 0 }
+
+      validates :mirror_max_delay,
+                presence: true,
+                numericality: { allow_nil: true, only_integer: true, greater_than: 0 }
+
+      validates :mirror_max_capacity,
+                presence: true,
+                numericality: { allow_nil: true, only_integer: true, greater_than: 0 }
+
+      validates :mirror_capacity_threshold,
+                presence: true,
+                numericality: { allow_nil: true, only_integer: true, greater_than: 0 }
+
+      validate :mirror_capacity_threshold_less_than
+    end
+
+    module ClassMethods
+      def defaults
+        super.merge(
+          elasticsearch_url: ENV['ELASTIC_URL'] || 'http://localhost:9200',
+          elasticsearch_aws: false,
+          elasticsearch_aws_region: ENV['ELASTIC_REGION'] || 'us-east-1',
+          repository_size_limit: 0,
+          mirror_max_delay: Settings.gitlab['mirror_max_delay'],
+          mirror_max_capacity: Settings.gitlab['mirror_max_capacity'],
+          mirror_capacity_threshold: Settings.gitlab['mirror_capacity_threshold']
+        )
+      end
     end

     def should_check_namespace_plan?
       check_namespace_plan? && (::Gitlab.com? || Rails.env.development?)
     end
+
+    private
+
+    def mirror_capacity_threshold_less_than
+      return unless mirror_max_capacity && mirror_capacity_threshold
+
+      if mirror_capacity_threshold > mirror_max_capacity
+        errors.add(:mirror_capacity_threshold, "Project's mirror capacity threshold can't be higher than it's maximum capacity")
+      end
+    end
   end
 end
@@ -7,8 +7,28 @@ module EE
     extend ActiveSupport::Concern

     prepended do
+      include IgnorableColumn
+
+      ignore_column :sync_time
+
+      after_save :create_mirror_data, if: ->(project) { project.mirror? && project.mirror_changed? }
+      after_save :destroy_mirror_data, if: ->(project) { !project.mirror? && project.mirror_changed? }
+
+      has_one :mirror_data, dependent: :delete, autosave: true, class_name: 'ProjectMirrorData'
+
       scope :with_shared_runners_limit_enabled, -> { with_shared_runners.non_public_only }

+      scope :mirrors_to_sync, -> do
+        mirror.joins(:mirror_data).where("next_execution_timestamp <= ? AND import_status NOT IN ('scheduled', 'started')", Time.now).
+          order_by(:next_execution_timestamp).limit(::Gitlab::Mirror.available_capacity)
+      end
+
+      scope :stuck_mirrors, -> do
+        mirror.joins(:mirror_data).
+          where("(import_status = 'started' AND project_mirror_data.last_update_started_at < :limit) OR (import_status = 'scheduled' AND project_mirror_data.last_update_scheduled_at < :limit)",
+                { limit: 20.minutes.ago })
+      end
+
       delegate :shared_runners_minutes, :shared_runners_seconds, :shared_runners_seconds_last_reset,
         to: :statistics, allow_nil: true
@@ -44,6 +64,19 @@ module EE
       config.address&.gsub(wildcard, full_path)
     end

+    def force_import_job!
+      self.mirror_data.set_next_execution_to_now!
+      UpdateAllMirrorsWorker.perform_async
+    end
+
+    def add_import_job
+      if import? && !repository_exists?
+        super
+      elsif mirror?
+        RepositoryUpdateMirrorWorker.perform_async(self.id)
+      end
+    end
+
     private

     def licensed_feature_available?(feature)
@@ -57,6 +90,10 @@ module EE
       end
     end

+    def destroy_mirror_data
+      mirror_data.destroy
+    end
+
     def service_desk_available?
       return @service_desk_available if defined?(@service_desk_available)
...
@@ -170,7 +170,7 @@ class Project < ActiveRecord::Base
   has_many :audit_events, as: :entity, dependent: :destroy
   has_many :notification_settings, dependent: :destroy, as: :source

-  has_one :import_data, dependent: :destroy, class_name: "ProjectImportData"
+  has_one :import_data, dependent: :delete, class_name: 'ProjectImportData'
   has_one :project_feature, dependent: :destroy
   has_one :statistics, class_name: 'ProjectStatistics', dependent: :delete
   has_many :container_repositories, dependent: :destroy
@@ -239,10 +239,6 @@ class Project < ActiveRecord::Base
   validates :repository_size_limit,
             numericality: { only_integer: true, greater_than_or_equal_to: 0, allow_nil: true }

-  validates :sync_time,
-            presence: true,
-            inclusion: { in: Gitlab::Mirror::SYNC_TIME_OPTIONS.values }
-
   with_options if: :mirror? do |project|
     project.validates :import_url, presence: true
     project.validates :mirror_user, presence: true
@@ -272,7 +268,6 @@ class Project < ActiveRecord::Base
   scope :for_milestones, ->(ids) { joins(:milestones).where('milestones.id' => ids).distinct }
   scope :with_push, -> { joins(:events).where('events.action = ?', Event::PUSHED) }
   scope :with_remote_mirrors, -> { joins(:remote_mirrors).where(remote_mirrors: { enabled: true }).distinct }
   scope :with_project_feature, -> { joins('LEFT JOIN project_features ON projects.id = project_features.project_id') }
   scope :with_statistics, -> { includes(:statistics) }
   scope :with_shared_runners, -> { where(shared_runners_enabled: true) }
@@ -323,8 +318,16 @@ class Project < ActiveRecord::Base
   scope :excluding_project, ->(project) { where.not(id: project) }

   state_machine :import_status, initial: :none do
+    event :import_schedule do
+      transition [:none, :finished, :failed] => :scheduled
+    end
+
+    event :force_import_start do
+      transition [:none, :finished, :failed] => :started
+    end
+
     event :import_start do
-      transition [:none, :finished] => :started
+      transition scheduled: :started
     end

     event :import_finish do
@@ -332,24 +335,57 @@ class Project < ActiveRecord::Base
     end

     event :import_fail do
-      transition started: :failed
+      transition [:scheduled, :started] => :failed
     end

-    event :import_retry do
-      transition failed: :started
-    end
-
+    state :scheduled
     state :started
     state :finished
     state :failed

-    after_transition any => :finished, do: :reset_cache_and_import_attrs
+    before_transition [:none, :finished, :failed] => :scheduled do |project, _|
+      project.mirror_data&.last_update_scheduled_at = Time.now
+    end
+
+    after_transition [:none, :finished, :failed] => :scheduled do |project, _|
+      project.run_after_commit { add_import_job }
+    end
+
+    before_transition scheduled: :started do |project, _|
+      project.mirror_data&.last_update_started_at = Time.now
+    end
+
+    before_transition scheduled: :failed do |project, _|
+      if project.mirror?
+        timestamp = Time.now
+        project.mirror_last_update_at = timestamp
+        project.mirror_data.next_execution_timestamp = timestamp
+      end
+    end
+
+    after_transition [:scheduled, :started] => [:finished, :failed] do |project, _|
+      Gitlab::Mirror.decrement_capacity(project.id) if project.mirror?
+    end
+
+    before_transition started: :failed do |project, _|
+      if project.mirror?
+        project.mirror_last_update_at = Time.now
+
+        mirror_data = project.mirror_data
+        mirror_data.increment_retry_count!
+        mirror_data.set_next_execution_timestamp!
+      end
+    end

-    before_transition started: :finished do |project, transaction|
+    before_transition started: :finished do |project, _|
       if project.mirror?
-        timestamp = DateTime.now
+        timestamp = Time.now
         project.mirror_last_update_at = timestamp
         project.mirror_last_successful_update_at = timestamp
+
+        mirror_data = project.mirror_data
+        mirror_data.reset_retry_count!
+        mirror_data.set_next_execution_timestamp!
       end

       if current_application_settings.elasticsearch_indexing?
@@ -357,8 +393,10 @@ class Project < ActiveRecord::Base
       end
     end

-    before_transition started: :failed do |project, transaction|
-      project.mirror_last_update_at = DateTime.now if project.mirror?
+    after_transition started: :finished, do: :reset_cache_and_import_attrs
+
+    after_transition [:finished, :failed] => [:scheduled, :started] do |project, _|
+      Gitlab::Mirror.increment_capacity(project.id) if project.mirror?
     end
   end
@@ -577,9 +615,17 @@ class Project < ActiveRecord::Base
   end

   def import_in_progress?
+    import_started? || import_scheduled?
+  end
+
+  def import_started?
     import? && import_status == 'started'
   end

+  def import_scheduled?
+    import_status == 'scheduled'
+  end
+
   def import_failed?
     import_status == 'failed'
   end
@@ -597,7 +643,10 @@ class Project < ActiveRecord::Base
   end

   def updating_mirror?
-    mirror? && import_in_progress? && !empty_repo?
+    return false unless mirror? && !empty_repo?
+    return true if import_in_progress?
+
+    self.mirror_data.next_execution_timestamp < Time.now
   end

   def mirror_last_update_status
@@ -622,20 +671,6 @@ class Project < ActiveRecord::Base
     mirror_updated? && self.mirror_last_successful_update_at
   end

-  def update_mirror
-    return unless mirror? && repository_exists?
-
-    return if import_in_progress?
-
-    if import_failed?
-      import_retry
-    else
-      import_start
-    end
-
-    RepositoryUpdateMirrorWorker.perform_async(self.id)
-  end
-
   def has_remote_mirror?
     remote_mirrors.enabled.exists?
   end
...
class ProjectMirrorData < ActiveRecord::Base
include Gitlab::CurrentSettings
BACKOFF_PERIOD = 24.seconds
JITTER = 6.seconds
belongs_to :project
validates :project, presence: true
validates :next_execution_timestamp, presence: true
before_validation on: :create do
self.next_execution_timestamp = Time.now
end
def reset_retry_count!
self.retry_count = 0
end
def increment_retry_count!
self.retry_count += 1
end
# We schedule the next sync time based on the duration of the
# last mirroring period and add it a fixed backoff period with a random jitter
def set_next_execution_timestamp!
timestamp = Time.now
retry_factor = [1, self.retry_count].max
delay = [base_delay(timestamp) * retry_factor, Gitlab::Mirror.max_delay].min
self.next_execution_timestamp = timestamp + delay
end
def set_next_execution_to_now!
self.update_attributes(next_execution_timestamp: Time.now)
end
private
def base_delay(timestamp)
duration = timestamp - self.last_update_started_at
(BACKOFF_PERIOD + rand(JITTER)) * duration.seconds
end
end
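The comment above is the heart of the new scheduling: the next run is pushed out in proportion to how long the last update took and how many times it has failed in a row, capped by Gitlab::Mirror.max_delay. A plain-Ruby approximation of set_next_execution_timestamp!, with ActiveSupport durations replaced by integer seconds and a 5 hour maximum delay assumed for illustration:

    BACKOFF_PERIOD = 24       # seconds, as in the class above
    JITTER         = 6        # seconds, as in the class above
    MAX_DELAY      = 5 * 3600 # assumed mirror_max_delay of 5 hours

    def next_execution_delay(last_update_duration, retry_count)
      retry_factor = [1, retry_count].max
      base_delay   = (BACKOFF_PERIOD + rand(JITTER)) * last_update_duration
      [base_delay * retry_factor, MAX_DELAY].min
    end

    next_execution_delay(10, 0)   # => roughly 240..290 seconds (about 4-5 minutes)
    next_execution_delay(300, 3)  # => 18000 seconds (hits the 5 hour cap)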
@@ -51,15 +51,14 @@ module Projects
       save_project_and_import_data(import_data)

-      @project.import_start if @project.import?
-
       after_create_actions if @project.persisted?

       if @project.errors.empty?
-        @project.add_import_job if @project.import?
+        @project.import_schedule if @project.import?
       else
         fail(error: @project.errors.full_messages.join(', '))
       end

       @project
     rescue ActiveRecord::RecordInvalid => e
       message = "Unable to save #{e.record.type}: #{e.record.errors.full_messages.join(", ")} "
...
@@ -71,10 +71,6 @@
        %span.help-block#repository_size_limit_help_block
          Includes LFS objects. It can be overridden per group, or per project. 0 for unlimited.
          = link_to icon('question-circle'), help_page_path("user/admin_area/settings/account_and_limit_settings")
-    .form-group
-      = f.label :minimum_mirror_sync_time, class: 'control-label col-sm-2'
-      .col-sm-10
-        = f.select :minimum_mirror_sync_time, options_for_select(Gitlab::Mirror::SYNC_TIME_OPTIONS, @application_setting.minimum_mirror_sync_time), {}, class: 'form-control'
     .form-group
       = f.label :session_expire_delay, 'Session duration (minutes)', class: 'control-label col-sm-2'
       .col-sm-10
@@ -105,6 +101,8 @@
            Enabling this will only make licensed EE features available to projects if the project namespace's plan
            includes the feature or if the project is public.

+  = render partial: 'repository_mirrors_form', locals: { f: f }
+
   %fieldset
     %legend Sign-up Restrictions
     .form-group
...
%fieldset
  %legend Repository mirror settings
  .form-group
    = f.label :mirror_max_delay, class: 'control-label col-sm-2' do
      Maximum delay (Hours)
    .col-sm-10
      = f.number_field :mirror_max_delay, class: 'form-control', min: 0
      %span.help-block#mirror_max_delay_help_block
        Maximum time between updates that a mirror can have when scheduled to synchronize.
  .form-group
    = f.label :mirror_max_capacity, class: 'control-label col-sm-2' do
      Maximum capacity
    .col-sm-10
      = f.number_field :mirror_max_capacity, class: 'form-control', min: 0
      %span.help-block#mirror_max_capacity_help_block
        Maximum number of mirrors that can be synchronizing at the same time.
  .form-group
    = f.label :mirror_capacity_threshold, class: 'control-label col-sm-2' do
      Capacity threshold
    .col-sm-10
      = f.number_field :mirror_capacity_threshold, class: 'form-control', min: 0
      %span.help-block#mirror_capacity_threshold
        Minimum capacity to be available before we schedule more mirrors preemptively.
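The help text above describes how the settings interact: mirrors stop being scheduled once the in-flight count reaches the maximum capacity, and a new scheduling pass is only triggered preemptively once at least the threshold number of slots is free again. A small worked example in Ruby (the values are illustrative, not defaults from this MR):

    max_capacity       = 30   # mirrors allowed to sync at once
    capacity_threshold = 15   # free slots needed before scheduling preemptively
    in_progress        = 22

    available = max_capacity - in_progress  # => 8
    available >= capacity_threshold         # => false: wait until more mirrors finish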
@@ -43,9 +43,6 @@
            They need to have at least master access to this project.
        - if @project.builds_enabled?
          = render "shared/mirror_trigger_builds_setting", f: f
-       .form-group
-         = f.label :sync_time, "Synchronization time", class: "label-light append-bottom-0"
-         = f.select :sync_time, options_for_select(mirror_sync_time_options, @project.sync_time), {}, class: 'form-control project-mirror-sync-time'
      .col-sm-12
        %hr
  .col-lg-3
...
@@ -4,8 +4,13 @@
    %span.btn.disabled
      = icon("refresh spin")
      Updating&hellip;
+ - elsif !can_force_update_mirror?(@project)
+   %span.btn.disabled{ data: { toggle: 'tooltip', placement: 'auto top' }, style: 'cursor: default',
+     title: 'You can only force update once every five minutes.' }
+     = icon("refresh")
+     Update Now
  - else
-   = link_to update_now_namespace_project_mirror_path(@project.namespace, @project), method: :post, class: "btn" do
+   = link_to update_now_namespace_project_mirror_path(@project.namespace, @project), method: :post, class: 'btn' do
      = icon("refresh")
      Update Now
 - if @project.mirror_last_update_success?
...
 class RepositoryForkWorker
+  ForkError = Class.new(StandardError)
+
   include Sidekiq::Worker
   include Gitlab::ShellAdapter
   include DedicatedSidekiqQueue
@@ -8,29 +10,31 @@ class RepositoryForkWorker
                              source_path: source_path,
                              target_path: target_path)

-    project = Project.find_by_id(project_id)
-
-    unless project.present?
-      logger.error("Project #{project_id} no longer exists!")
-      return
-    end
+    project = Project.find(project_id)
+    project.import_start

     result = gitlab_shell.fork_repository(forked_from_repository_storage_path, source_path,
                                           project.repository_storage_path, target_path)
-    unless result
-      logger.error("Unable to fork project #{project_id} for repository #{source_path} -> #{target_path}")
-      project.mark_import_as_failed('The project could not be forked.')
-      return
-    end
+    raise ForkError, "Unable to fork project #{project_id} for repository #{source_path} -> #{target_path}" unless result

     project.repository.after_import
-
-    unless project.valid_repo?
-      logger.error("Project #{project_id} had an invalid repository after fork")
-      project.mark_import_as_failed('The forked repository is invalid.')
-      return
-    end
+    raise ForkError, "Project #{project_id} had an invalid repository after fork" unless project.valid_repo?

     project.import_finish
+  rescue ForkError => ex
+    fail_fork(project, ex.message)
+    raise
+  rescue => ex
+    return unless project
+
+    fail_fork(project, ex.message)
+    raise ForkError, "#{ex.class} #{ex.message}"
+  end
+
+  private
+
+  def fail_fork(project, message)
+    Rails.logger.error(message)
+    project.mark_import_as_failed(message)
   end
 end
 class RepositoryImportWorker
+  ImportError = Class.new(StandardError)
+
   include Sidekiq::Worker
   include DedicatedSidekiqQueue
@@ -10,6 +12,8 @@ class RepositoryImportWorker
     @project = Project.find(project_id)
     @current_user = @project.creator

+    project.import_start
+
     Gitlab::Metrics.add_event(:import_repository,
                               import_url: @project.import_url,
                               path: @project.path_with_namespace)
@@ -17,16 +21,27 @@ class RepositoryImportWorker
     project.update_columns(import_jid: self.jid, import_error: nil)

     result = Projects::ImportService.new(project, current_user).execute
-
-    if result[:status] == :error
-      project.mark_import_as_failed(result[:message])
-      return
-    end
+    raise ImportError, result[:message] if result[:status] == :error

     project.repository.after_import
     project.import_finish

-    # Explicitly update mirror so that upstream remote is created and fetched
-    project.update_mirror
+    # Explicitly schedule mirror for update so
+    # that upstream remote is created and fetched
+    project.import_schedule if project.mirror?
+  rescue ImportError => ex
+    fail_import(project, ex.message)
+    raise
+  rescue => ex
+    return unless project
+
+    fail_import(project, ex.message)
+    raise ImportError, "#{ex.class} #{ex.message}"
+  end
+
+  private
+
+  def fail_import(project, message)
+    project.mark_import_as_failed(message)
   end
 end
class RepositoryUpdateMirrorDispatchWorker
include Sidekiq::Worker
LEASE_TIMEOUT = 5.minutes
sidekiq_options queue: :project_mirror
attr_accessor :project, :repository, :current_user
def perform(project_id)
return unless try_obtain_lease(project_id)
@project = Project.find_by_id(project_id)
return unless project
project.update_mirror
end
private
def try_obtain_lease(project_id)
# Using 5 minutes timeout based on the 95th percent of timings (currently max of 25 minutes)
lease = ::Gitlab::ExclusiveLease.new("repository_update_mirror_dispatcher:#{project_id}", timeout: LEASE_TIMEOUT)
lease.try_obtain
end
end
 class RepositoryUpdateMirrorWorker
-  UpdateMirrorError = Class.new(StandardError)
+  UpdateError = Class.new(StandardError)
+  UpdateAlreadyInProgressError = Class.new(StandardError)

   include Sidekiq::Worker
   include Gitlab::ShellAdapter
@@ -10,25 +11,35 @@ class RepositoryUpdateMirrorWorker
   attr_accessor :project, :repository, :current_user

   def perform(project_id)
-    begin
-      project = Project.find(project_id)
-
-      return unless project
-
-      @current_user = project.mirror_user || project.creator
-
-      result = Projects::UpdateMirrorService.new(project, @current_user).execute
-      if result[:status] == :error
-        project.mark_import_as_failed(result[:message])
-        return
-      end
-
-      project.import_finish
-    rescue => ex
-      if project
-        project.mark_import_as_failed("We're sorry, a temporary error occurred, please try again.")
-        raise UpdateMirrorError, "#{ex.class}: #{Gitlab::UrlSanitizer.sanitize(ex.message)}"
-      end
-    end
+    project = Project.find(project_id)
+    raise UpdateAlreadyInProgressError if project.import_started?
+
+    project.import_start
+
+    @current_user = project.mirror_user || project.creator
+
+    result = Projects::UpdateMirrorService.new(project, @current_user).execute
+    raise UpdateError, result[:message] if result[:status] == :error
+
+    project.import_finish
+  rescue UpdateAlreadyInProgressError
+    raise
+  rescue UpdateError => ex
+    fail_mirror(project, ex.message)
+    raise
+  rescue => ex
+    return unless project
+
+    fail_mirror(project, ex.message)
+    raise UpdateError, "#{ex.class}: #{ex.message}"
+  ensure
+    UpdateAllMirrorsWorker.perform_async if Gitlab::Mirror.threshold_reached?
+  end
+
+  private
+
+  def fail_mirror(project, message)
+    Rails.logger.error(message)
+    project.mark_import_as_failed(message)
   end
 end
@@ -23,15 +23,25 @@ class RepositoryUpdateRemoteMirrorWorker
     project = remote_mirror.project
     current_user = project.creator

     result = Projects::UpdateRemoteMirrorService.new(project, current_user).execute(remote_mirror)
     raise UpdateError, result[:message] if result[:status] == :error

     remote_mirror.update_finish
   rescue UpdateAlreadyInProgressError
     raise
   rescue UpdateError => ex
-    remote_mirror.mark_as_failed(Gitlab::UrlSanitizer.sanitize(ex.message))
+    fail_remote_mirror(remote_mirror, ex.message)
     raise
   rescue => ex
+    return unless remote_mirror
+
+    fail_remote_mirror(remote_mirror, ex.message)
     raise UpdateError, "#{ex.class}: #{ex.message}"
   end
+
+  private
+
+  def fail_remote_mirror(remote_mirror, message)
+    Rails.logger.error(message)
+    remote_mirror.mark_as_failed(message)
+  end
 end
@@ -2,39 +2,38 @@ class UpdateAllMirrorsWorker
   include Sidekiq::Worker
   include CronjobQueue

-  LEASE_TIMEOUT = 840
+  LEASE_TIMEOUT = 5.minutes
+  LEASE_KEY = 'update_all_mirrors'.freeze

   def perform
     # This worker requires updating the database state, which we can't
     # do on a Geo secondary
     return if Gitlab::Geo.secondary?
-    return unless try_obtain_lease
+
+    lease_uuid = try_obtain_lease
+    return unless lease_uuid

     fail_stuck_mirrors!

-    mirrors_to_sync.find_each(batch_size: 200) do |project|
-      RepositoryUpdateMirrorDispatchWorker.perform_in(rand((project.sync_time / 2).minutes), project.id)
-    end
+    return if Gitlab::Mirror.max_mirror_capacity_reached?
+
+    Project.mirrors_to_sync.find_each(batch_size: 200, &:import_schedule)
+
+    cancel_lease(lease_uuid)
   end

   def fail_stuck_mirrors!
-    stuck = Project.mirror
-      .with_import_status(:started)
-      .where('mirror_last_update_at < ?', 2.hours.ago)
-
-    stuck.find_each(batch_size: 50) do |project|
+    Project.stuck_mirrors.find_each(batch_size: 50) do |project|
       project.mark_import_as_failed('The mirror update took too long to complete.')
     end
   end

   private

-  def mirrors_to_sync
-    Project.mirror.where("mirror_last_successful_update_at + #{Gitlab::Database.minute_interval('sync_time')} <= ? OR sync_time IN (?)", DateTime.now, Gitlab::Mirror.sync_times)
-  end
-
   def try_obtain_lease
-    lease = ::Gitlab::ExclusiveLease.new("update_all_mirrors", timeout: LEASE_TIMEOUT)
-    lease.try_obtain
+    ::Gitlab::ExclusiveLease.new(LEASE_KEY, timeout: LEASE_TIMEOUT).try_obtain
+  end
+
+  def cancel_lease(uuid)
+    ::Gitlab::ExclusiveLease.cancel(LEASE_KEY, uuid)
   end
 end
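The worker above relies on Gitlab::ExclusiveLease so overlapping runs of the every-minute cron cannot double-schedule the same mirrors. A simplified stand-in for that lease, sketched with a plain Redis SET NX and a TTL (this is not the actual ExclusiveLease implementation; a production version would release the lock atomically, for example via a Lua script):

    require 'redis'
    require 'securerandom'

    LEASE_KEY     = 'update_all_mirrors'
    LEASE_TIMEOUT = 5 * 60

    def try_obtain_lease(redis)
      uuid = SecureRandom.uuid
      # NX: only set if the key does not exist; EX: expire even if the holder dies.
      redis.set(LEASE_KEY, uuid, nx: true, ex: LEASE_TIMEOUT) ? uuid : nil
    end

    def cancel_lease(redis, uuid)
      # Non-atomic check-and-delete, good enough for a sketch.
      redis.del(LEASE_KEY) if redis.get(LEASE_KEY) == uuid
    end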
---
title: Move pull mirroring to adaptive scheduling
merge_request: 1853
author:
@@ -264,6 +264,9 @@ Settings.gitlab['default_projects_features'] ||= {}
 Settings.gitlab['webhook_timeout'] ||= 10
 Settings.gitlab['max_attachment_size'] ||= 10
 Settings.gitlab['session_expire_delay'] ||= 10080
+Settings.gitlab['mirror_max_delay'] ||= 5
+Settings.gitlab['mirror_max_capacity'] ||= 30
+Settings.gitlab['mirror_capacity_threshold'] ||= 15
 Settings.gitlab.default_projects_features['issues'] = true if Settings.gitlab.default_projects_features['issues'].nil?
 Settings.gitlab.default_projects_features['merge_requests'] = true if Settings.gitlab.default_projects_features['merge_requests'].nil?
 Settings.gitlab.default_projects_features['wiki'] = true if Settings.gitlab.default_projects_features['wiki'].nil?
...
class CreateProjectMirrorData < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
execute <<-SQL
CREATE TABLE project_mirror_data
AS (
SELECT id AS project_id,
0 AS retry_count,
CAST(NULL AS #{timestamp}) AS last_update_started_at,
CAST(NULL AS #{timestamp}) AS last_update_scheduled_at,
NOW() AS next_execution_timestamp,
NOW() AS created_at,
NOW() AS updated_at
FROM projects
WHERE mirror IS TRUE
);
SQL
add_column :project_mirror_data, :id, :primary_key
change_column_default :project_mirror_data, :retry_count, 0
change_column_null :project_mirror_data, :retry_count, false
add_concurrent_foreign_key :project_mirror_data, :projects, column: :project_id
add_concurrent_index :project_mirror_data, [:project_id], unique: true
end
def down
drop_table :project_mirror_data if table_exists?(:project_mirror_data)
end
def timestamp
return 'TIMESTAMP' if Gitlab::Database.postgresql?
'DATETIME'
end
end
class AddMirrorSettingsToApplicationSetting < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_column_with_default :application_settings,
:mirror_max_delay,
:integer,
default: 5,
allow_null: false
add_column_with_default :application_settings,
:mirror_max_capacity,
:integer,
default: 100,
allow_null: false
add_column_with_default :application_settings,
:mirror_capacity_threshold,
:integer,
default: 50,
allow_null: false
ApplicationSetting.expire
end
def down
remove_column :application_settings, :mirror_max_delay
remove_column :application_settings, :mirror_max_capacity
remove_column :application_settings, :mirror_capacity_threshold
end
end
class RemoveSyncTimeFromProjectMirrorsAndMinimumMirrorSyncTimeFromApplicationSettings < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_concurrent_index :projects, [:sync_time] if index_exists? :projects, [:sync_time]
remove_column :projects, :sync_time, :integer
remove_column :application_settings, :minimum_mirror_sync_time
ApplicationSetting.expire
end
def down
add_column :projects, :sync_time, :integer
add_concurrent_index :projects, [:sync_time]
add_column_with_default :application_settings,
:minimum_mirror_sync_time,
:integer,
default: 15,
allow_null: false
end
end
class EnsureProjectMirrorData < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
return unless Gitlab::Database.postgresql?
execute <<-SQL
INSERT INTO project_mirror_data
SELECT id AS project_id,
0 AS retry_count,
CAST(NULL AS TIMESTAMP) AS last_update_started_at,
CAST(NULL AS TIMESTAMP) AS last_update_scheduled_at,
NOW() AS next_execution_timestamp,
NOW() AS created_at,
NOW() as updated_at
FROM projects
WHERE mirror IS TRUE
AND NOT EXISTS (
SELECT true
FROM project_mirror_data
WHERE project_mirror_data.project_id = projects.id
);
SQL
end
def down
end
end
@@ -123,7 +123,6 @@ ActiveRecord::Schema.define(version: 20170602003304) do
     t.integer "unique_ips_limit_per_user"
     t.integer "unique_ips_limit_time_window"
     t.boolean "unique_ips_limit_enabled", default: false, null: false
-    t.integer "minimum_mirror_sync_time", default: 15, null: false
     t.string "default_artifacts_expire_in", default: "0", null: false
     t.string "elasticsearch_url", default: "http://localhost:9200"
     t.boolean "elasticsearch_aws", default: false, null: false
@@ -138,6 +137,9 @@ ActiveRecord::Schema.define(version: 20170602003304) do
     t.boolean "clientside_sentry_enabled", default: false, null: false
     t.string "clientside_sentry_dsn"
     t.boolean "check_namespace_plan", default: false, null: false
+    t.integer "mirror_max_delay", default: 5, null: false
+    t.integer "mirror_max_capacity", default: 100, null: false
+    t.integer "mirror_capacity_threshold", default: 50, null: false
   end

   create_table "approvals", force: :cascade do |t|
@@ -1079,6 +1081,18 @@ ActiveRecord::Schema.define(version: 20170602003304) do
   add_index "project_import_data", ["project_id"], name: "index_project_import_data_on_project_id", using: :btree

+  create_table "project_mirror_data", force: :cascade do |t|
+    t.integer  "project_id"
+    t.integer  "retry_count", default: 0, null: false
+    t.datetime "last_update_started_at"
+    t.datetime "last_update_scheduled_at"
+    t.datetime "next_execution_timestamp"
+    t.datetime "created_at"
+    t.datetime "updated_at"
+  end
+
+  add_index "project_mirror_data", ["project_id"], name: "index_project_mirror_data_on_project_id", unique: true, using: :btree
+
   create_table "project_statistics", force: :cascade do |t|
     t.integer "project_id", null: false
     t.integer "namespace_id", null: false
@@ -1144,7 +1158,6 @@ ActiveRecord::Schema.define(version: 20170602003304) do
     t.text "description_html"
     t.boolean "only_allow_merge_if_all_discussions_are_resolved"
     t.integer "repository_size_limit", limit: 8
-    t.integer "sync_time", default: 60, null: false
     t.boolean "printing_merge_request_link_enabled", default: true, null: false
     t.integer "auto_cancel_pending_pipelines", default: 1, null: false
     t.boolean "service_desk_enabled"
@@ -1168,7 +1181,6 @@ ActiveRecord::Schema.define(version: 20170602003304) do
   add_index "projects", ["pending_delete"], name: "index_projects_on_pending_delete", using: :btree
   add_index "projects", ["runners_token"], name: "index_projects_on_runners_token", using: :btree
   add_index "projects", ["star_count"], name: "index_projects_on_star_count", using: :btree
-  add_index "projects", ["sync_time"], name: "index_projects_on_sync_time", using: :btree
   add_index "projects", ["visibility_level"], name: "index_projects_on_visibility_level", using: :btree

   create_table "protected_branch_merge_access_levels", force: :cascade do |t|
@@ -1673,6 +1685,7 @@ ActiveRecord::Schema.define(version: 20170602003304) do
   add_foreign_key "personal_access_tokens", "users"
   add_foreign_key "project_authorizations", "projects", on_delete: :cascade
   add_foreign_key "project_authorizations", "users", on_delete: :cascade
+  add_foreign_key "project_mirror_data", "projects", name: "fk_d1aad367d7", on_delete: :cascade
   add_foreign_key "project_statistics", "projects", on_delete: :cascade
   add_foreign_key "protected_branch_merge_access_levels", "namespaces", column: "group_id"
   add_foreign_key "protected_branch_merge_access_levels", "protected_branches"
...
@@ -51,8 +51,8 @@ whether you want to trigger builds for mirror updates.
 Since the repository on GitLab functions as a mirror of the upstream repository,
 you are advised not to push commits directly to the repository on GitLab.
 Instead, any commits should be pushed to the upstream repository, and will end
-up in the GitLab repository automatically within your project's configured
-synchronization time, or when a [forced update](#forcing-an-update) is initiated.
+up in the GitLab repository automatically within a certain period of time
+or when a [forced update](#forcing-an-update) is initiated.

 If you do manually update a branch in the GitLab repository, the branch will
 become diverged from upstream, and GitLab will no longer automatically update
@@ -83,7 +83,7 @@ In case of a diverged branch, you will see an error indicated at the

 ## Forcing an update

-While mirrors update at a pre-configured time (hourly by default), you can always force an update (either **push** or
+While mirrors are scheduled to update automatically, you can always force an update (either **push** or
 **pull**) by using the **Update now** button which is exposed in various places:

 - in the commits page
@@ -91,24 +91,13 @@ While mirrors are scheduled to update automatically, you can always force an update
 - in the tags page
 - in the **Mirror repository** settings page

-## Adjusting synchronization times
-
-Your repository's default synchronization time is hourly.
-However, you can adjust it by visiting the **Mirror repository** page
-under the wheel icon in the upper right corner.
-
-Check the Synchronization time section where you can choose to have your mirror
-be updated once every fifteen minutes, hourly or daily and then hit **Save changes**
-at the bottom.
-
 ## Using both mirroring methods at the same time

 Currently there is no bidirectional support without conflicts. That means that
 if you configure a repository to both pull and push to a second one, there is
-no guarantee that it will update correctly on both remotes. You could
-adjust the synchronization times on the mirror settings page
-to a very low value and hope that no conflicts occur during
-the pull/push window time, but that is not a solution to consider on a
-production environment. Another thing you could try is [configuring custom Git hooks][hooks] on the GitLab server.
+no guarantee that it will update correctly on both remotes.
+You can try [configuring custom Git hooks][hooks] on the GitLab server in order
+to resolve this issue.

 [ee-51]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/51
...
@@ -2,85 +2,66 @@ module Gitlab
   module Mirror
     include Gitlab::CurrentSettings

-    FIFTEEN = 15
-    HOURLY = 60
-    THREE = 180
-    SIX = 360
-    TWELVE = 720
-    DAILY = 1440
-
-    INTERVAL_BEFORE_FIFTEEN = 14.minutes
-
-    SYNC_TIME_TO_CRON = {
-      FIFTEEN => "*/15 * * * *",
-      HOURLY => "0 * * * *",
-      THREE => "0 */3 * * *",
-      SIX => "0 */6 * * *",
-      TWELVE => "0 */12 * * *",
-      DAILY => "0 0 * * *"
-    }.freeze
-
-    SYNC_TIME_OPTIONS = {
-      "Update every 15 minutes" => FIFTEEN,
-      "Update hourly" => HOURLY,
-      "Update every three hours" => THREE,
-      "Update every six hours" => SIX,
-      "Update every twelve hours" => TWELVE,
-      "Update every day" => DAILY
-    }.freeze
+    # Runs scheduler every minute
+    SCHEDULER_CRON = '* * * * *'.freeze
+    PULL_CAPACITY_KEY = 'MIRROR_PULL_CAPACITY'.freeze
+    UPPER_JITTER = 1.minute

     class << self
-      def sync_times
-        sync_times = [FIFTEEN]
-        sync_times << DAILY if at_beginning_of_day?
-        sync_times << TWELVE if at_beginning_of_hour?(12)
-        sync_times << SIX if at_beginning_of_hour?(6)
-        sync_times << THREE if at_beginning_of_hour?(3)
-        sync_times << HOURLY if at_beginning_of_hour?
-
-        sync_times
-      end
-
-      def update_all_mirrors_cron_job
-        Sidekiq::Cron::Job.find("update_all_mirrors_worker")
-      end
-
-      def destroy_cron_job!
-        update_all_mirrors_cron_job&.destroy
-      end
-
       def configure_cron_job!
-        if Gitlab::Geo.secondary?
-          destroy_cron_job!
-          return
-        end
-
-        minimum_mirror_sync_time = current_application_settings.minimum_mirror_sync_time rescue FIFTEEN
-        sync_time = SYNC_TIME_TO_CRON[minimum_mirror_sync_time]
-
         destroy_cron_job!
+        return if Gitlab::Geo.secondary?
+
         Sidekiq::Cron::Job.create(
           name: 'update_all_mirrors_worker',
-          cron: sync_time,
+          cron: SCHEDULER_CRON,
           class: 'UpdateAllMirrorsWorker'
         )
       end

-      def at_beginning_of_day?
-        start_at = DateTime.now.at_beginning_of_day
-        end_at = start_at + INTERVAL_BEFORE_FIFTEEN
-
-        DateTime.now.between?(start_at, end_at)
-      end
-
-      def at_beginning_of_hour?(hour_mark = nil)
-        start_at = DateTime.now.at_beginning_of_hour
-        end_at = start_at + INTERVAL_BEFORE_FIFTEEN
-
-        between_interval = DateTime.now.between?(start_at, end_at)
-        return between_interval unless hour_mark
-
-        between_interval && DateTime.now.hour % hour_mark == 0
+      def max_mirror_capacity_reached?
+        available_capacity <= 0
+      end
+
+      def threshold_reached?
+        available_capacity >= capacity_threshold
+      end
+
+      def available_capacity
+        current_capacity = Gitlab::Redis.with { |redis| redis.scard(PULL_CAPACITY_KEY) }
+
+        max_capacity - current_capacity.to_i
+      end
+
+      def increment_capacity(project_id)
+        Gitlab::Redis.with { |redis| redis.sadd(PULL_CAPACITY_KEY, project_id) }
+      end
+
+      # We do not want negative capacity
+      def decrement_capacity(project_id)
+        Gitlab::Redis.with { |redis| redis.srem(PULL_CAPACITY_KEY, project_id) }
+      end
+
+      def max_delay
+        current_application_settings.mirror_max_delay.hours + rand(UPPER_JITTER)
+      end
+
+      def max_capacity
+        current_application_settings.mirror_max_capacity
+      end
+
+      def capacity_threshold
+        current_application_settings.mirror_capacity_threshold
+      end
+
+      private
+
+      def update_all_mirrors_cron_job
+        Sidekiq::Cron::Job.find("update_all_mirrors_worker")
+      end
+
+      def destroy_cron_job!
+        update_all_mirrors_cron_job&.destroy
       end
     end
   end
 end
...
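A note on the capacity accounting in the Gitlab::Mirror module above: the number of in-flight pulls is tracked as a Redis set keyed by project id, so repeated increments for the same project are idempotent, SCARD gives the current usage, and SREM frees a slot when a mirror finishes or fails. Sketched here with redis-rb directly (Gitlab::Redis.with roughly just yields a pooled connection; the capacity value is assumed):

    require 'redis'

    redis = Redis.new
    key = 'MIRROR_PULL_CAPACITY'

    redis.sadd(key, 42)   # project 42 starts syncing; a second sadd is a no-op
    redis.sadd(key, 43)
    redis.scard(key)      # => 2 mirrors currently counted as in flight
    redis.srem(key, 42)   # project 42 finished or failed; frees one slot

    available = 30 - redis.scard(key).to_i  # assuming mirror_max_capacity of 30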
@@ -37,7 +37,7 @@ class GithubImport
   end

   def import!
-    @project.import_start
+    @project.force_import_start

     timings = Benchmark.measure do
       Github::Import.new(@project, @options).execute
...
require 'spec_helper' require 'spec_helper'
describe Projects::MirrorsController do describe Projects::MirrorsController do
let(:sync_times) { Gitlab::Mirror::SYNC_TIME_OPTIONS.values }
describe 'setting up a mirror' do
context 'when the current project is a mirror' do
before do
@project = create(:project, :mirror)
sign_in(@project.owner)
end
context 'sync_time update' do
it 'allows sync_time update with valid time' do
sync_times.each do |sync_time|
expect do
do_put(@project, sync_time: sync_time)
end.to change { Project.mirror.where(sync_time: sync_time).count }.by(1)
end
end
it 'fails to update sync_time with invalid time' do
expect do
do_put(@project, sync_time: 1000)
end.not_to change { @project.sync_time }
end
end
end
end
describe 'setting up a remote mirror' do describe 'setting up a remote mirror' do
context 'when the current project is a mirror' do context 'when the current project is a mirror' do
let(:project) { create(:project, :mirror) }
before do before do
@project = create(:project, :mirror) sign_in(project.owner)
sign_in(@project.owner)
end end
it 'allows to create a remote mirror' do it 'allows to create a remote mirror' do
expect_any_instance_of(EE::Project).to receive(:force_import_job!)
expect do expect do
do_put(@project, remote_mirrors_attributes: { '0' => { 'enabled' => 1, 'url' => 'http://foo.com' } }) do_put(project, remote_mirrors_attributes: { '0' => { 'enabled' => 1, 'url' => 'http://foo.com' } })
end.to change { RemoteMirror.count }.to(1) end.to change { RemoteMirror.count }.to(1)
end end
context 'when remote mirror has the same URL' do context 'when remote mirror has the same URL' do
it 'does not allow to create the remote mirror' do it 'does not allow to create the remote mirror' do
expect do expect do
do_put(@project, remote_mirrors_attributes: { '0' => { 'enabled' => 1, 'url' => @project.import_url } }) do_put(project, remote_mirrors_attributes: { '0' => { 'enabled' => 1, 'url' => project.import_url } })
end.not_to change { RemoteMirror.count } end.not_to change { RemoteMirror.count }
end end
context 'with disabled local mirror' do context 'with disabled local mirror' do
it 'allows to create a remote mirror' do it 'allows to create a remote mirror' do
expect do expect do
do_put(@project, mirror: 0, remote_mirrors_attributes: { '0' => { 'enabled' => 1, 'url' => @project.import_url } }) do_put(project, mirror: 0, remote_mirrors_attributes: { '0' => { 'enabled' => 1, 'url' => project.import_url } })
end.to change { RemoteMirror.count }.to(1) end.to change { RemoteMirror.count }.to(1)
end end
end end
...@@ -70,32 +46,72 @@ describe Projects::MirrorsController do ...@@ -70,32 +46,72 @@ describe Projects::MirrorsController do
end end
context 'when the current project has a remote mirror' do context 'when the current project has a remote mirror' do
let(:project) { create(:project) }
let(:remote_mirror) { project.remote_mirrors.create!(enabled: 1, url: 'http://local.dev') }
before do before do
@project = create(:project) sign_in(project.owner)
@remote_mirror = @project.remote_mirrors.create!(enabled: 1, url: 'http://local.dev')
sign_in(@project.owner)
end end
context 'when trying to create a mirror with the same URL' do context 'when trying to create a mirror with the same URL' do
it 'should not setup the mirror' do it 'should not setup the mirror' do
do_put(@project, mirror: true, import_url: @remote_mirror.url) do_put(project, mirror: true, import_url: remote_mirror.url)
expect(@project.reload.mirror).to be_falsey expect(project.reload.mirror).to be_falsey
expect(@project.reload.import_url).to be_blank expect(project.reload.import_url).to be_blank
end end
end end
context 'when trying to create a mirror with a different URL' do context 'when trying to create a mirror with a different URL' do
it 'should setup the mirror' do it 'should setup the mirror' do
do_put(@project, mirror: true, mirror_user_id: @project.owner.id, import_url: 'http://test.com') expect_any_instance_of(EE::Project).to receive(:force_import_job!)
do_put(project, mirror: true, mirror_user_id: project.owner.id, import_url: 'http://local.dev')
expect(@project.reload.mirror).to eq(true) expect(project.reload.mirror).to eq(true)
expect(@project.reload.import_url).to eq('http://test.com') expect(project.reload.import_url).to eq('http://local.dev')
end end
end end
end end
end end
describe 'setting up a mirror' do
before do
sign_in(project.owner)
end
context 'when project does not have a mirror' do
let(:project) { create(:project) }
it 'allows to create a mirror' do
expect_any_instance_of(EE::Project).to receive(:force_import_job!)
expect do
do_put(project, mirror: true, mirror_user_id: project.owner.id, import_url: 'http://foo.com')
end.to change { Project.mirror.count }.to(1)
end
end
context 'when project has a mirror' do
let(:project) { create(:project, :mirror, :import_finished) }
it 'is able to disable the mirror' do
expect { do_put(project, mirror: false) }.to change { Project.mirror.count }.to(0)
end
end
end
describe 'forcing an update' do
it 'forces update' do
expect_any_instance_of(EE::Project).to receive(:force_import_job!)
project = create(:project, :mirror)
sign_in(project.owner)
put :update_now, { namespace_id: project.namespace.to_param, project_id: project.to_param }
end
end
def do_put(project, options)
attrs = { namespace_id: project.namespace.to_param, project_id: project.to_param }
attrs[:project] = options
...
...@@ -7,5 +7,9 @@ FactoryGirl.define do
link.forked_from_project.reload
link.forked_to_project.reload
end
trait :forked_to_empty_project do
association :forked_to_project, factory: :empty_project
end
end
end
...@@ -24,20 +24,25 @@ FactoryGirl.define do
visibility_level Gitlab::VisibilityLevel::PRIVATE
end
trait :import_scheduled do
import_status :scheduled
end
trait :import_started do
import_url { generate(:url) }
import_status :started
end
trait :import_finished do
import_started
import_status :finished
end
trait :import_failed do
import_status :failed
end
trait :mirror do
mirror true
import_url { generate(:url) }
mirror_user_id { creator_id }
end
...
...@@ -68,9 +68,14 @@ feature 'Diffs URL', js: true, feature: true do
let(:merge_request) { create(:merge_request_with_diffs, source_project: forked_project, target_project: project, author: author_user) }
let(:changelog_id) { Digest::SHA1.hexdigest("CHANGELOG") }
before do
forked_project.repository.after_import
end
context 'as author' do
it 'shows direct edit link' do
login_as(author_user)
visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)
# Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
...@@ -81,6 +86,7 @@ feature 'Diffs URL', js: true, feature: true do
context 'as user who needs to fork' do
it 'shows fork/cancel confirmation' do
login_as(user)
visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)
# Throws `Capybara::Poltergeist::InvalidSelector` if we try to use `#hash` syntax
...
...@@ -10,27 +10,39 @@ feature 'Project mirror', feature: true do
login_as user
end
context 'with Update now button' do
let(:timestamp) { Time.now }
before do
project.mirror_data.update_attributes(next_execution_timestamp: timestamp + 10.minutes)
end
context 'when able to force update' do
it 'forces import' do
project.update_attributes(mirror_last_update_at: timestamp - 8.minutes)
expect_any_instance_of(EE::Project).to receive(:force_import_job!)
Timecop.freeze(timestamp) do
visit namespace_project_mirror_path(project.namespace, project)
end
Sidekiq::Testing.fake! { click_link('Update Now') }
end
end
context 'when unable to force update' do
it 'does not force import' do
project.update_attributes(mirror_last_update_at: timestamp - 3.minutes)
expect_any_instance_of(EE::Project).not_to receive(:force_import_job!)
Timecop.freeze(timestamp) do
visit namespace_project_mirror_path(project.namespace, project)
end
expect(page).to have_content('Update Now')
expect(page).to have_selector('.btn.disabled')
end
end
end
end
...
...@@ -25,7 +25,7 @@ describe Gitlab::BitbucketImport::ProjectCreator, lib: true do
end
it 'creates project' do
allow_any_instance_of(EE::Project).to receive(:add_import_job)
project_creator = Gitlab::BitbucketImport::ProjectCreator.new(repo, 'vim', namespace, user, access_params)
project = project_creator.execute
...
...@@ -17,7 +17,7 @@ describe Gitlab::GithubImport::ProjectCreator, lib: true do
before do
namespace.add_owner(user)
allow_any_instance_of(EE::Project).to receive(:add_import_job)
end
describe '#execute' do
...
...@@ -21,7 +21,7 @@ describe Gitlab::GitlabImport::ProjectCreator, lib: true do
end
it 'creates project' do
allow_any_instance_of(EE::Project).to receive(:add_import_job)
project_creator = Gitlab::GitlabImport::ProjectCreator.new(repo, namespace, user, access_params)
project = project_creator.execute
...
...@@ -16,7 +16,7 @@ describe Gitlab::GoogleCodeImport::ProjectCreator, lib: true do
end
it 'creates project' do
allow_any_instance_of(EE::Project).to receive(:add_import_job)
project_creator = Gitlab::GoogleCodeImport::ProjectCreator.new(repo, namespace, user)
project = project_creator.execute
...
...@@ -262,6 +262,7 @@ project:
- statistics
- container_repositories
- uploads
- mirror_data
award_emoji:
- awardable
- user
...
...@@ -21,11 +21,24 @@ describe ApplicationSetting, models: true do
it { is_expected.to allow_value(https).for(:after_sign_out_path) }
it { is_expected.not_to allow_value(ftp).for(:after_sign_out_path) }
it { is_expected.to allow_value(10).for(:mirror_max_delay) }
it { is_expected.not_to allow_value(nil).for(:mirror_max_delay) }
it { is_expected.not_to allow_value(0).for(:mirror_max_delay) }
it { is_expected.not_to allow_value(1.0).for(:mirror_max_delay) }
it { is_expected.not_to allow_value(-1).for(:mirror_max_delay) }
it { is_expected.to allow_value(10).for(:mirror_max_capacity) }
it { is_expected.not_to allow_value(nil).for(:mirror_max_capacity) }
it { is_expected.not_to allow_value(0).for(:mirror_max_capacity) }
it { is_expected.not_to allow_value(1.0).for(:mirror_max_capacity) }
it { is_expected.not_to allow_value(-1).for(:mirror_max_capacity) }
it { is_expected.to allow_value(10).for(:mirror_capacity_threshold) }
it { is_expected.not_to allow_value(nil).for(:mirror_capacity_threshold) }
it { is_expected.not_to allow_value(0).for(:mirror_capacity_threshold) }
it { is_expected.not_to allow_value(1.0).for(:mirror_capacity_threshold) }
it { is_expected.not_to allow_value(-1).for(:mirror_capacity_threshold) }
it { is_expected.not_to allow_value(subject.mirror_max_capacity + 1).for(:mirror_capacity_threshold) }
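The examples above only pin down the accepted values for the three new mirror settings. A minimal sketch of validations with that behaviour, assuming helper and option names that are not taken from the actual EE::ApplicationSetting source:

  validates :mirror_max_delay, :mirror_max_capacity, :mirror_capacity_threshold,
            presence: true,
            numericality: { only_integer: true, greater_than: 0 }
  validate :mirror_capacity_threshold_within_max

  def mirror_capacity_threshold_within_max
    # Assumed custom check: the spec only requires threshold <= mirror_max_capacity.
    if mirror_capacity_threshold.to_i > mirror_max_capacity.to_i
      errors.add(:mirror_capacity_threshold, 'cannot exceed mirror_max_capacity')
    end
  end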
describe 'disabled_oauth_sign_in_sources validations' do
before do
...@@ -82,51 +95,6 @@ describe ApplicationSetting, models: true do
subject { setting }
end
context "update minimum_mirror_sync_time" do
sync_times = Gitlab::Mirror::SYNC_TIME_TO_CRON.keys
before do
Sidekiq::Logging.logger = nil
sync_times.each do |sync_time|
create(:project, :mirror, sync_time: sync_time)
end
end
sync_times.drop(1).each_with_index do |sync_time, index|
context "with #{sync_time} sync_time" do
subject { setting.update_attributes(minimum_mirror_sync_time: sync_time) }
it "updates minimum mirror sync time to #{sync_time}" do
expect_any_instance_of(ApplicationSetting).to receive(:update_mirror_cron_job).and_call_original
expect(Gitlab::Mirror).to receive(:configure_cron_job!)
subject
end
it 'updates every mirror to the current minimum_mirror_sync_time' do
expect { subject }.to change { Project.mirror.where('sync_time < ?', sync_time).count }.from(index + 1).to(0)
end
end
end
# fifteen is a special case so we isolate it
context 'with default fifteen sync time' do
let(:sync_time) { Gitlab::Mirror::FIFTEEN }
it 'does not update minimum_mirror_sync_time' do
expect_any_instance_of(ApplicationSetting).not_to receive(:update_mirror_cron_job)
expect(Gitlab::Mirror).not_to receive(:configure_cron_job!)
expect(setting.minimum_mirror_sync_time).to eq(Gitlab::Mirror::FIFTEEN)
setting.update_attributes(minimum_mirror_sync_time: sync_time)
end
it 'updates every mirror to the current minimum_mirror_sync_time' do
expect { setting.update_attributes(minimum_mirror_sync_time: sync_time) }.not_to change { Project.mirror.where('sync_time < ?', sync_time).count }
end
end
end
# Upgraded databases will have this sort of content
context 'repository_storages is a String, not an Array' do
before { setting.__send__(:raw_write_attribute, :repository_storages, 'default') }
...
require 'rails_helper'
describe ProjectMirrorData, type: :model do
include Gitlab::CurrentSettings
describe 'associations' do
it { is_expected.to belong_to(:project) }
end
describe 'modules' do
it { is_expected.to include_module(Gitlab::CurrentSettings) }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:project) }
end
describe 'when create' do
it 'sets next execution timestamp to now' do
project = create(:empty_project)
Timecop.freeze(Time.now) do
project.create_mirror_data
expect(project.mirror_data.next_execution_timestamp).to eq(Time.now)
end
end
end
describe '#reset_retry_count!' do
let(:mirror_data) { create(:project, :mirror, :import_finished).mirror_data }
it 'resets retry_count to 0' do
mirror_data.retry_count = 3
expect { mirror_data.reset_retry_count! }.to change { mirror_data.retry_count }.from(3).to(0)
end
end
describe '#increment_retry_count!' do
let(:mirror_data) { create(:project, :mirror, :import_finished).mirror_data }
it 'increments retry_count' do
expect { mirror_data.increment_retry_count! }.to change { mirror_data.retry_count }.from(0).to(1)
end
end
describe '#set_next_execution_timestamp!' do
let(:mirror_data) { create(:project, :mirror, :import_finished).mirror_data }
let!(:timestamp) { Time.now }
let!(:jitter) { 2.seconds }
let(:interval) { 2.minutes }
before do
allow_any_instance_of(ProjectMirrorData).to receive(:rand).and_return(jitter)
end
context 'when base delay is lower than mirror_max_delay' do
before do
mirror_data.last_update_started_at = timestamp - 1.minute
end
context 'when retry count is 0' do
it 'applies transition successfully' do
expect do
mirror_data.set_next_execution_timestamp!
end.to change { mirror_data.next_execution_timestamp }.to be_within(interval).of(timestamp + 26.minutes)
end
end
context 'when incrementing retry count' do
it 'applies transition successfully' do
mirror_data.retry_count = 2
mirror_data.increment_retry_count!
expect do
mirror_data.set_next_execution_timestamp!
end.to change { mirror_data.next_execution_timestamp }.to be_within(interval).of(timestamp + 79.minutes)
end
end
end
context 'when base delay is higher than mirror_max_delay' do
let!(:upper_jitter) { 30.seconds }
let(:max_timestamp) { timestamp + current_application_settings.mirror_max_delay.hours }
before do
allow_any_instance_of(Gitlab::Mirror).to receive(:rand).and_return(upper_jitter)
mirror_data.last_update_started_at = timestamp - 1.hour
end
context 'when reseting retry count' do
it 'applies transition successfully' do
expect do
mirror_data.set_next_execution_timestamp!
end.to change { mirror_data.next_execution_timestamp }.to be_within(interval).of(max_timestamp + upper_jitter)
end
end
context 'when incrementing retry count' do
it 'applies transition successfully' do
mirror_data.retry_count = 2
mirror_data.increment_retry_count!
expect do
mirror_data.set_next_execution_timestamp!
end.to change { mirror_data.next_execution_timestamp }.to be_within(interval).of(max_timestamp + upper_jitter)
end
end
end
end
end
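The #set_next_execution_timestamp! examples above only constrain the shape of the schedule: the delay grows with how long the last update took and with the retry count, gets a small random jitter, and is capped at the mirror_max_delay setting (hours). A sketch with that shape; BACKOFF_FACTOR, the jitter range and the method body are assumptions, not the real model code:

  BACKOFF_FACTOR = 26 # assumed; the real constant is whatever yields the 26- and 79-minute expectations above

  def set_next_execution_timestamp!
    timestamp    = Time.now
    duration     = timestamp - last_update_started_at   # seconds the last update has been running
    retry_factor = [1, retry_count].max

    delay = BACKOFF_FACTOR * duration * retry_factor                            # grows with update time and retries
    delay = [delay, current_application_settings.mirror_max_delay.hours].min    # capped by the admin setting

    update!(next_execution_timestamp: timestamp + delay + rand(30).seconds)     # plus a small jitter
  end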
...@@ -50,7 +50,8 @@ describe Project, models: true do
it { is_expected.to have_one(:external_wiki_service).dependent(:destroy) }
it { is_expected.to have_one(:project_feature).dependent(:destroy) }
it { is_expected.to have_one(:statistics).class_name('ProjectStatistics').dependent(:delete) }
it { is_expected.to have_one(:import_data).class_name('ProjectImportData').dependent(:delete) }
it { is_expected.to have_one(:mirror_data).class_name('ProjectMirrorData').dependent(:delete) }
it { is_expected.to have_one(:last_event).class_name('Event') }
it { is_expected.to have_one(:forked_from_project).through(:forked_project_link) }
it { is_expected.to have_many(:commit_statuses) }
...@@ -262,6 +263,18 @@ describe Project, models: true do
expect(project2.errors[:import_url]).to include('imports are not allowed from that URL')
end
it 'creates mirror data when enabled' do
project2 = create(:empty_project, :mirror, mirror: false)
expect { project2.update_attributes(mirror: true) }.to change { ProjectMirrorData.count }.from(0).to(1)
end
it 'destroys mirror data when disabled' do
project2 = create(:empty_project, :mirror)
expect { project2.update_attributes(mirror: false) }.to change { ProjectMirrorData.count }.from(1).to(0)
end
describe 'project pending deletion' do
let!(:project_pending_deletion) do
create(:empty_project,
...@@ -1681,16 +1694,13 @@ describe Project, models: true do
end
describe 'Project import job' do
let(:project) { create(:empty_project, import_url: generate(:url)) }
before do
allow_any_instance_of(Gitlab::Shell).to receive(:import_repository)
.with(project.repository_storage_path, project.path_with_namespace, project.import_url)
.and_return(true)
allow(project).to receive(:repository_exists?).and_return(true)
expect_any_instance_of(Repository).to receive(:after_import)
.and_call_original
end
...@@ -1698,23 +1708,21 @@ describe Project, models: true do
it 'imports a project' do
expect_any_instance_of(RepositoryImportWorker).to receive(:perform).and_call_original
project.import_schedule
expect(project.reload.import_status).to eq('finished')
end
context 'with a mirrored project' do
let(:project) { create(:empty_project, :mirror) }
it 'first calls RepositoryImportWorker and RepositoryUpdateMirrorWorker after' do
allow_any_instance_of(Project).to receive(:repository_exists?).and_return(false, true)
expect_any_instance_of(RepositoryUpdateMirrorWorker).to receive(:perform).with(project.id)
expect_any_instance_of(RepositoryImportWorker).to receive(:perform).with(project.id).and_call_original
project.import_schedule
expect(project.reload.import_status).to eq('finished')
end
end
end
...@@ -1797,9 +1805,61 @@ describe Project, models: true do
end
end
describe '#updating_mirror?' do
context 'when repository is empty' do
it 'returns false' do
project = create(:empty_project, :mirror, :import_started)
expect(project.updating_mirror?).to be false
end
end
context 'when project is not a mirror' do
it 'returns false' do
project = create(:project, :import_started)
expect(project.updating_mirror?).to be false
end
end
context 'when project is in progress' do
it 'returns true' do
project = create(:project, :mirror, :import_started)
expect(project.updating_mirror?).to be true
end
end
context 'when project is expected to run soon' do
it 'returns true' do
timestamp = Time.now
project = create(:project, :mirror, :import_finished)
project.mirror_last_update_at = timestamp - 3.minutes
project.mirror_data.next_execution_timestamp = timestamp - 2.minutes
expect(project.updating_mirror?).to be true
end
end
end
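Read together, these examples suggest a predicate along the following lines. This is a hedged reconstruction; helper names such as import_in_progress? are assumptions, and only the outcomes asserted above are fixed by the spec:

  def updating_mirror?
    return false unless mirror? && repository_exists?           # empty repositories and non-mirrors never count

    import_in_progress? ||                                      # an update is currently running, or...
      mirror_data.next_execution_timestamp <= Time.now          # ...the adaptive scheduler is due to run it
  end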
describe '#force_import_job!' do
it 'sets next execution timestamp to now and schedules UpdateAllMirrorsWorker' do
timestamp = Time.now
project = create(:project, :mirror)
project.mirror_data.update_attributes(next_execution_timestamp: timestamp - 3.minutes)
expect(UpdateAllMirrorsWorker).to receive(:perform_async)
Timecop.freeze(timestamp) do
expect { project.force_import_job! }.to change { project.mirror_data.reload.next_execution_timestamp }.to be_within(1.second).of(timestamp)
end
end
end
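The example above fixes exactly two effects for Project#force_import_job!. A minimal sketch of a method with those effects (the body is hypothetical, not the EE source):

  def force_import_job!
    # Pull the adaptive schedule forward so this mirror is eligible right now...
    mirror_data.update!(next_execution_timestamp: Time.now)
    # ...and kick the scheduler instead of waiting for its next tick.
    UpdateAllMirrorsWorker.perform_async
  end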
describe '#add_import_job' do
context 'forked' do
let(:forked_project_link) { create(:forked_project_link, :forked_to_empty_project) }
let(:forked_from_project) { forked_project_link.forked_from_project }
let(:project) { forked_project_link.forked_to_project }
...@@ -1810,15 +1870,33 @@ describe Project, models: true do
project.add_import_job
end
end
context 'without mirror' do
it 'returns nil' do
project = create(:project)
expect(project.add_import_job).to be_nil
end
end
context 'without repository' do
it 'schedules RepositoryImportWorker' do
project = create(:empty_project, import_url: generate(:url))
expect(RepositoryImportWorker).to receive(:perform_async).with(project.id)
project.add_import_job
end
end
context 'with mirror' do
it 'schedules RepositoryUpdateMirrorWorker' do
project = create(:project, :mirror)
expect(RepositoryUpdateMirrorWorker).to receive(:perform_async).with(project.id)
project.add_import_job
end
end
end
end
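Taken together, the contexts above describe a simple dispatch. A sketch consistent with them (hypothetical body; the forked case and the guard are simplified, and no_import_source? is an assumed helper):

  def add_import_job
    return if no_import_source?   # the spec only shows nil when there is nothing to import

    if mirror?
      RepositoryUpdateMirrorWorker.perform_async(id)   # mirrors go through the mirror pipeline
    else
      RepositoryImportWorker.perform_async(id)         # plain imports, including empty repos with an import URL
    end
  end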
...@@ -2143,6 +2221,16 @@ describe Project, models: true do
end
end
describe '#create_mirror_data' do
it 'it is called after save' do
project = create(:project)
expect(project).to receive(:create_mirror_data)
project.update(mirror: true, mirror_user: project.owner, import_url: 'http://foo.com')
end
end
describe 'inside_path' do
let!(:project1) { create(:empty_project, namespace: create(:namespace, path: 'name_pace')) }
let!(:project2) { create(:empty_project) }
...
...@@ -197,15 +197,13 @@ describe Projects::CreateService, '#execute', services: true do
end
context 'when a bad service template is created' do
before do
create(:service, type: 'DroneCiService', project: nil, template: true, active: true)
end
it 'reports an error in the imported project' do
opts[:import_url] = 'http://www.gitlab.com/gitlab-org/gitlab-ce'
project = create_project(user, opts)
expect(project.errors.full_messages_for(:base).first).to match(/Unable to save project. Error: Unable to save DroneCiService/)
expect(project.services.count).to eq 0
end
end
...
require 'spec_helper'
describe RepositoryForkWorker do
let(:project) { create(:project, :repository, :import_scheduled) }
let(:fork_project) { create(:project, :repository, forked_from_project: project) }
let(:shell) { Gitlab::Shell.new }
...@@ -46,15 +46,27 @@ describe RepositoryForkWorker do
end
it "handles bad fork" do
source_path = project.full_path
target_path = fork_project.namespace.full_path
error_message = "Unable to fork project #{project.id} for repository #{source_path} -> #{target_path}"
expect(shell).to receive(:fork_repository).and_return(false)
expect do
subject.perform(project.id, '/test/path', source_path, target_path)
end.to raise_error(RepositoryForkWorker::ForkError, error_message)
end
it 'handles unexpected error' do
source_path = project.full_path
target_path = fork_project.namespace.full_path
allow_any_instance_of(Gitlab::Shell).to receive(:fork_repository).and_raise(RuntimeError)
expect do
subject.perform(project.id, '/test/path', source_path, target_path)
end.to raise_error(RepositoryForkWorker::ForkError)
expect(project.reload.import_status).to eq('failed')
end
end
end
require 'spec_helper'
describe RepositoryImportWorker do
let(:project) { create(:empty_project, :import_scheduled) }
subject { described_class.new }
...@@ -21,15 +21,26 @@ describe RepositoryImportWorker do
context 'when the import has failed' do
it 'hide the credentials that were used in the import URL' do
error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error })
allow(subject).to receive(:jid).and_return('123')
expect do
subject.perform(project.id)
end.to raise_error(RepositoryImportWorker::ImportError, error)
expect(project.reload.import_jid).not_to be_nil
end
end
context 'with unexpected error' do
it 'marks import as failed' do
allow_any_instance_of(Projects::ImportService).to receive(:execute).and_raise(RuntimeError)
expect do
subject.perform(project.id)
end.to raise_error(RepositoryImportWorker::ImportError)
expect(project.reload.import_status).to eq('failed')
end
end
end
end
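The import and mirror worker specs in this diff share one error-handling pattern: a service-level failure is re-raised as a worker-specific error class, and an unexpected exception leaves import_status as 'failed'. A hedged sketch of that pattern; the Projects::ImportService call signature and the mark_import_as_failed helper are assumptions, not confirmed by this diff:

  class RepositoryImportWorker
    include Sidekiq::Worker

    ImportError = Class.new(StandardError)

    def perform(project_id)
      project = Project.find(project_id)

      result = Projects::ImportService.new(project, project.creator).execute
      raise ImportError, result[:message] if result[:status] == :error

      project.import_finish
    rescue ImportError
      raise
    rescue StandardError => e
      project.mark_import_as_failed(e.message)   # unexpected failures leave import_status 'failed'
      raise ImportError, e.message
    end
  end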
require 'rails_helper'
describe RepositoryUpdateMirrorDispatchWorker do
describe '#perform' do
it 'executes project#update_mirror if can obtain a lease' do
allow_any_instance_of(Gitlab::ExclusiveLease)
.to receive(:try_obtain).and_return(true)
expect_any_instance_of(Project).to receive(:update_mirror)
project = create(:empty_project, :mirror)
described_class.new.perform(project.id)
end
it 'just returns if cannot obtain a lease' do
allow_any_instance_of(Gitlab::ExclusiveLease)
.to receive(:try_obtain).and_return(false)
expect_any_instance_of(Project).not_to receive(:update_mirror)
project = create(:empty_project, :mirror)
described_class.new.perform(project.id)
end
end
end
require 'rails_helper'
describe RepositoryUpdateMirrorWorker do
subject { described_class.new }
describe '#perform' do
context 'with status none' do
let(:project) { create(:empty_project, :mirror, :import_scheduled) }
it 'sets status as finished when update mirror service executes successfully' do
expect_any_instance_of(Projects::UpdateMirrorService).to receive(:execute).and_return(status: :success)
expect { subject.perform(project.id) }.to change { project.reload.import_status }.to('finished')
end
it 'sets status as failed when update mirror service executes with errors' do
error_message = 'fail!'
expect_any_instance_of(Projects::UpdateMirrorService).to receive(:execute).and_return(status: :error, message: error_message)
expect do
subject.perform(project.id)
end.to raise_error(RepositoryUpdateMirrorWorker::UpdateError, error_message)
expect(project.reload.import_status).to eq('failed')
end
end
context 'with another worker already running' do
it 'raises UpdateAlreadyInProgressError' do
mirror = create(:project, :mirror, :import_started)
expect do
subject.perform(mirror.id)
end.to raise_error(RepositoryUpdateMirrorWorker::UpdateAlreadyInProgressError)
end
end
context 'with unexpected error' do
it 'marks mirror as failed' do
mirror = create(:project, :mirror, :import_scheduled)
allow_any_instance_of(Projects::UpdateMirrorService).to receive(:execute).and_raise(RuntimeError)
expect do
subject.perform(mirror.id)
end.to raise_error(RepositoryUpdateMirrorWorker::UpdateError)
expect(mirror.reload.import_status).to eq('failed')
end
end
context 'threshold_reached?' do
let(:mirror) { create(:project, :mirror) }
before do
expect_any_instance_of(Projects::UpdateMirrorService).to receive(:execute).and_return(status: :success)
end
context 'with threshold_reached? true' do
it 'schedules UpdateAllMirrorsWorker' do
expect(Gitlab::Mirror).to receive(:threshold_reached?).and_return(true)
expect(UpdateAllMirrorsWorker).to receive(:perform_async)
subject.perform(mirror.id)
end
end
context 'with threshold_reached? false' do
it 'does not schedule UpdateAllMirrorsWorker' do
expect(Gitlab::Mirror).to receive(:threshold_reached?).and_return(false)
expect(UpdateAllMirrorsWorker).not_to receive(:perform_async)
subject.perform(mirror.id)
end
end
end
end
end
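The 'threshold_reached?' contexts describe the hand-off back to the scheduler after each mirror update: when Gitlab::Mirror reports that the capacity threshold has been reached, the worker schedules UpdateAllMirrorsWorker again. A sketch of that hook (the method name is an assumption; the Gitlab::Mirror.threshold_reached? helper is the one exercised above):

  def schedule_more_mirrors_if_needed
    # Re-run the adaptive scheduler early once enough capacity has freed up.
    UpdateAllMirrorsWorker.perform_async if Gitlab::Mirror.threshold_reached?
  end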
...@@ -47,6 +47,17 @@ describe RepositoryUpdateRemoteMirrorWorker do
end
end
context 'with unexpected error' do
it 'marks mirror as failed' do
allow_any_instance_of(Projects::UpdateRemoteMirrorService).to receive(:execute).with(remote_mirror).and_raise(RuntimeError)
expect do
subject.perform(remote_mirror.id, Time.now)
end.to raise_error(RepositoryUpdateRemoteMirrorWorker::UpdateError)
expect(remote_mirror.reload.update_status).to eq('failed')
end
end
context 'with another worker already running' do
before do
remote_mirror.update_attributes(update_status: 'started')
...
...@@ -6,14 +6,6 @@ describe UpdateAllMirrorsWorker do
before { allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true) }
describe '#perform' do
let!(:fifteen_mirror) { create(:empty_project, :mirror, sync_time: Gitlab::Mirror::FIFTEEN) }
let!(:hourly_mirror) { create(:empty_project, :mirror, sync_time: Gitlab::Mirror::HOURLY) }
let!(:three_mirror) { create(:empty_project, :mirror, sync_time: Gitlab::Mirror::THREE) }
let!(:six_mirror) { create(:empty_project, :mirror, sync_time: Gitlab::Mirror::SIX) }
let!(:twelve_mirror) { create(:empty_project, :mirror, sync_time: Gitlab::Mirror::TWELVE) }
let!(:daily_mirror) { create(:empty_project, :mirror, sync_time: Gitlab::Mirror::DAILY) }
let!(:outdated_mirror) { create(:empty_project, :mirror) }
it 'fails stuck mirrors' do
expect(worker).to receive(:fail_stuck_mirrors!)
...@@ -29,68 +21,6 @@ describe UpdateAllMirrorsWorker do
worker.perform
end
describe 'sync_time' do
def expect_worker_to_enqueue_mirrors(mirrors)
mirrors.each do |mirror|
expect(worker).to receive(:rand).with((mirror.sync_time / 2).minutes).and_return(mirror.sync_time / 2)
expect(RepositoryUpdateMirrorDispatchWorker).to receive(:perform_in).with(mirror.sync_time / 2, mirror.id)
end
worker.perform
end
before do
time = DateTime.now.change(time_params)
Timecop.freeze(time)
outdated_mirror.update_attributes(mirror_last_successful_update_at: time - (Gitlab::Mirror::DAILY + 5).minutes)
end
describe 'fifteen' do
let!(:time_params) { { hour: 1, min: 15 } }
let(:mirrors) { [fifteen_mirror, outdated_mirror] }
it { expect_worker_to_enqueue_mirrors(mirrors) }
end
describe 'hourly' do
let!(:time_params) { { hour: 1 } }
let(:mirrors) { [fifteen_mirror, hourly_mirror, outdated_mirror] }
it { expect_worker_to_enqueue_mirrors(mirrors) }
end
describe 'three' do
let!(:time_params) { { hour: 3 } }
let(:mirrors) { [fifteen_mirror, hourly_mirror, three_mirror, outdated_mirror] }
it { expect_worker_to_enqueue_mirrors(mirrors) }
end
describe 'six' do
let!(:time_params) { { hour: 6 } }
let(:mirrors) { [fifteen_mirror, hourly_mirror, three_mirror, six_mirror, outdated_mirror] }
it { expect_worker_to_enqueue_mirrors(mirrors) }
end
describe 'twelve' do
let!(:time_params) { { hour: 12 } }
let(:mirrors) { [fifteen_mirror, hourly_mirror, three_mirror, six_mirror, twelve_mirror, outdated_mirror] }
it { expect_worker_to_enqueue_mirrors(mirrors) }
end
describe 'daily' do
let!(:time_params) { { hour: 0 } }
let(:mirrors) { [fifteen_mirror, hourly_mirror, three_mirror, six_mirror, twelve_mirror, daily_mirror, outdated_mirror] }
it { expect_worker_to_enqueue_mirrors(mirrors) }
end
after { Timecop.return }
end
end
describe '#fail_stuck_mirrors!' do
...@@ -120,22 +50,15 @@ describe UpdateAllMirrorsWorker do
fail_stuck_mirrors!
end
it 'transitions stuck mirrors to a failed state and updates import_error message' do
project = create(:empty_project, :mirror, :import_started)
project.mirror_data.update_attributes(last_update_started_at: 25.minutes.ago)
fail_stuck_mirrors!
project.reload
expect(project).to be_import_failed
expect(project.reload.import_error).to eq 'The mirror update took too long to complete.'
end
end
end
end
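The last example fixes what #fail_stuck_mirrors! does to a mirror whose update started 25 minutes ago and never finished. A sketch of that cleanup; the scope name and the exact timeout are assumptions, only the failure state and message are asserted above:

  def fail_stuck_mirrors!
    # Assumed scope: import_started mirrors whose last_update_started_at is older than the stuck timeout.
    Project.stuck_mirrors.find_each(batch_size: 50) do |project|
      project.mark_import_as_failed('The mirror update took too long to complete.')
    end
  end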