Commit 22ea74fd authored by GitLab Bot's avatar GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-07-30

# Conflicts:
#	app/services/boards/issues/list_service.rb
#	app/views/shared/boards/components/_board.html.haml
#	doc/administration/high_availability/nfs.md
#	lib/api/projects.rb

[ci skip]
parents f68011c2 e9c9f2e8
...@@ -318,7 +318,7 @@ group :metrics do ...@@ -318,7 +318,7 @@ group :metrics do
gem 'influxdb', '~> 0.2', require: false gem 'influxdb', '~> 0.2', require: false
# Prometheus # Prometheus
gem 'prometheus-client-mmap', '~> 0.9.3' gem 'prometheus-client-mmap', '~> 0.9.4'
gem 'raindrops', '~> 0.18' gem 'raindrops', '~> 0.18'
end end
......
...@@ -664,7 +664,7 @@ GEM ...@@ -664,7 +664,7 @@ GEM
parser parser
unparser unparser
procto (0.0.3) procto (0.0.3)
prometheus-client-mmap (0.9.3) prometheus-client-mmap (0.9.4)
pry (0.10.4) pry (0.10.4)
coderay (~> 1.1.0) coderay (~> 1.1.0)
method_source (~> 0.8.1) method_source (~> 0.8.1)
...@@ -1163,7 +1163,7 @@ DEPENDENCIES ...@@ -1163,7 +1163,7 @@ DEPENDENCIES
peek-sidekiq (~> 1.0.3) peek-sidekiq (~> 1.0.3)
pg (~> 0.18.2) pg (~> 0.18.2)
premailer-rails (~> 1.9.7) premailer-rails (~> 1.9.7)
prometheus-client-mmap (~> 0.9.3) prometheus-client-mmap (~> 0.9.4)
pry-byebug (~> 3.4.1) pry-byebug (~> 3.4.1)
pry-rails (~> 0.3.4) pry-rails (~> 0.3.4)
rack-attack (~> 4.4.1) rack-attack (~> 4.4.1)
......
...@@ -668,7 +668,7 @@ GEM ...@@ -668,7 +668,7 @@ GEM
parser parser
unparser unparser
procto (0.0.3) procto (0.0.3)
prometheus-client-mmap (0.9.3) prometheus-client-mmap (0.9.4)
pry (0.10.4) pry (0.10.4)
coderay (~> 1.1.0) coderay (~> 1.1.0)
method_source (~> 0.8.1) method_source (~> 0.8.1)
...@@ -1173,7 +1173,7 @@ DEPENDENCIES ...@@ -1173,7 +1173,7 @@ DEPENDENCIES
peek-sidekiq (~> 1.0.3) peek-sidekiq (~> 1.0.3)
pg (~> 0.18.2) pg (~> 0.18.2)
premailer-rails (~> 1.9.7) premailer-rails (~> 1.9.7)
prometheus-client-mmap (~> 0.9.3) prometheus-client-mmap (~> 0.9.4)
pry-byebug (~> 3.4.1) pry-byebug (~> 3.4.1)
pry-rails (~> 0.3.4) pry-rails (~> 0.3.4)
rack-attack (~> 4.4.1) rack-attack (~> 4.4.1)
......
...@@ -108,6 +108,9 @@ export default { ...@@ -108,6 +108,9 @@ export default {
false, false,
); );
}, },
gfmCopyText() {
return `\`${this.diffFile.filePath}\``;
},
}, },
methods: { methods: {
...mapActions('diffs', ['toggleFileDiscussions']), ...mapActions('diffs', ['toggleFileDiscussions']),
...@@ -191,6 +194,7 @@ export default { ...@@ -191,6 +194,7 @@ export default {
<clipboard-button <clipboard-button
:title="__('Copy file path to clipboard')" :title="__('Copy file path to clipboard')"
:text="diffFile.filePath" :text="diffFile.filePath"
:gfm="gfmCopyText"
css-class="btn-default btn-transparent btn-clipboard" css-class="btn-default btn-transparent btn-clipboard"
/> />
......
...@@ -14,7 +14,7 @@ import tooltip from '../../../vue_shared/directives/tooltip'; ...@@ -14,7 +14,7 @@ import tooltip from '../../../vue_shared/directives/tooltip';
* "id": 4256, * "id": 4256,
* "name": "test", * "name": "test",
* "status": { * "status": {
* "icon": "icon_status_success", * "icon": "status_success",
* "text": "passed", * "text": "passed",
* "label": "passed", * "label": "passed",
* "group": "success", * "group": "success",
......
...@@ -13,7 +13,7 @@ import tooltip from '../../../vue_shared/directives/tooltip'; ...@@ -13,7 +13,7 @@ import tooltip from '../../../vue_shared/directives/tooltip';
* "id": 4256, * "id": 4256,
* "name": "test", * "name": "test",
* "status": { * "status": {
* "icon": "icon_status_success", * "icon": "status_success",
* "text": "passed", * "text": "passed",
* "label": "passed", * "label": "passed",
* "group": "success", * "group": "success",
......
...@@ -31,6 +31,11 @@ export default { ...@@ -31,6 +31,11 @@ export default {
type: String, type: String,
required: true, required: true,
}, },
gfm: {
type: String,
required: false,
default: null,
},
title: { title: {
type: String, type: String,
required: true, required: true,
...@@ -51,6 +56,14 @@ export default { ...@@ -51,6 +56,14 @@ export default {
default: 'btn-default', default: 'btn-default',
}, },
}, },
computed: {
clipboardText() {
if (this.gfm !== null) {
return JSON.stringify({ text: this.text, gfm: this.gfm });
}
return this.text;
},
},
}; };
</script> </script>
...@@ -59,7 +72,7 @@ export default { ...@@ -59,7 +72,7 @@ export default {
v-tooltip v-tooltip
:class="cssClass" :class="cssClass"
:title="title" :title="title"
:data-clipboard-text="text" :data-clipboard-text="clipboardText"
:data-container="tooltipContainer" :data-container="tooltipContainer"
:data-placement="tooltipPlacement" :data-placement="tooltipPlacement"
type="button" type="button"
......
...@@ -2,7 +2,7 @@ class Admin::JobsController < Admin::ApplicationController ...@@ -2,7 +2,7 @@ class Admin::JobsController < Admin::ApplicationController
def index def index
@scope = params[:scope] @scope = params[:scope]
@all_builds = Ci::Build @all_builds = Ci::Build
@builds = @all_builds.order('created_at DESC') @builds = @all_builds.order('id DESC')
@builds = @builds =
case @scope case @scope
when 'pending' when 'pending'
......
...@@ -26,9 +26,10 @@ module Ci ...@@ -26,9 +26,10 @@ module Ci
has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id Ci::JobArtifact.file_types.each do |key, value|
has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id has_one :"job_artifacts_#{key}", -> { where(file_type: value) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
end
has_one :metadata, class_name: 'Ci::BuildMetadata' has_one :metadata, class_name: 'Ci::BuildMetadata'
has_one :runner_session, class_name: 'Ci::BuildRunnerSession', validate: true, inverse_of: :build has_one :runner_session, class_name: 'Ci::BuildRunnerSession', validate: true, inverse_of: :build
...@@ -390,6 +391,10 @@ module Ci ...@@ -390,6 +391,10 @@ module Ci
trace.exist? trace.exist?
end end
def has_test_reports?
job_artifacts.test_reports.any?
end
def has_old_trace? def has_old_trace?
old_trace.present? old_trace.present?
end end
...@@ -457,16 +462,22 @@ module Ci ...@@ -457,16 +462,22 @@ module Ci
save save
end end
def erase_test_reports!
# TODO: Use fast_destroy_all in the context of https://gitlab.com/gitlab-org/gitlab-ce/issues/35240
job_artifacts_junit&.destroy
end
def erase(opts = {}) def erase(opts = {})
return false unless erasable? return false unless erasable?
erase_artifacts! erase_artifacts!
erase_test_reports!
erase_trace! erase_trace!
update_erased!(opts[:erased_by]) update_erased!(opts[:erased_by])
end end
def erasable? def erasable?
complete? && (artifacts? || has_trace?) complete? && (artifacts? || has_test_reports? || has_trace?)
end end
def erased? def erased?
...@@ -543,10 +554,6 @@ module Ci ...@@ -543,10 +554,6 @@ module Ci
Gitlab::Ci::Build::Image.from_services(self) Gitlab::Ci::Build::Image.from_services(self)
end end
def artifacts
[options[:artifacts]]
end
def cache def cache
cache = options[:cache] cache = options[:cache]
......
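A side note on the `Ci::JobArtifact.file_types.each` loop above: with the enum extended to include `junit` (see the `ci/job_artifact.rb` change below), the loop expands to roughly the following explicit associations — a sketch of the equivalent code, not part of the commit:

```ruby
# Roughly equivalent expansion of the metaprogrammed has_one associations.
has_one :job_artifacts_archive,  -> { where(file_type: Ci::JobArtifact.file_types[:archive]) },  class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_trace,    -> { where(file_type: Ci::JobArtifact.file_types[:trace]) },    class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_junit,    -> { where(file_type: Ci::JobArtifact.file_types[:junit]) },    class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
```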
...@@ -6,11 +6,17 @@ module Ci ...@@ -6,11 +6,17 @@ module Ci
include ObjectStorage::BackgroundMove include ObjectStorage::BackgroundMove
extend Gitlab::Ci::Model extend Gitlab::Ci::Model
TEST_REPORT_FILE_TYPES = %w[junit].freeze
DEFAULT_FILE_NAMES = { junit: 'junit.xml' }.freeze
TYPE_AND_FORMAT_PAIRS = { archive: :zip, metadata: :gzip, trace: :raw, junit: :gzip }.freeze
belongs_to :project belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
mount_uploader :file, JobArtifactUploader mount_uploader :file, JobArtifactUploader
validates :file_format, presence: true, unless: :trace?, on: :create
validate :valid_file_format?, unless: :trace?, on: :create
before_save :set_size, if: :file_changed? before_save :set_size, if: :file_changed?
after_save :update_project_statistics_after_save, if: :size_changed? after_save :update_project_statistics_after_save, if: :size_changed?
after_destroy :update_project_statistics_after_destroy, unless: :project_destroyed? after_destroy :update_project_statistics_after_destroy, unless: :project_destroyed?
...@@ -20,14 +26,33 @@ module Ci ...@@ -20,14 +26,33 @@ module Ci
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) } scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) } scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
scope :test_reports, -> do
types = self.file_types.select { |file_type| TEST_REPORT_FILE_TYPES.include?(file_type) }.values
where(file_type: types)
end
delegate :exists?, :open, to: :file delegate :exists?, :open, to: :file
enum file_type: { enum file_type: {
archive: 1, archive: 1,
metadata: 2, metadata: 2,
trace: 3 trace: 3,
junit: 4
} }
enum file_format: {
raw: 1,
zip: 2,
gzip: 3
}
def valid_file_format?
unless TYPE_AND_FORMAT_PAIRS[self.file_type&.to_sym] == self.file_format&.to_sym
errors.add(:file_format, 'Invalid file format with specified file type')
end
end
def update_file_store def update_file_store
# The file.object_store is set during `uploader.store!` # The file.object_store is set during `uploader.store!`
# which happens after object is inserted/updated # which happens after object is inserted/updated
......
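To illustrate the new `file_type`/`file_format` pairing, a console-style sketch (attribute values invented; other validation errors omitted):

```ruby
# TYPE_AND_FORMAT_PAIRS says a junit report must be gzip, so a zip format is rejected on create.
artifact = Ci::JobArtifact.new(file_type: :junit, file_format: :zip)
artifact.valid?
artifact.errors[:file_format]
# => ["Invalid file format with specified file type"]
```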
require 'openssl'
module Clusters module Clusters
module Applications module Applications
class Helm < ActiveRecord::Base class Helm < ActiveRecord::Base
self.table_name = 'clusters_applications_helm' self.table_name = 'clusters_applications_helm'
attr_encrypted :ca_key,
mode: :per_attribute_iv,
key: Settings.attr_encrypted_db_key_base_truncated,
algorithm: 'aes-256-cbc'
include ::Clusters::Concerns::ApplicationCore include ::Clusters::Concerns::ApplicationCore
include ::Clusters::Concerns::ApplicationStatus include ::Clusters::Concerns::ApplicationStatus
default_value_for :version, Gitlab::Kubernetes::Helm::HELM_VERSION default_value_for :version, Gitlab::Kubernetes::Helm::HELM_VERSION
before_create :create_keys_and_certs
def issue_client_cert
ca_cert_obj.issue
end
def set_initial_status def set_initial_status
return unless not_installable? return unless not_installable?
...@@ -15,7 +28,41 @@ module Clusters ...@@ -15,7 +28,41 @@ module Clusters
end end
def install_command def install_command
Gitlab::Kubernetes::Helm::InitCommand.new(name) Gitlab::Kubernetes::Helm::InitCommand.new(
name: name,
files: files
)
end
def has_ssl?
ca_key.present? && ca_cert.present?
end
private
def files
{
'ca.pem': ca_cert,
'cert.pem': tiller_cert.cert_string,
'key.pem': tiller_cert.key_string
}
end
def create_keys_and_certs
ca_cert = Gitlab::Kubernetes::Helm::Certificate.generate_root
self.ca_key = ca_cert.key_string
self.ca_cert = ca_cert.cert_string
end
def tiller_cert
@tiller_cert ||= ca_cert_obj.issue(expires_in: Gitlab::Kubernetes::Helm::Certificate::INFINITE_EXPIRY)
end
def ca_cert_obj
return unless has_ssl?
Gitlab::Kubernetes::Helm::Certificate
.from_strings(ca_key, ca_cert)
end end
end end
end end
......
...@@ -32,9 +32,9 @@ module Clusters ...@@ -32,9 +32,9 @@ module Clusters
def install_command def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new( Gitlab::Kubernetes::Helm::InstallCommand.new(
name, name: name,
chart: chart, chart: chart,
values: values files: files
) )
end end
......
...@@ -35,9 +35,9 @@ module Clusters ...@@ -35,9 +35,9 @@ module Clusters
def install_command def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new( Gitlab::Kubernetes::Helm::InstallCommand.new(
name, name: name,
chart: chart, chart: chart,
values: values, files: files,
repository: repository repository: repository
) )
end end
......
...@@ -45,10 +45,10 @@ module Clusters ...@@ -45,10 +45,10 @@ module Clusters
def install_command def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new( Gitlab::Kubernetes::Helm::InstallCommand.new(
name, name: name,
chart: chart, chart: chart,
version: version, version: version,
values: values files: files
) )
end end
......
...@@ -28,9 +28,9 @@ module Clusters ...@@ -28,9 +28,9 @@ module Clusters
def install_command def install_command
Gitlab::Kubernetes::Helm::InstallCommand.new( Gitlab::Kubernetes::Helm::InstallCommand.new(
name, name: name,
chart: chart, chart: chart,
values: values, files: files,
repository: repository repository: repository
) )
end end
......
...@@ -12,8 +12,34 @@ module Clusters ...@@ -12,8 +12,34 @@ module Clusters
File.read(chart_values_file) File.read(chart_values_file)
end end
def files
@files ||= begin
files = { 'values.yaml': values }
files.merge!(certificate_files) if cluster.application_helm.has_ssl?
files
end
end
private private
def certificate_files
{
'ca.pem': ca_cert,
'cert.pem': helm_cert.cert_string,
'key.pem': helm_cert.key_string
}
end
def ca_cert
cluster.application_helm.ca_cert
end
def helm_cert
@helm_cert ||= cluster.application_helm.issue_client_cert
end
def chart_values_file def chart_values_file
"#{Rails.root}/vendor/#{name}/values.yaml" "#{Rails.root}/vendor/#{name}/values.yaml"
end end
......
...@@ -26,6 +26,10 @@ module AtomicInternalId ...@@ -26,6 +26,10 @@ module AtomicInternalId
module ClassMethods module ClassMethods
def has_internal_id(column, scope:, init:, presence: true) # rubocop:disable Naming/PredicateName def has_internal_id(column, scope:, init:, presence: true) # rubocop:disable Naming/PredicateName
# We require init here to retain the ability to recalculate in the absence of an
# InternalId record (we may delete records in `internal_ids`, for example).
raise "has_internal_id requires an init block, none given." unless init
before_validation :"ensure_#{scope}_#{column}!", on: :create before_validation :"ensure_#{scope}_#{column}!", on: :create
validates column, presence: presence validates column, presence: presence
......
...@@ -153,6 +153,10 @@ class Milestone < ActiveRecord::Base ...@@ -153,6 +153,10 @@ class Milestone < ActiveRecord::Base
reorder(Gitlab::Database.nulls_last_order('due_date', 'ASC')) reorder(Gitlab::Database.nulls_last_order('due_date', 'ASC'))
when 'due_date_desc' when 'due_date_desc'
reorder(Gitlab::Database.nulls_last_order('due_date', 'DESC')) reorder(Gitlab::Database.nulls_last_order('due_date', 'DESC'))
when 'name_asc'
reorder(Arel::Nodes::Ascending.new(arel_table[:title].lower))
when 'name_desc'
reorder(Arel::Nodes::Descending.new(arel_table[:title].lower))
when 'start_date_asc' when 'start_date_asc'
reorder(Gitlab::Database.nulls_last_order('start_date', 'ASC')) reorder(Gitlab::Database.nulls_last_order('start_date', 'ASC'))
when 'start_date_desc' when 'start_date_desc'
......
...@@ -563,6 +563,10 @@ class Project < ActiveRecord::Base ...@@ -563,6 +563,10 @@ class Project < ActiveRecord::Base
repository.commit_by(oid: oid) repository.commit_by(oid: oid)
end end
def commits_by(oids:)
repository.commits_by(oids: oids)
end
# ref can't be HEAD, can only be branch/tag name or SHA # ref can't be HEAD, can only be branch/tag name or SHA
def latest_successful_builds_for(ref = default_branch) def latest_successful_builds_for(ref = default_branch)
latest_pipeline = pipelines.latest_successful_for(ref) latest_pipeline = pipelines.latest_successful_for(ref)
......
...@@ -257,6 +257,7 @@ class User < ActiveRecord::Base ...@@ -257,6 +257,7 @@ class User < ActiveRecord::Base
scope :todo_authors, ->(user_id, state) { where(id: Todo.where(user_id: user_id, state: state).select(:author_id)) } scope :todo_authors, ->(user_id, state) { where(id: Todo.where(user_id: user_id, state: state).select(:author_id)) }
scope :order_recent_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'DESC')) } scope :order_recent_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'DESC')) }
scope :order_oldest_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'ASC')) } scope :order_oldest_sign_in, -> { reorder(Gitlab::Database.nulls_last_order('current_sign_in_at', 'ASC')) }
scope :confirmed, -> { where.not(confirmed_at: nil) }
def self.with_two_factor_indistinct def self.with_two_factor_indistinct
joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id") joins("LEFT OUTER JOIN u2f_registrations AS u2f ON u2f.user_id = users.id")
...@@ -302,14 +303,17 @@ class User < ActiveRecord::Base ...@@ -302,14 +303,17 @@ class User < ActiveRecord::Base
end end
# Find a User by their primary email or any associated secondary email # Find a User by their primary email or any associated secondary email
def find_by_any_email(email) def find_by_any_email(email, confirmed: false)
by_any_email(email).take by_any_email(email, confirmed: confirmed).take
end end
# Returns a relation containing all the users for the given Email address # Returns a relation containing all the users for the given Email address
def by_any_email(email) def by_any_email(email, confirmed: false)
users = where(email: email) users = where(email: email)
users = users.confirmed if confirmed
emails = joins(:emails).where(emails: { email: email }) emails = joins(:emails).where(emails: { email: email })
emails = emails.confirmed if confirmed
union = Gitlab::SQL::Union.new([users, emails]) union = Gitlab::SQL::Union.new([users, emails])
from("(#{union.to_sql}) #{table_name}") from("(#{union.to_sql}) #{table_name}")
......
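A brief usage sketch of the new `confirmed:` option (email address invented):

```ruby
# Matches against primary and secondary emails, as before.
User.find_by_any_email('user@example.com')
# With confirmed: true, users whose matching email is unconfirmed are filtered out.
User.find_by_any_email('user@example.com', confirmed: true) # => nil unless the email is confirmed
```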
module Ci
class BuildRunnerPresenter < SimpleDelegator
def artifacts
return unless options[:artifacts]
list = []
list << create_archive(options[:artifacts])
list << create_reports(options[:artifacts][:reports], expire_in: options[:artifacts][:expire_in])
list.flatten.compact
end
private
def create_archive(artifacts)
return unless artifacts[:untracked] || artifacts[:paths]
{
artifact_type: :archive,
artifact_format: :zip,
name: artifacts[:name],
untracked: artifacts[:untracked],
paths: artifacts[:paths],
when: artifacts[:when],
expire_in: artifacts[:expire_in]
}
end
def create_reports(reports, expire_in:)
return unless reports&.any?
reports.map do |k, v|
{
artifact_type: k.to_sym,
artifact_format: :gzip,
name: ::Ci::JobArtifact::DEFAULT_FILE_NAMES[k.to_sym],
paths: v,
when: 'always',
expire_in: expire_in
}
end
end
end
end
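For a sense of what the presenter now hands to the runner, assume a job whose artifacts entry requests an archive plus a JUnit report (name, paths, and expiry invented):

```ruby
# Assumed build.options[:artifacts]:
#   { name: 'binaries', paths: ['bin/'], expire_in: '1 week', reports: { junit: ['rspec.xml'] } }
Ci::BuildRunnerPresenter.new(build).artifacts
# => [
#      { artifact_type: :archive, artifact_format: :zip, name: 'binaries',
#        untracked: nil, paths: ['bin/'], when: nil, expire_in: '1 week' },
#      { artifact_type: :junit, artifact_format: :gzip, name: 'junit.xml',
#        paths: ['rspec.xml'], when: 'always', expire_in: '1 week' }
#    ]
```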
...@@ -3,7 +3,10 @@ ...@@ -3,7 +3,10 @@
module Boards module Boards
module Issues module Issues
class ListService < Boards::BaseService class ListService < Boards::BaseService
<<<<<<< HEAD
prepend EE::Boards::Issues::ListService prepend EE::Boards::Issues::ListService
=======
>>>>>>> upstream/master
include Gitlab::Utils::StrongMemoize include Gitlab::Utils::StrongMemoize
def execute def execute
......
...@@ -80,7 +80,7 @@ class GitPushService < BaseService ...@@ -80,7 +80,7 @@ class GitPushService < BaseService
else else
paths = Set.new paths = Set.new
@push_commits.last(PROCESS_COMMIT_LIMIT).each do |commit| last_pushed_commits.each do |commit|
commit.raw_deltas.each do |diff| commit.raw_deltas.each do |diff|
paths << diff.new_path paths << diff.new_path
end end
...@@ -96,7 +96,7 @@ class GitPushService < BaseService ...@@ -96,7 +96,7 @@ class GitPushService < BaseService
end end
def update_signatures def update_signatures
commit_shas = @push_commits.last(PROCESS_COMMIT_LIMIT).map(&:sha) commit_shas = last_pushed_commits.map(&:sha)
return if commit_shas.empty? return if commit_shas.empty?
...@@ -107,16 +107,14 @@ class GitPushService < BaseService ...@@ -107,16 +107,14 @@ class GitPushService < BaseService
commit_shas = Gitlab::Git::Commit.shas_with_signatures(project.repository, commit_shas) commit_shas = Gitlab::Git::Commit.shas_with_signatures(project.repository, commit_shas)
commit_shas.each do |sha| CreateGpgSignatureWorker.perform_async(commit_shas, project.id)
CreateGpgSignatureWorker.perform_async(sha, project.id)
end
end end
# Schedules processing of commit messages. # Schedules processing of commit messages.
def process_commit_messages def process_commit_messages
default = default_branch? default = default_branch?
@push_commits.last(PROCESS_COMMIT_LIMIT).each do |commit| last_pushed_commits.each do |commit|
if commit.matches_cross_reference_regex? if commit.matches_cross_reference_regex?
ProcessCommitWorker ProcessCommitWorker
.perform_async(project.id, current_user.id, commit.to_hash, default) .perform_async(project.id, current_user.id, commit.to_hash, default)
...@@ -212,4 +210,8 @@ class GitPushService < BaseService ...@@ -212,4 +210,8 @@ class GitPushService < BaseService
def branch_name def branch_name
@branch_name ||= Gitlab::Git.ref_name(params[:ref]) @branch_name ||= Gitlab::Git.ref_name(params[:ref])
end end
def last_pushed_commits
@last_pushed_commits ||= @push_commits.last(PROCESS_COMMIT_LIMIT)
end
end end
...@@ -36,7 +36,10 @@ ...@@ -36,7 +36,10 @@
%span.issue-count-badge-count %span.issue-count-badge-count
%icon.mr-1{ name: "issues" } %icon.mr-1{ name: "issues" }
{{ list.issuesSize }} {{ list.issuesSize }}
<<<<<<< HEAD
=======
>>>>>>> upstream/master
= render_if_exists "shared/boards/components/list_weight" = render_if_exists "shared/boards/components/list_weight"
- if can?(current_user, :admin_list, current_board_parent) - if can?(current_user, :admin_list, current_board_parent)
...@@ -47,6 +50,10 @@ ...@@ -47,6 +50,10 @@
"title" => _("New issue"), "title" => _("New issue"),
data: { placement: "top", container: "body" } } data: { placement: "top", container: "body" } }
= icon("plus", class: "js-no-trigger-collapse") = icon("plus", class: "js-no-trigger-collapse")
<<<<<<< HEAD
=======
>>>>>>> upstream/master
%board-list{ "v-if" => 'list.type !== "blank" && list.type !== "promotion"', %board-list{ "v-if" => 'list.type !== "blank" && list.type !== "promotion"',
":list" => "list", ":list" => "list",
":issues" => "list.issues", ":issues" => "list.issues",
......
...@@ -3,15 +3,23 @@ ...@@ -3,15 +3,23 @@
class CreateGpgSignatureWorker class CreateGpgSignatureWorker
include ApplicationWorker include ApplicationWorker
def perform(commit_sha, project_id) def perform(commit_shas, project_id)
return if commit_shas.empty?
project = Project.find_by(id: project_id) project = Project.find_by(id: project_id)
return unless project return unless project
commit = project.commit(commit_sha) commits = project.commits_by(oids: commit_shas)
return unless commit return if commits.empty?
# This calculates and caches the signature in the database # This calculates and caches the signature in the database
Gitlab::Gpg::Commit.new(commit).signature commits.each do |commit|
begin
Gitlab::Gpg::Commit.new(commit).signature
rescue => e
Rails.logger.error("Failed to create signature for commit #{commit.id}. Error: #{e.message}")
end
end
end end
end end
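The worker is now enqueued once per push with the whole batch of SHAs rather than once per commit, for example (SHAs are placeholders):

```ruby
# One Sidekiq job per push; failures for individual commits are logged rather than raised.
CreateGpgSignatureWorker.perform_async(%w[b83d6e391c22777fca1ed3012fce84f6 498214de67004b1da3d820901307bed2], project.id)
```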
---
title: Improve error message when adding invalid user to a project
merge_request: 20885
author: Jacopo Beschi @jacopo-beschi
type: added
---
title: Remove changes_count from MR API documentation where necessary
merge_request: 19745
author: Jan Beckmann
type: fixed
---
title: Ensure installed Helm Tiller for GitLab Managed Apps is protected by mutual
auth
merge_request: 20801
author:
type: changed
---
title: Resolve Copy diff file path as GFM is broken
merge_request: 20725
author:
type: fixed
---
title: Fix sorting by name on milestones page
merge_request: 20881
author:
type: fixed
---
title: Extend gitlab-ci.yml to request junit.xml test reports
merge_request: 20390
author:
type: added
---
title: Performing Commit GPG signature calculation in bulk
merge_request: 20870
author:
type: performance
---
title: Permit concurrent loads in gpg keychain mutex
merge_request: 20894
author: Jasper Maes
type: fixed
---
title: Fix /admin/jobs failing to load due to statement timeout
merge_request: 20909
author:
type: performance
---
title: Add /-/health basic health check endpoint
merge_request: 20456
author:
type: added
---
title: Add support for searching users by confirmed e-mails
merge_request: 20893
author:
type: other
...@@ -182,6 +182,10 @@ module Gitlab ...@@ -182,6 +182,10 @@ module Gitlab
config.action_view.sanitized_allowed_protocols = %w(smb) config.action_view.sanitized_allowed_protocols = %w(smb)
# This middleware needs to precede ActiveRecord::QueryCache and other middlewares that
# connect to the database.
config.middleware.insert_after "Rails::Rack::Logger", "Gitlab::Middleware::BasicHealthCheck"
config.middleware.insert_after Warden::Manager, Rack::Attack config.middleware.insert_after Warden::Manager, Rack::Attack
# Allow access to GitLab API from other domains # Allow access to GitLab API from other domains
......
# frozen_string_literal: true
require 'rbtrace' if ENV['ENABLE_RBTRACE']
...@@ -8,6 +8,8 @@ Sidekiq.default_worker_options = { retry: 3 } ...@@ -8,6 +8,8 @@ Sidekiq.default_worker_options = { retry: 3 }
enable_json_logs = Gitlab.config.sidekiq.log_format == 'json' enable_json_logs = Gitlab.config.sidekiq.log_format == 'json'
Sidekiq.configure_server do |config| Sidekiq.configure_server do |config|
require 'rbtrace' if ENV['ENABLE_RBTRACE']
config.redis = queues_config_hash config.redis = queues_config_hash
config.server_middleware do |chain| config.server_middleware do |chain|
......
...@@ -61,6 +61,7 @@ Rails.application.routes.draw do ...@@ -61,6 +61,7 @@ Rails.application.routes.draw do
get 'health_check(/:checks)' => 'health_check#index', as: :health_check get 'health_check(/:checks)' => 'health_check#index', as: :health_check
scope path: '-' do scope path: '-' do
# '/-/health' implemented by Gitlab::Middleware::BasicHealthCheck # '/-/health' implemented by Gitlab::Middleware::BasicHealthCheck
get 'liveness' => 'health#liveness' get 'liveness' => 'health#liveness'
get 'readiness' => 'health#readiness' get 'readiness' => 'health#readiness'
post 'storage_check' => 'health#storage_check' post 'storage_check' => 'health#storage_check'
......
...@@ -124,6 +124,10 @@ before_fork do |server, worker| ...@@ -124,6 +124,10 @@ before_fork do |server, worker|
end end
after_fork do |server, worker| after_fork do |server, worker|
# Unicorn clears out signals before it forks, so rbtrace won't work
# unless it is enabled after the fork.
require 'rbtrace' if ENV['ENABLE_RBTRACE']
# per-process listener ports for debugging/admin/migrations # per-process listener ports for debugging/admin/migrations
# addr = "127.0.0.1:#{9293 + worker.nr}" # addr = "127.0.0.1:#{9293 + worker.nr}"
# server.listen(addr, :tries => -1, :delay => 5, :tcp_nopush => true) # server.listen(addr, :tries => -1, :delay => 5, :tcp_nopush => true)
......
class AddColumnsForHelmTillerCertificates < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :clusters_applications_helm, :encrypted_ca_key, :text
add_column :clusters_applications_helm, :encrypted_ca_key_iv, :text
add_column :clusters_applications_helm, :ca_cert, :text
end
end
class AddFileFormatToCiJobArtifacts < ActiveRecord::Migration
DOWNTIME = false
def change
add_column :ci_job_artifacts, :file_format, :integer, limit: 2
end
end
...@@ -482,6 +482,7 @@ ActiveRecord::Schema.define(version: 20180722103201) do ...@@ -482,6 +482,7 @@ ActiveRecord::Schema.define(version: 20180722103201) do
t.string "file" t.string "file"
t.integer "file_store" t.integer "file_store"
t.binary "file_sha256" t.binary "file_sha256"
t.integer "file_format", limit: 2
end end
add_index "ci_job_artifacts", ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id", using: :btree add_index "ci_job_artifacts", ["expire_at", "job_id"], name: "index_ci_job_artifacts_on_expire_at_and_job_id", using: :btree
...@@ -746,6 +747,9 @@ ActiveRecord::Schema.define(version: 20180722103201) do ...@@ -746,6 +747,9 @@ ActiveRecord::Schema.define(version: 20180722103201) do
t.integer "status", null: false t.integer "status", null: false
t.string "version", null: false t.string "version", null: false
t.text "status_reason" t.text "status_reason"
t.text "encrypted_ca_key"
t.text "encrypted_ca_key_iv"
t.text "ca_cert"
end end
create_table "clusters_applications_ingress", force: :cascade do |t| create_table "clusters_applications_ingress", force: :cascade do |t|
......
...@@ -201,7 +201,8 @@ instant how code changes impact your production environment. ...@@ -201,7 +201,8 @@ instant how code changes impact your production environment.
- [Prometheus metrics](user/project/integrations/prometheus_library/metrics.md): Let Prometheus collect metrics from various services, like Kubernetes, NGINX, NGINX ingress controller, HAProxy, and Amazon Cloud Watch. - [Prometheus metrics](user/project/integrations/prometheus_library/metrics.md): Let Prometheus collect metrics from various services, like Kubernetes, NGINX, NGINX ingress controller, HAProxy, and Amazon Cloud Watch.
- [GitLab Performance Monitoring](administration/monitoring/performance/index.md): Use InfluxDB and Grafana to monitor the performance of your GitLab instance (will be eventually replaced by Prometheus). - [GitLab Performance Monitoring](administration/monitoring/performance/index.md): Use InfluxDB and Grafana to monitor the performance of your GitLab instance (will be eventually replaced by Prometheus).
- [Health check](user/admin_area/monitoring/health_check.md): GitLab provides liveness and readiness probes to indicate service health and reachability to required services. - [Health check](user/admin_area/monitoring/health_check.md): GitLab provides liveness and readiness probes to indicate service health and reachability to required services.
- [GitLab Cycle Analytics](user/project/cycle_analytics.md): Cycle Analytics measures the time it takes to go from an [idea to production](https://about.gitlab.com/2016/08/05/continuous-integration-delivery-and-deployment-with-gitlab/#from-idea-to-production-with-gitlab) for each project you have. - [GitLab Cycle Analytics](user/project/cycle_analytics.md): Cycle Analytics measures the time it takes to go from an
[idea to production](https://about.gitlab.com/2016/08/05/continuous-integration-delivery-and-deployment-with-gitlab/#from-idea-to-production-with-gitlab) for each project you have.
## Getting started with GitLab ## Getting started with GitLab
......
...@@ -39,6 +39,7 @@ Our support team will not be able to assist on performance issues related to ...@@ -39,6 +39,7 @@ Our support team will not be able to assist on performance issues related to
file system access. file system access.
Customers and users have reported that AWS EFS does not perform well for GitLab's Customers and users have reported that AWS EFS does not perform well for GitLab's
<<<<<<< HEAD
use-case. There are several issues that can cause problems. For these reasons use-case. There are several issues that can cause problems. For these reasons
GitLab does not recommend using EFS with GitLab. GitLab does not recommend using EFS with GitLab.
...@@ -52,6 +53,13 @@ GitLab does not recommend using EFS with GitLab. ...@@ -52,6 +53,13 @@ GitLab does not recommend using EFS with GitLab.
In addition, avoid storing GitLab log files (e.g. those in `/var/log/gitlab`) In addition, avoid storing GitLab log files (e.g. those in `/var/log/gitlab`)
because this will also affect performance. We recommend that the log files be because this will also affect performance. We recommend that the log files be
=======
use-case. Workloads where many small files are written in a serialized manner, like `git`,
are not well-suited for EFS. EBS with an NFS server on top will perform much better.
If you do choose to use EFS, avoid storing GitLab log files (e.g. those in `/var/log/gitlab`)
there because this will also affect performance. We recommend that the log files be
>>>>>>> upstream/master
stored on a local volume. stored on a local volume.
For more details on another person's experience with EFS, see For more details on another person's experience with EFS, see
......
...@@ -77,7 +77,12 @@ and more. However, this is not enabled by default. To enable it, define the ...@@ -77,7 +77,12 @@ and more. However, this is not enabled by default. To enable it, define the
gitlab_rails['env'] = {"ENABLE_RBTRACE" => "1"} gitlab_rails['env'] = {"ENABLE_RBTRACE" => "1"}
``` ```
Then reconfigure the system and restart Unicorn and Sidekiq. Then reconfigure the system and restart Unicorn and Sidekiq. To run this
in Omnibus, run as root:
```sh
/opt/gitlab/embedded/bin/ruby /opt/gitlab/embedded/bin/rbtrace
```
## Common Problems ## Common Problems
......
...@@ -15,11 +15,6 @@ given state (`opened`, `closed`, `locked`, or `merged`) or all of them (`all`). ...@@ -15,11 +15,6 @@ given state (`opened`, `closed`, `locked`, or `merged`) or all of them (`all`).
The pagination parameters `page` and `per_page` can be used to The pagination parameters `page` and `per_page` can be used to
restrict the list of merge requests. restrict the list of merge requests.
**Note**: the `changes_count` value in the response is a string, not an
integer. This is because when an MR has too many changes to display and store,
it will be capped at 1,000. In that case, the API will return the string
`"1000+"` for the changes count.
``` ```
GET /merge_requests GET /merge_requests
GET /merge_requests?state=opened GET /merge_requests?state=opened
...@@ -104,7 +99,6 @@ Parameters: ...@@ -104,7 +99,6 @@ Parameters:
"sha": "8888888888888888888888888888888888888888", "sha": "8888888888888888888888888888888888888888",
"merge_commit_sha": null, "merge_commit_sha": null,
"user_notes_count": 1, "user_notes_count": 1,
"changes_count": "1",
"should_remove_source_branch": true, "should_remove_source_branch": true,
"force_remove_source_branch": false, "force_remove_source_branch": false,
"squash": false, "squash": false,
...@@ -144,10 +138,6 @@ will be the same. In the case of a merge request from a fork, ...@@ -144,10 +138,6 @@ will be the same. In the case of a merge request from a fork,
`target_project_id` and `project_id` will be the same and `target_project_id` and `project_id` will be the same and
`source_project_id` will be the fork project's ID. `source_project_id` will be the fork project's ID.
**Note**: the `changes_count` value in the response is a string, not an
integer. This is because when an MR has too many changes to display and store,
it will be capped at 1,000. In that case, the API will return the string
`"1000+"` for the changes count.
Parameters: Parameters:
...@@ -224,7 +214,6 @@ Parameters: ...@@ -224,7 +214,6 @@ Parameters:
"sha": "8888888888888888888888888888888888888888", "sha": "8888888888888888888888888888888888888888",
"merge_commit_sha": null, "merge_commit_sha": null,
"user_notes_count": 1, "user_notes_count": 1,
"changes_count": "1",
"should_remove_source_branch": true, "should_remove_source_branch": true,
"force_remove_source_branch": false, "force_remove_source_branch": false,
"squash": false, "squash": false,
...@@ -332,7 +321,6 @@ Parameters: ...@@ -332,7 +321,6 @@ Parameters:
"sha": "8888888888888888888888888888888888888888", "sha": "8888888888888888888888888888888888888888",
"merge_commit_sha": null, "merge_commit_sha": null,
"user_notes_count": 1, "user_notes_count": 1,
"changes_count": "1",
"should_remove_source_branch": true, "should_remove_source_branch": true,
"force_remove_source_branch": false, "force_remove_source_branch": false,
"web_url": "http://example.com/example/example/merge_requests/1", "web_url": "http://example.com/example/example/merge_requests/1",
...@@ -351,6 +339,11 @@ Parameters: ...@@ -351,6 +339,11 @@ Parameters:
Shows information about a single merge request. Shows information about a single merge request.
**Note**: the `changes_count` value in the response is a string, not an
integer. This is because when an MR has too many changes to display and store,
it will be capped at 1,000. In that case, the API will return the string
`"1000+"` for the changes count.
``` ```
GET /projects/:id/merge_requests/:merge_request_iid GET /projects/:id/merge_requests/:merge_request_iid
``` ```
......
...@@ -2,10 +2,8 @@ ...@@ -2,10 +2,8 @@
comments: false comments: false
--- ---
DANGER: This guide exists for reference of how an AWS deployment could work.
We are currently seeing very slow EFS access performance which causes GitLab to
be 5-10x slower than using NFS or Local disk. We _do not_ recommend follow this
guide at this time.
> **Note**: We **do not** recommend using the AWS Elastic File System (EFS), as it can result
in [significantly degraded performance](https://gitlab.com/gitlab-org/gitlab-ee/blob/master/doc/administration/high_availability/nfs.md#aws-elastic-file-system).
# High Availability on AWS # High Availability on AWS
......
...@@ -20,14 +20,24 @@ To access monitoring resources, the client IP needs to be included in a whitelis ...@@ -20,14 +20,24 @@ To access monitoring resources, the client IP needs to be included in a whitelis
[Read how to add IPs to a whitelist for the monitoring endpoints][admin]. [Read how to add IPs to a whitelist for the monitoring endpoints][admin].
## Using the endpoint ## Using the endpoints
With default whitelist settings, the probes can be accessed from localhost: With default whitelist settings, the probes can be accessed from localhost:
- `http://localhost/-/health`
- `http://localhost/-/readiness` - `http://localhost/-/readiness`
- `http://localhost/-/liveness` - `http://localhost/-/liveness`
which will then provide a report of system health in JSON format.
The first endpoint, `/-/health`, only checks whether the application server is running. It does
not verify that the database or other services are running. A successful response will return
a 200 status code with the following message:
```
GitLab OK
```
The readiness and liveness probes will provide a report of system health in JSON format.
Readiness example output: Readiness example output:
...@@ -42,12 +52,6 @@ Readiness example output: ...@@ -42,12 +52,6 @@ Readiness example output:
"shared_state_check" : { "shared_state_check" : {
"status" : "ok" "status" : "ok"
}, },
"fs_shards_check" : {
"labels" : {
"shard" : "default"
},
"status" : "ok"
},
"db_check" : { "db_check" : {
"status" : "ok" "status" : "ok"
}, },
...@@ -61,9 +65,6 @@ Liveness example output: ...@@ -61,9 +65,6 @@ Liveness example output:
``` ```
{ {
"fs_shards_check" : {
"status" : "ok"
},
"cache_check" : { "cache_check" : {
"status" : "ok" "status" : "ok"
}, },
......
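For completeness, a minimal sketch of probing the new endpoint from a whitelisted client (host assumed to be a default local install):

```ruby
require 'net/http'

# The basic health check answers before Rails touches the database;
# clients outside the monitoring IP whitelist get an empty 404 instead.
response = Net::HTTP.get_response(URI('http://localhost/-/health'))
response.code # => "200"
response.body # => "GitLab OK"
```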
...@@ -1264,7 +1264,13 @@ module API ...@@ -1264,7 +1264,13 @@ module API
end end
class Artifacts < Grape::Entity class Artifacts < Grape::Entity
expose :name, :untracked, :paths, :when, :expire_in expose :name
expose :untracked
expose :paths
expose :when
expose :expire_in
expose :artifact_type
expose :artifact_format
end end
class Cache < Grape::Entity class Cache < Grape::Entity
......
...@@ -75,7 +75,10 @@ module API ...@@ -75,7 +75,10 @@ module API
member = source.members.find_by(user_id: params[:user_id]) member = source.members.find_by(user_id: params[:user_id])
conflict!('Member already exists') if member conflict!('Member already exists') if member
member = source.add_user(params[:user_id], params[:access_level], current_user: current_user, expires_at: params[:expires_at]) user = User.find_by_id(params[:user_id])
not_found!('User') unless user
member = source.add_user(user, params[:access_level], current_user: current_user, expires_at: params[:expires_at])
if !member if !member
not_allowed! # This currently can only be reached in EE not_allowed! # This currently can only be reached in EE
......
...@@ -30,8 +30,11 @@ module API ...@@ -30,8 +30,11 @@ module API
end end
end end
<<<<<<< HEAD
prepend EE::API::Projects prepend EE::API::Projects
=======
>>>>>>> upstream/master
def self.update_params_at_least_one_of def self.update_params_at_least_one_of
[ [
:jobs_enabled, :jobs_enabled,
...@@ -294,7 +297,10 @@ module API ...@@ -294,7 +297,10 @@ module API
optional :path, type: String, desc: 'The path of the repository' optional :path, type: String, desc: 'The path of the repository'
use :optional_project_params use :optional_project_params
<<<<<<< HEAD
use :optional_update_params_ee use :optional_update_params_ee
=======
>>>>>>> upstream/master
at_least_one_of(*::API::Projects.update_params_at_least_one_of) at_least_one_of(*::API::Projects.update_params_at_least_one_of)
end end
......
...@@ -109,7 +109,7 @@ module API ...@@ -109,7 +109,7 @@ module API
if result.valid? if result.valid?
if result.build if result.build
Gitlab::Metrics.add_event(:build_found) Gitlab::Metrics.add_event(:build_found)
present result.build, with: Entities::JobRequest::Response present Ci::BuildRunnerPresenter.new(result.build), with: Entities::JobRequest::Response
else else
Gitlab::Metrics.add_event(:build_not_found) Gitlab::Metrics.add_event(:build_not_found)
header 'X-GitLab-Last-Update', new_update header 'X-GitLab-Last-Update', new_update
...@@ -231,6 +231,10 @@ module API ...@@ -231,6 +231,10 @@ module API
requires :id, type: Integer, desc: %q(Job's ID) requires :id, type: Integer, desc: %q(Job's ID)
optional :token, type: String, desc: %q(Job's authentication token) optional :token, type: String, desc: %q(Job's authentication token)
optional :expire_in, type: String, desc: %q(Specify when artifacts should expire) optional :expire_in, type: String, desc: %q(Specify when artifacts should expire)
optional :artifact_type, type: String, desc: %q(The type of artifact),
default: 'archive', values: Ci::JobArtifact.file_types.keys
optional :artifact_format, type: String, desc: %q(The format of artifact),
default: 'zip', values: Ci::JobArtifact.file_formats.keys
optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse)) optional 'file.path', type: String, desc: %q(path to locally stored body (generated by Workhorse))
optional 'file.name', type: String, desc: %q(real filename as send in Content-Disposition (generated by Workhorse)) optional 'file.name', type: String, desc: %q(real filename as send in Content-Disposition (generated by Workhorse))
optional 'file.type', type: String, desc: %q(real content type as send in Content-Type (generated by Workhorse)) optional 'file.type', type: String, desc: %q(real content type as send in Content-Type (generated by Workhorse))
...@@ -254,29 +258,29 @@ module API ...@@ -254,29 +258,29 @@ module API
bad_request!('Missing artifacts file!') unless artifacts bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size file_to_large! unless artifacts.size < max_artifacts_size
bad_request!("Already uploaded") if job.job_artifacts_archive
expire_in = params['expire_in'] || expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
job.build_job_artifacts_archive( job.job_artifacts.build(
project: job.project, project: job.project,
file: artifacts, file: artifacts,
file_type: :archive, file_type: params['artifact_type'],
file_format: params['artifact_format'],
file_sha256: artifacts.sha256, file_sha256: artifacts.sha256,
expire_in: expire_in) expire_in: expire_in)
if metadata if metadata
job.build_job_artifacts_metadata( job.job_artifacts.build(
project: job.project, project: job.project,
file: metadata, file: metadata,
file_type: :metadata, file_type: :metadata,
file_format: :gzip,
file_sha256: metadata.sha256, file_sha256: metadata.sha256,
expire_in: expire_in) expire_in: expire_in)
end end
if job.update(artifacts_expire_in: expire_in) if job.update(artifacts_expire_in: expire_in)
present job, with: Entities::JobRequest::Response present Ci::BuildRunnerPresenter.new(job), with: Entities::JobRequest::Response
else else
render_validation_error!(job) render_validation_error!(job)
end end
......
...@@ -6,13 +6,16 @@ module Gitlab ...@@ -6,13 +6,16 @@ module Gitlab
# Entry that represents a configuration of job artifacts. # Entry that represents a configuration of job artifacts.
# #
class Artifacts < Node class Artifacts < Node
include Configurable
include Validatable include Validatable
include Attributable include Attributable
ALLOWED_KEYS = %i[name untracked paths when expire_in].freeze ALLOWED_KEYS = %i[name untracked paths reports when expire_in].freeze
attributes ALLOWED_KEYS attributes ALLOWED_KEYS
entry :reports, Entry::Reports, description: 'Report-type artifacts.'
validations do validations do
validates :config, type: Hash validates :config, type: Hash
validates :config, allowed_keys: ALLOWED_KEYS validates :config, allowed_keys: ALLOWED_KEYS
...@@ -21,6 +24,7 @@ module Gitlab ...@@ -21,6 +24,7 @@ module Gitlab
validates :name, type: String validates :name, type: String
validates :untracked, boolean: true validates :untracked, boolean: true
validates :paths, array_of_strings: true validates :paths, array_of_strings: true
validates :reports, type: Hash
validates :when, validates :when,
inclusion: { in: %w[on_success on_failure always], inclusion: { in: %w[on_success on_failure always],
message: 'should be on_success, on_failure ' \ message: 'should be on_success, on_failure ' \
...@@ -28,6 +32,13 @@ module Gitlab ...@@ -28,6 +32,13 @@ module Gitlab
validates :expire_in, duration: true validates :expire_in, duration: true
end end
end end
helpers :reports
def value
@config[:reports] = reports_value if @config.key?(:reports)
@config
end
end end
end end
end end
......
...@@ -9,18 +9,7 @@ module Gitlab ...@@ -9,18 +9,7 @@ module Gitlab
include Validatable include Validatable
validations do validations do
include LegacyValidationHelpers validates :config, array_of_strings_or_string: true
validate do
unless string_or_array_of_strings?(config)
errors.add(:config,
'should be a string or an array of strings')
end
end
def string_or_array_of_strings?(field)
validate_string(field) || validate_array_of_strings(field)
end
end end
def value def value
......
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a configuration of job artifacts.
#
class Reports < Node
include Validatable
include Attributable
ALLOWED_KEYS = %i[junit].freeze
attributes ALLOWED_KEYS
validations do
validates :config, type: Hash
validates :config, allowed_keys: ALLOWED_KEYS
with_options allow_nil: true do
validates :junit, array_of_strings_or_string: true
end
end
def value
@config.transform_values { |v| Array(v) }
end
end
end
end
end
end
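The `value` override normalizes a single string into an array, so both accepted shapes of the `junit` key end up identical downstream; an illustrative sketch:

```ruby
# Illustration of the normalization performed by Entry::Reports#value.
{ junit: 'rspec.xml' }.transform_values { |v| Array(v) }
# => { junit: ["rspec.xml"] }
{ junit: %w[rspec-1.xml rspec-2.xml] }.transform_values { |v| Array(v) }
# => { junit: ["rspec-1.xml", "rspec-2.xml"] }
```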
...@@ -130,6 +130,20 @@ module Gitlab ...@@ -130,6 +130,20 @@ module Gitlab
end end
end end
class ArrayOfStringsOrStringValidator < RegexpValidator
def validate_each(record, attribute, value)
unless validate_array_of_strings_or_string(value)
record.errors.add(attribute, 'should be an array of strings or a string')
end
end
private
def validate_array_of_strings_or_string(values)
validate_array_of_strings(values) || validate_string(values)
end
end
class TypeValidator < ActiveModel::EachValidator class TypeValidator < ActiveModel::EachValidator
def validate_each(record, attribute, value) def validate_each(record, attribute, value)
type = options[:with] type = options[:with]
......
...@@ -164,6 +164,8 @@ module Gitlab ...@@ -164,6 +164,8 @@ module Gitlab
def create_build_trace!(job, path) def create_build_trace!(job, path)
File.open(path) do |stream| File.open(path) do |stream|
# TODO: Set `file_format: :raw` after we've cleaned up legacy traces migration
# https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/20307
job.create_job_artifacts_trace!( job.create_job_artifacts_trace!(
project: job.project, project: job.project,
file_type: :trace, file_type: :trace,
......
...@@ -71,8 +71,16 @@ module Gitlab ...@@ -71,8 +71,16 @@ module Gitlab
if MUTEX.locked? && MUTEX.owned? if MUTEX.locked? && MUTEX.owned?
optimistic_using_tmp_keychain(&block) optimistic_using_tmp_keychain(&block)
else else
MUTEX.synchronize do if Gitlab.rails5?
optimistic_using_tmp_keychain(&block) ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
MUTEX.synchronize do
optimistic_using_tmp_keychain(&block)
end
end
else
MUTEX.synchronize do
optimistic_using_tmp_keychain(&block)
end
end end
end end
end end
......
module Gitlab module Gitlab
module Kubernetes module Kubernetes
class ConfigMap class ConfigMap
def initialize(name, values = "") def initialize(name, files)
@name = name @name = name
@values = values @files = files
end end
def generate def generate
resource = ::Kubeclient::Resource.new resource = ::Kubeclient::Resource.new
resource.metadata = metadata resource.metadata = metadata
resource.data = { values: values } resource.data = files
resource resource
end end
...@@ -19,7 +19,7 @@ module Gitlab ...@@ -19,7 +19,7 @@ module Gitlab
private private
attr_reader :name, :values attr_reader :name, :files
def metadata def metadata
{ {
......
...@@ -11,7 +11,7 @@ module Gitlab ...@@ -11,7 +11,7 @@ module Gitlab
def install(command) def install(command)
namespace.ensure_exists! namespace.ensure_exists!
create_config_map(command) if command.config_map? create_config_map(command)
kubeclient.create_pod(command.pod_resource) kubeclient.create_pod(command.pod_resource)
end end
......
module Gitlab module Gitlab
module Kubernetes module Kubernetes
module Helm module Helm
class BaseCommand module BaseCommand
attr_reader :name
def initialize(name)
@name = name
end
def pod_resource def pod_resource
Gitlab::Kubernetes::Helm::Pod.new(self, namespace).generate Gitlab::Kubernetes::Helm::Pod.new(self, namespace).generate
end end
...@@ -24,16 +18,32 @@ module Gitlab ...@@ -24,16 +18,32 @@ module Gitlab
HEREDOC HEREDOC
end end
def config_map?
false
end
def pod_name def pod_name
"install-#{name}" "install-#{name}"
end end
def config_map_resource
Gitlab::Kubernetes::ConfigMap.new(name, files).generate
end
def file_names
files.keys
end
def name
raise "Not implemented"
end
def files
raise "Not implemented"
end
private private
def files_dir
"/data/helm/#{name}/config"
end
def namespace def namespace
Gitlab::Kubernetes::Helm::NAMESPACE Gitlab::Kubernetes::Helm::NAMESPACE
end end
......
module Gitlab
module Kubernetes
module Helm
class Certificate
INFINITE_EXPIRY = 1000.years
SHORT_EXPIRY = 30.minutes
attr_reader :key, :cert
def key_string
@key.to_s
end
def cert_string
@cert.to_pem
end
def self.from_strings(key_string, cert_string)
key = OpenSSL::PKey::RSA.new(key_string)
cert = OpenSSL::X509::Certificate.new(cert_string)
new(key, cert)
end
def self.generate_root
_issue(signed_by: nil, expires_in: INFINITE_EXPIRY, certificate_authority: true)
end
def issue(expires_in: SHORT_EXPIRY)
self.class._issue(signed_by: self, expires_in: expires_in, certificate_authority: false)
end
private
def self._issue(signed_by:, expires_in:, certificate_authority:)
key = OpenSSL::PKey::RSA.new(4096)
public_key = key.public_key
subject = OpenSSL::X509::Name.parse("/C=US")
cert = OpenSSL::X509::Certificate.new
cert.subject = subject
cert.issuer = signed_by&.cert&.subject || subject
cert.not_before = Time.now
cert.not_after = expires_in.from_now
cert.public_key = public_key
cert.serial = 0x0
cert.version = 2
if certificate_authority
extension_factory = OpenSSL::X509::ExtensionFactory.new
extension_factory.subject_certificate = cert
extension_factory.issuer_certificate = cert
cert.add_extension(extension_factory.create_extension('subjectKeyIdentifier', 'hash'))
cert.add_extension(extension_factory.create_extension('basicConstraints', 'CA:TRUE', true))
cert.add_extension(extension_factory.create_extension('keyUsage', 'cRLSign,keyCertSign', true))
end
cert.sign(signed_by&.key || key, OpenSSL::Digest::SHA256.new)
new(key, cert)
end
def initialize(key, cert)
@key = key
@cert = cert
end
end
end
end
end
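A hedged sketch of how this class is meant to be used by the Helm application model above (method names taken from this file; storage fields from `clusters/applications/helm.rb`):

```ruby
# Generate a long-lived root CA, then issue a short-lived (30 minute) client certificate from it.
root   = Gitlab::Kubernetes::Helm::Certificate.generate_root
client = root.issue

root.key_string    # stored encrypted as the application's ca_key
root.cert_string   # stored as ca_cert and mounted as ca.pem
client.cert_string # mounted as cert.pem in the install/init pod
client.key_string  # mounted as key.pem in the install/init pod
```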
module Gitlab module Gitlab
module Kubernetes module Kubernetes
module Helm module Helm
class InitCommand < BaseCommand class InitCommand
include BaseCommand
attr_reader :name, :files
def initialize(name:, files:)
@name = name
@files = files
end
def generate_script def generate_script
super + [ super + [
init_helm_command init_helm_command
...@@ -11,7 +20,12 @@ module Gitlab ...@@ -11,7 +20,12 @@ module Gitlab
private private
def init_helm_command def init_helm_command
"helm init >/dev/null" tls_flags = "--tiller-tls" \
" --tiller-tls-verify --tls-ca-cert #{files_dir}/ca.pem" \
" --tiller-tls-cert #{files_dir}/cert.pem" \
" --tiller-tls-key #{files_dir}/key.pem"
"helm init #{tls_flags} >/dev/null"
end end
end end
end end
......
module Gitlab module Gitlab
module Kubernetes module Kubernetes
module Helm module Helm
class InstallCommand < BaseCommand class InstallCommand
attr_reader :name, :chart, :version, :repository, :values include BaseCommand
def initialize(name, chart:, values:, version: nil, repository: nil) attr_reader :name, :files, :chart, :version, :repository
def initialize(name:, chart:, files:, version: nil, repository: nil)
@name = name @name = name
@chart = chart @chart = chart
@version = version @version = version
@values = values @files = files
@repository = repository @repository = repository
end end
...@@ -20,14 +22,6 @@ module Gitlab ...@@ -20,14 +22,6 @@ module Gitlab
].compact.join("\n") ].compact.join("\n")
end end
def config_map?
true
end
def config_map_resource
Gitlab::Kubernetes::ConfigMap.new(name, values).generate
end
private private
def init_command def init_command
...@@ -39,14 +33,27 @@ module Gitlab ...@@ -39,14 +33,27 @@ module Gitlab
end end
def script_command def script_command
<<~HEREDOC "helm install" \
helm install #{chart} --name #{name}#{optional_version_flag} --namespace #{Gitlab::Kubernetes::Helm::NAMESPACE} -f /data/helm/#{name}/config/values.yaml >/dev/null "#{optional_tls_flags} " \
HEREDOC "#{chart} " \
"--name #{name}" \
"#{optional_version_flag} " \
"--namespace #{Gitlab::Kubernetes::Helm::NAMESPACE} " \
"-f /data/helm/#{name}/config/values.yaml >/dev/null\n"
end end
def optional_version_flag def optional_version_flag
" --version #{version}" if version " --version #{version}" if version
end end
def optional_tls_flags
return unless files.key?(:'ca.pem')
" --tls" \
" --tls-ca-cert #{files_dir}/ca.pem" \
" --tls-cert #{files_dir}/cert.pem" \
" --tls-key #{files_dir}/key.pem"
end
end end
end end
end end
......
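Putting it together, a rough sketch of the script fragment the install command produces once TLS material is present in `files` (application name, chart, and file contents invented; `<NAMESPACE>` stands for `Gitlab::Kubernetes::Helm::NAMESPACE`):

```ruby
command = Gitlab::Kubernetes::Helm::InstallCommand.new(
  name: 'ingress',
  chart: 'stable/nginx-ingress',
  files: { 'values.yaml': '', 'ca.pem': '<ca>', 'cert.pem': '<cert>', 'key.pem': '<key>' }
)

command.send(:script_command)
# Produces, approximately:
#   helm install --tls \
#     --tls-ca-cert /data/helm/ingress/config/ca.pem \
#     --tls-cert /data/helm/ingress/config/cert.pem \
#     --tls-key /data/helm/ingress/config/key.pem \
#     stable/nginx-ingress --name ingress \
#     --namespace <NAMESPACE> -f /data/helm/ingress/config/values.yaml >/dev/null
```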
...@@ -10,10 +10,8 @@ module Gitlab ...@@ -10,10 +10,8 @@ module Gitlab
def generate def generate
spec = { containers: [container_specification], restartPolicy: 'Never' } spec = { containers: [container_specification], restartPolicy: 'Never' }
if command.config_map? spec[:volumes] = volumes_specification
spec[:volumes] = volumes_specification spec[:containers][0][:volumeMounts] = volume_mounts_specification
spec[:containers][0][:volumeMounts] = volume_mounts_specification
end
::Kubeclient::Resource.new(metadata: metadata, spec: spec) ::Kubeclient::Resource.new(metadata: metadata, spec: spec)
end end
...@@ -61,7 +59,7 @@ module Gitlab ...@@ -61,7 +59,7 @@ module Gitlab
name: 'configuration-volume', name: 'configuration-volume',
configMap: { configMap: {
name: "values-content-configuration-#{command.name}", name: "values-content-configuration-#{command.name}",
items: [{ key: 'values', path: 'values.yaml' }] items: command.file_names.map { |name| { key: name, path: name } }
} }
} }
] ]
......
# frozen_string_literal: true
# This middleware provides a health check that does not hit the database. Its purpose
# is to notify the prober that the application server is handling requests, but a 200
# response does not signify that the database or other services are ready.
#
# See https://thisdata.com/blog/making-a-rails-health-check-that-doesnt-hit-the-database/ for
# more details.
module Gitlab
module Middleware
class BasicHealthCheck
# This can't be frozen because Rails::Rack::Logger wraps the body
# rubocop:disable Style/MutableConstant
OK_RESPONSE = [200, { 'Content-Type' => 'text/plain' }, ["GitLab OK"]]
EMPTY_RESPONSE = [404, { 'Content-Type' => 'text/plain' }, [""]]
# rubocop:enable Style/MutableConstant
HEALTH_PATH = '/-/health'
def initialize(app)
@app = app
end
def call(env)
return @app.call(env) unless env['PATH_INFO'] == HEALTH_PATH
request = Rack::Request.new(env)
return OK_RESPONSE if client_ip_whitelisted?(request)
EMPTY_RESPONSE
end
def client_ip_whitelisted?(request)
ip_whitelist.any? { |e| e.include?(request.ip) }
end
def ip_whitelist
@ip_whitelist ||= Settings.monitoring.ip_whitelist.map(&IPAddr.method(:new))
end
end
end
end
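A minimal sketch of exercising the new middleware with a bare Rack-style env hash; the downstream app stub and addresses are assumptions, and the whitelist itself comes from Settings.monitoring.ip_whitelist:

app = ->(_env) { [404, { 'Content-Type' => 'text/plain' }, ['']] } # stub downstream app
middleware = Gitlab::Middleware::BasicHealthCheck.new(app)

status, _headers, body = middleware.call('PATH_INFO' => '/-/health', 'REMOTE_ADDR' => '127.0.0.1')
# => 200 and ['GitLab OK'] when REMOTE_ADDR is whitelisted; a non-whitelisted
#    address gets the empty 404 response, and any other path is forwarded to app.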
...@@ -44,10 +44,11 @@ module QA ...@@ -44,10 +44,11 @@ module QA
page.await_installed(:helm) page.await_installed(:helm)
page.install!(:ingress) if @install_ingress page.install!(:ingress) if @install_ingress
page.await_installed(:ingress) if @install_ingress
page.install!(:prometheus) if @install_prometheus page.install!(:prometheus) if @install_prometheus
page.await_installed(:prometheus) if @install_prometheus
page.install!(:runner) if @install_runner page.install!(:runner) if @install_runner
page.await_installed(:ingress) if @install_ingress
page.await_installed(:prometheus) if @install_prometheus
page.await_installed(:runner) if @install_runner page.await_installed(:runner) if @install_runner
end end
end end
......
...@@ -16,6 +16,7 @@ module QA ...@@ -16,6 +16,7 @@ module QA
def install!(application_name) def install!(application_name)
within(".js-cluster-application-row-#{application_name}") do within(".js-cluster-application-row-#{application_name}") do
page.has_button?('Install', wait: 30)
click_on 'Install' click_on 'Install'
end end
end end
......
...@@ -187,6 +187,13 @@ FactoryBot.define do ...@@ -187,6 +187,13 @@ FactoryBot.define do
end end
end end
trait :test_reports do
after(:create) do |build|
create(:ci_job_artifact, :junit, job: build)
build.reload
end
end
trait :expired do trait :expired do
artifacts_expire_at 1.minute.ago artifacts_expire_at 1.minute.ago
end end
......
...@@ -4,6 +4,7 @@ FactoryBot.define do ...@@ -4,6 +4,7 @@ FactoryBot.define do
factory :ci_job_artifact, class: Ci::JobArtifact do factory :ci_job_artifact, class: Ci::JobArtifact do
job factory: :ci_build job factory: :ci_build
file_type :archive file_type :archive
file_format :zip
trait :remote_store do trait :remote_store do
file_store JobArtifactUploader::Store::REMOTE file_store JobArtifactUploader::Store::REMOTE
...@@ -15,6 +16,7 @@ FactoryBot.define do ...@@ -15,6 +16,7 @@ FactoryBot.define do
trait :archive do trait :archive do
file_type :archive file_type :archive
file_format :zip
after(:build) do |artifact, _| after(:build) do |artifact, _|
artifact.file = fixture_file_upload( artifact.file = fixture_file_upload(
...@@ -24,6 +26,7 @@ FactoryBot.define do ...@@ -24,6 +26,7 @@ FactoryBot.define do
trait :metadata do trait :metadata do
file_type :metadata file_type :metadata
file_format :gzip
after(:build) do |artifact, _| after(:build) do |artifact, _|
artifact.file = fixture_file_upload( artifact.file = fixture_file_upload(
...@@ -33,6 +36,7 @@ FactoryBot.define do ...@@ -33,6 +36,7 @@ FactoryBot.define do
trait :trace do trait :trace do
file_type :trace file_type :trace
file_format :raw
after(:build) do |artifact, evaluator| after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload( artifact.file = fixture_file_upload(
...@@ -40,6 +44,16 @@ FactoryBot.define do ...@@ -40,6 +44,16 @@ FactoryBot.define do
end end
end end
trait :junit do
file_type :junit
file_format :gzip
after(:build) do |artifact, evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/junit.xml.gz'), 'application/x-gzip')
end
end
trait :correct_checksum do trait :correct_checksum do
after(:build) do |artifact, evaluator| after(:build) do |artifact, evaluator|
artifact.file_sha256 = Digest::SHA256.file(artifact.file.path).hexdigest artifact.file_sha256 = Digest::SHA256.file(artifact.file.path).hexdigest
......
...@@ -45,11 +45,21 @@ FactoryBot.define do ...@@ -45,11 +45,21 @@ FactoryBot.define do
updated_at ClusterWaitForAppInstallationWorker::TIMEOUT.ago updated_at ClusterWaitForAppInstallationWorker::TIMEOUT.ago
end end
factory :clusters_applications_ingress, class: Clusters::Applications::Ingress factory :clusters_applications_ingress, class: Clusters::Applications::Ingress do
factory :clusters_applications_prometheus, class: Clusters::Applications::Prometheus cluster factory: %i(cluster with_installed_helm provided_by_gcp)
factory :clusters_applications_runner, class: Clusters::Applications::Runner end
factory :clusters_applications_prometheus, class: Clusters::Applications::Prometheus do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
factory :clusters_applications_runner, class: Clusters::Applications::Runner do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end
factory :clusters_applications_jupyter, class: Clusters::Applications::Jupyter do factory :clusters_applications_jupyter, class: Clusters::Applications::Jupyter do
oauth_application factory: :oauth_application oauth_application factory: :oauth_application
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
end end
end end
end end
...@@ -36,5 +36,9 @@ FactoryBot.define do ...@@ -36,5 +36,9 @@ FactoryBot.define do
trait :production_environment do trait :production_environment do
sequence(:environment_scope) { |n| "production#{n}/*" } sequence(:environment_scope) { |n| "production#{n}/*" }
end end
trait :with_installed_helm do
application_helm factory: %i(clusters_applications_helm installed)
end
end end
end end
...@@ -46,12 +46,14 @@ describe 'Clusters Applications', :js do ...@@ -46,12 +46,14 @@ describe 'Clusters Applications', :js do
end end
end end
it 'he sees status transition' do it 'they see status transition' do
page.within('.js-cluster-application-row-helm') do page.within('.js-cluster-application-row-helm') do
# FE sends request and gets the response, then the button is "Install" # FE sends request and gets the response, then the button is "Install"
expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true') expect(page.find(:css, '.js-cluster-application-install-button')['disabled']).to eq('true')
expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install') expect(page).to have_css('.js-cluster-application-install-button', exact_text: 'Install')
wait_until_helm_created!
Clusters::Cluster.last.application_helm.make_installing! Clusters::Cluster.last.application_helm.make_installing!
# FE starts polling and updates the button to "Installing" # FE starts polling and updates the button to "Installing"
...@@ -83,7 +85,7 @@ describe 'Clusters Applications', :js do ...@@ -83,7 +85,7 @@ describe 'Clusters Applications', :js do
end end
end end
it 'he sees status transition' do it 'they see status transition' do
page.within('.js-cluster-application-row-ingress') do page.within('.js-cluster-application-row-ingress') do
# FE sends request and gets the response, then the button is "Install" # FE sends request and gets the response, then the button is "Install"
expect(page).to have_css('.js-cluster-application-install-button[disabled]') expect(page).to have_css('.js-cluster-application-install-button[disabled]')
...@@ -116,4 +118,14 @@ describe 'Clusters Applications', :js do ...@@ -116,4 +118,14 @@ describe 'Clusters Applications', :js do
end end
end end
end end
def wait_until_helm_created!
retries = 0
while Clusters::Cluster.last.application_helm.nil?
raise "Timed out waiting for helm application to be created in DB" if (retries += 1) > 3
sleep(1)
end
end
end end
...@@ -303,7 +303,7 @@ describe('diff_file_header', () => { ...@@ -303,7 +303,7 @@ describe('diff_file_header', () => {
const button = vm.$el.querySelector('.btn-clipboard'); const button = vm.$el.querySelector('.btn-clipboard');
expect(button).not.toBe(null); expect(button).not.toBe(null);
expect(button.dataset.clipboardText).toBe(props.diffFile.filePath); expect(button.dataset.clipboardText).toBe('{"text":"files/ruby/popen.rb","gfm":"`files/ruby/popen.rb`"}');
}); });
describe('file mode', () => { describe('file mode', () => {
......
...@@ -6,31 +6,47 @@ describe('clipboard button', () => { ...@@ -6,31 +6,47 @@ describe('clipboard button', () => {
const Component = Vue.extend(clipboardButton); const Component = Vue.extend(clipboardButton);
let vm; let vm;
beforeEach(() => {
vm = mountComponent(Component, {
text: 'copy me',
title: 'Copy this value into Clipboard!',
cssClass: 'btn-danger',
});
});
afterEach(() => { afterEach(() => {
vm.$destroy(); vm.$destroy();
}); });
it('renders a button for clipboard', () => { describe('without gfm', () => {
expect(vm.$el.tagName).toEqual('BUTTON'); beforeEach(() => {
expect(vm.$el.getAttribute('data-clipboard-text')).toEqual('copy me'); vm = mountComponent(Component, {
expect(vm.$el).toHaveSpriteIcon('duplicate'); text: 'copy me',
}); title: 'Copy this value into Clipboard!',
cssClass: 'btn-danger',
});
});
it('should have a tooltip with default values', () => { it('renders a button for clipboard', () => {
expect(vm.$el.getAttribute('data-original-title')).toEqual('Copy this value into Clipboard!'); expect(vm.$el.tagName).toEqual('BUTTON');
expect(vm.$el.getAttribute('data-placement')).toEqual('top'); expect(vm.$el.getAttribute('data-clipboard-text')).toEqual('copy me');
expect(vm.$el.getAttribute('data-container')).toEqual(null); expect(vm.$el).toHaveSpriteIcon('duplicate');
});
it('should have a tooltip with default values', () => {
expect(vm.$el.getAttribute('data-original-title')).toEqual('Copy this value into Clipboard!');
expect(vm.$el.getAttribute('data-placement')).toEqual('top');
expect(vm.$el.getAttribute('data-container')).toEqual(null);
});
it('should render provided classname', () => {
expect(vm.$el.classList).toContain('btn-danger');
});
}); });
it('should render provided classname', () => { describe('with gfm', () => {
expect(vm.$el.classList).toContain('btn-danger'); it('sets data-clipboard-text with gfm', () => {
vm = mountComponent(Component, {
text: 'copy me',
gfm: '`path/to/file`',
title: 'Copy this value into Clipboard!',
cssClass: 'btn-danger',
});
expect(vm.$el.getAttribute('data-clipboard-text')).toEqual(
'{"text":"copy me","gfm":"`path/to/file`"}',
);
});
}); });
}); });
...@@ -18,6 +18,14 @@ describe Gitlab::Ci::Config::Entry::Artifacts do ...@@ -18,6 +18,14 @@ describe Gitlab::Ci::Config::Entry::Artifacts do
expect(entry).to be_valid expect(entry).to be_valid
end end
end end
context "when value includes 'reports' keyword" do
let(:config) { { paths: %w[public/], reports: { junit: 'junit.xml' } } }
it 'returns general artifact and report-type artifacts configuration' do
expect(entry.value).to eq config
end
end
end end
context 'when entry value is not correct' do context 'when entry value is not correct' do
...@@ -39,6 +47,15 @@ describe Gitlab::Ci::Config::Entry::Artifacts do ...@@ -39,6 +47,15 @@ describe Gitlab::Ci::Config::Entry::Artifacts do
.to include 'artifacts config contains unknown keys: test' .to include 'artifacts config contains unknown keys: test'
end end
end end
context "when 'reports' keyword is not hash" do
let(:config) { { paths: %w[public/], reports: 'junit.xml' } }
it 'reports error' do
expect(entry.errors)
.to include 'artifacts reports should be a hash'
end
end
end end
end end
end end
......
...@@ -41,8 +41,7 @@ describe Gitlab::Ci::Config::Entry::Commands do ...@@ -41,8 +41,7 @@ describe Gitlab::Ci::Config::Entry::Commands do
describe '#errors' do describe '#errors' do
it 'saves errors' do it 'saves errors' do
expect(entry.errors) expect(entry.errors)
.to include 'commands config should be a ' \ .to include 'commands config should be an array of strings or a string'
'string or an array of strings'
end end
end end
end end
......
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Reports do
let(:entry) { described_class.new(config) }
describe 'validation' do
context 'when entry config value is correct' do
let(:config) { { junit: %w[junit.xml] } }
describe '#value' do
it 'returns artifacts configuration' do
expect(entry.value).to eq config
end
end
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
context 'when value is not array' do
let(:config) { { junit: 'junit.xml' } }
it 'converts to array' do
expect(entry.value).to eq({ junit: ['junit.xml'] })
end
end
end
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of attribute is invalid' do
let(:config) { { junit: 10 } }
it 'reports error' do
expect(entry.errors)
.to include 'reports junit should be an array of strings or a string'
end
end
context 'when there is an unknown key present' do
let(:config) { { codeclimate: 'codeclimate.json' } }
it 'reports error' do
expect(entry.errors)
.to include 'reports config contains unknown keys: codeclimate'
end
end
end
end
end
end
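Put differently, a single filename is accepted and normalised into an array, while unknown report types are rejected; a small sketch mirroring the spec above:

entry = Gitlab::Ci::Config::Entry::Reports.new(junit: 'junit.xml')
entry.value  # => { junit: ['junit.xml'] }

entry = Gitlab::Ci::Config::Entry::Reports.new(codeclimate: 'codeclimate.json')
entry.errors # includes 'reports config contains unknown keys: codeclimate'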
...@@ -3,7 +3,7 @@ require 'spec_helper' ...@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::Kubernetes::ConfigMap do describe Gitlab::Kubernetes::ConfigMap do
let(:kubeclient) { double('kubernetes client') } let(:kubeclient) { double('kubernetes client') }
let(:application) { create(:clusters_applications_prometheus) } let(:application) { create(:clusters_applications_prometheus) }
let(:config_map) { described_class.new(application.name, application.values) } let(:config_map) { described_class.new(application.name, application.files) }
let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
let(:metadata) do let(:metadata) do
...@@ -15,7 +15,7 @@ describe Gitlab::Kubernetes::ConfigMap do ...@@ -15,7 +15,7 @@ describe Gitlab::Kubernetes::ConfigMap do
end end
describe '#generate' do describe '#generate' do
let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: { values: application.values }) } let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: application.files) }
subject { config_map.generate } subject { config_map.generate }
it 'should build a Kubeclient Resource' do it 'should build a Kubeclient Resource' do
......
...@@ -39,7 +39,7 @@ describe Gitlab::Kubernetes::Helm::Api do ...@@ -39,7 +39,7 @@ describe Gitlab::Kubernetes::Helm::Api do
end end
context 'with a ConfigMap' do context 'with a ConfigMap' do
let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application.name, application.values).generate } let(:resource) { Gitlab::Kubernetes::ConfigMap.new(application.name, application.files).generate }
it 'creates a ConfigMap on kubeclient' do it 'creates a ConfigMap on kubeclient' do
expect(client).to receive(:create_config_map).with(resource).once expect(client).to receive(:create_config_map).with(resource).once
......
...@@ -2,7 +2,25 @@ require 'spec_helper' ...@@ -2,7 +2,25 @@ require 'spec_helper'
describe Gitlab::Kubernetes::Helm::BaseCommand do describe Gitlab::Kubernetes::Helm::BaseCommand do
let(:application) { create(:clusters_applications_helm) } let(:application) { create(:clusters_applications_helm) }
let(:base_command) { described_class.new(application.name) } let(:test_class) do
Class.new do
include Gitlab::Kubernetes::Helm::BaseCommand
def name
"test-class-name"
end
def files
{
some: 'value'
}
end
end
end
let(:base_command) do
test_class.new
end
subject { base_command } subject { base_command }
...@@ -18,15 +36,9 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do ...@@ -18,15 +36,9 @@ describe Gitlab::Kubernetes::Helm::BaseCommand do
end end
end end
describe '#config_map?' do
subject { base_command.config_map? }
it { is_expected.to be_falsy }
end
describe '#pod_name' do describe '#pod_name' do
subject { base_command.pod_name } subject { base_command.pod_name }
it { is_expected.to eq('install-helm') } it { is_expected.to eq('install-test-class-name') }
end end
end end
require 'spec_helper'
describe Gitlab::Kubernetes::Helm::Certificate do
describe '.generate_root' do
subject { described_class.generate_root }
it 'should generate a root CA that expires a long way in the future' do
expect(subject.cert.not_after).to be > 999.years.from_now
end
end
describe '#issue' do
subject { described_class.generate_root.issue }
it 'should generate a cert that expires soon' do
expect(subject.cert.not_after).to be < 60.minutes.from_now
end
context 'passing in INFINITE_EXPIRY' do
subject { described_class.generate_root.issue(expires_in: described_class::INFINITE_EXPIRY) }
it 'should generate a cert that expires a long way in the future' do
expect(subject.cert.not_after).to be > 999.years.from_now
end
end
end
end
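Taken together, a hypothetical usage sketch of the certificate helper, based only on the expectations above:

root = Gitlab::Kubernetes::Helm::Certificate.generate_root   # long-lived root CA (expires ~1000 years out)
client = root.issue                                          # short-lived client cert (< 60 minutes)
forever = root.issue(expires_in: Gitlab::Kubernetes::Helm::Certificate::INFINITE_EXPIRY)
client.cert.not_after  # OpenSSL::X509::Certificate expiry, as checked in the specs above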
...@@ -2,9 +2,9 @@ require 'spec_helper' ...@@ -2,9 +2,9 @@ require 'spec_helper'
describe Gitlab::Kubernetes::Helm::InitCommand do describe Gitlab::Kubernetes::Helm::InitCommand do
let(:application) { create(:clusters_applications_helm) } let(:application) { create(:clusters_applications_helm) }
let(:commands) { 'helm init >/dev/null' } let(:commands) { 'helm init --tiller-tls --tiller-tls-verify --tls-ca-cert /data/helm/helm/config/ca.pem --tiller-tls-cert /data/helm/helm/config/cert.pem --tiller-tls-key /data/helm/helm/config/key.pem >/dev/null' }
subject { described_class.new(application.name) } subject { described_class.new(name: application.name, files: {}) }
it_behaves_like 'helm commands' it_behaves_like 'helm commands'
end end
require 'rails_helper' require 'rails_helper'
describe Gitlab::Kubernetes::Helm::InstallCommand do describe Gitlab::Kubernetes::Helm::InstallCommand do
let(:application) { create(:clusters_applications_prometheus) } let(:files) { { 'ca.pem': 'some file content' } }
let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } let(:repository) { 'https://repository.example.com' }
let(:install_command) { application.install_command } let(:version) { '1.2.3' }
let(:install_command) do
described_class.new(
name: 'app-name',
chart: 'chart-name',
files: files,
version: version, repository: repository
)
end
subject { install_command } subject { install_command }
context 'for ingress' do it_behaves_like 'helm commands' do
let(:application) { create(:clusters_applications_ingress) } let(:commands) do
<<~EOS
it_behaves_like 'helm commands' do helm init --client-only >/dev/null
let(:commands) do helm repo add app-name https://repository.example.com
<<~EOS helm install --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem chart-name --name app-name --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null
helm init --client-only >/dev/null EOS
helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null
EOS
end
end end
end end
context 'for prometheus' do context 'when there is no repository' do
let(:application) { create(:clusters_applications_prometheus) } let(:repository) { nil }
it_behaves_like 'helm commands' do it_behaves_like 'helm commands' do
let(:commands) do let(:commands) do
<<~EOS <<~EOS
helm init --client-only >/dev/null helm init --client-only >/dev/null
helm install #{application.chart} --name #{application.name} --version #{application.version} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null helm install --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem chart-name --name app-name --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null
EOS EOS
end end
end end
end end
context 'for runner' do context 'when there is no ca.pem file' do
let(:ci_runner) { create(:ci_runner) } let(:files) { { 'file.txt': 'some content' } }
let(:application) { create(:clusters_applications_runner, runner: ci_runner) }
it_behaves_like 'helm commands' do it_behaves_like 'helm commands' do
let(:commands) do let(:commands) do
<<~EOS <<~EOS
helm init --client-only >/dev/null helm init --client-only >/dev/null
helm repo add #{application.name} #{application.repository} helm repo add app-name https://repository.example.com
helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null helm install chart-name --name app-name --version 1.2.3 --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null
EOS EOS
end end
end end
end end
context 'for jupyter' do context 'when there is no version' do
let(:application) { create(:clusters_applications_jupyter) } let(:version) { nil }
it_behaves_like 'helm commands' do it_behaves_like 'helm commands' do
let(:commands) do let(:commands) do
<<~EOS <<~EOS
helm init --client-only >/dev/null helm init --client-only >/dev/null
helm repo add #{application.name} #{application.repository} helm repo add app-name https://repository.example.com
helm install #{application.chart} --name #{application.name} --namespace #{namespace} -f /data/helm/#{application.name}/config/values.yaml >/dev/null helm install --tls --tls-ca-cert /data/helm/app-name/config/ca.pem --tls-cert /data/helm/app-name/config/cert.pem --tls-key /data/helm/app-name/config/key.pem chart-name --name app-name --namespace gitlab-managed-apps -f /data/helm/app-name/config/values.yaml >/dev/null
EOS EOS
end end
end end
end end
describe '#config_map?' do
subject { install_command.config_map? }
it { is_expected.to be_truthy }
end
describe '#config_map_resource' do describe '#config_map_resource' do
let(:metadata) do let(:metadata) do
{ {
name: "values-content-configuration-#{application.name}", name: "values-content-configuration-app-name",
namespace: namespace, namespace: 'gitlab-managed-apps',
labels: { name: "values-content-configuration-#{application.name}" } labels: { name: "values-content-configuration-app-name" }
} }
end end
let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: { values: application.values }) } let(:resource) { ::Kubeclient::Resource.new(metadata: metadata, data: files) }
subject { install_command.config_map_resource } subject { install_command.config_map_resource }
......
...@@ -2,14 +2,13 @@ require 'rails_helper' ...@@ -2,14 +2,13 @@ require 'rails_helper'
describe Gitlab::Kubernetes::Helm::Pod do describe Gitlab::Kubernetes::Helm::Pod do
describe '#generate' do describe '#generate' do
let(:cluster) { create(:cluster) } let(:app) { create(:clusters_applications_prometheus) }
let(:app) { create(:clusters_applications_prometheus, cluster: cluster) }
let(:command) { app.install_command } let(:command) { app.install_command }
let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE } let(:namespace) { Gitlab::Kubernetes::Helm::NAMESPACE }
subject { described_class.new(command, namespace) } subject { described_class.new(command, namespace) }
shared_examples 'helm pod' do context 'with a command' do
it 'should generate a Kubeclient::Resource' do it 'should generate a Kubeclient::Resource' do
expect(subject.generate).to be_a_kind_of(Kubeclient::Resource) expect(subject.generate).to be_a_kind_of(Kubeclient::Resource)
end end
...@@ -41,10 +40,6 @@ describe Gitlab::Kubernetes::Helm::Pod do ...@@ -41,10 +40,6 @@ describe Gitlab::Kubernetes::Helm::Pod do
spec = subject.generate.spec spec = subject.generate.spec
expect(spec.restartPolicy).to eq('Never') expect(spec.restartPolicy).to eq('Never')
end end
end
context 'with a install command' do
it_behaves_like 'helm pod'
it 'should include volumes for the container' do it 'should include volumes for the container' do
container = subject.generate.spec.containers.first container = subject.generate.spec.containers.first
...@@ -60,24 +55,8 @@ describe Gitlab::Kubernetes::Helm::Pod do ...@@ -60,24 +55,8 @@ describe Gitlab::Kubernetes::Helm::Pod do
it 'should mount configMap specification in the volume' do it 'should mount configMap specification in the volume' do
volume = subject.generate.spec.volumes.first volume = subject.generate.spec.volumes.first
expect(volume.configMap['name']).to eq("values-content-configuration-#{app.name}") expect(volume.configMap['name']).to eq("values-content-configuration-#{app.name}")
expect(volume.configMap['items'].first['key']).to eq('values') expect(volume.configMap['items'].first['key']).to eq(:'values.yaml')
expect(volume.configMap['items'].first['path']).to eq('values.yaml') expect(volume.configMap['items'].first['path']).to eq(:'values.yaml')
end
end
context 'with a init command' do
let(:app) { create(:clusters_applications_helm, cluster: cluster) }
it_behaves_like 'helm pod'
it 'should not include volumeMounts inside the container' do
container = subject.generate.spec.containers.first
expect(container.volumeMounts).to be_nil
end
it 'should not include a volume inside the specification' do
spec = subject.generate.spec
expect(spec.volumes).to be_nil
end end
end end
end end
......
require 'spec_helper'
describe Gitlab::Middleware::BasicHealthCheck do
let(:app) { double(:app) }
let(:middleware) { described_class.new(app) }
let(:env) { {} }
describe '#call' do
context 'outside IP' do
before do
env['REMOTE_ADDR'] = '8.8.8.8'
end
it 'returns a 404' do
env['PATH_INFO'] = described_class::HEALTH_PATH
response = middleware.call(env)
expect(response[0]).to eq(404)
end
it 'forwards the call for other paths' do
env['PATH_INFO'] = '/'
expect(app).to receive(:call)
middleware.call(env)
end
end
context 'whitelisted IP' do
before do
env['REMOTE_ADDR'] = '127.0.0.1'
end
it 'returns 200 response when endpoint is hit' do
env['PATH_INFO'] = described_class::HEALTH_PATH
expect(app).not_to receive(:call)
response = middleware.call(env)
expect(response[0]).to eq(200)
expect(response[1]).to eq({ 'Content-Type' => 'text/plain' })
expect(response[2]).to eq(['GitLab OK'])
end
it 'forwards the call for other paths' do
env['PATH_INFO'] = '/-/readiness'
expect(app).to receive(:call)
middleware.call(env)
end
end
end
end
...@@ -515,6 +515,44 @@ describe Ci::Build do ...@@ -515,6 +515,44 @@ describe Ci::Build do
end end
end end
describe '#has_test_reports?' do
subject { build.has_test_reports? }
context 'when build has a test report' do
let(:build) { create(:ci_build, :test_reports) }
it { is_expected.to be_truthy }
end
context 'when build does not have test reports' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_falsy }
end
end
describe '#erase_test_reports!' do
subject { build.erase_test_reports! }
context 'when build has a test report' do
let!(:build) { create(:ci_build, :test_reports) }
it 'removes a test report' do
subject
expect(build.has_test_reports?).to be_falsy
end
end
context 'when build does not have test reports' do
let!(:build) { create(:ci_build, :artifacts) }
it 'does not erase anything' do
expect { subject }.not_to change { Ci::JobArtifact.count }
end
end
end
describe '#has_old_trace?' do describe '#has_old_trace?' do
subject { build.has_old_trace? } subject { build.has_old_trace? }
...@@ -777,6 +815,10 @@ describe Ci::Build do ...@@ -777,6 +815,10 @@ describe Ci::Build do
expect(build.artifacts_metadata.exists?).to be_falsy expect(build.artifacts_metadata.exists?).to be_falsy
end end
it 'removes test reports' do
expect(build.job_artifacts.test_reports.count).to eq(0)
end
it 'erases build trace in trace file' do it 'erases build trace in trace file' do
expect(build).not_to have_trace expect(build).not_to have_trace
end end
...@@ -808,7 +850,7 @@ describe Ci::Build do ...@@ -808,7 +850,7 @@ describe Ci::Build do
context 'build is erasable' do context 'build is erasable' do
context 'new artifacts' do context 'new artifacts' do
let!(:build) { create(:ci_build, :trace_artifact, :success, :artifacts) } let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
describe '#erase' do describe '#erase' do
before do before do
......
...@@ -15,6 +15,22 @@ describe Ci::JobArtifact do ...@@ -15,6 +15,22 @@ describe Ci::JobArtifact do
it { is_expected.to delegate_method(:open).to(:file) } it { is_expected.to delegate_method(:open).to(:file) }
it { is_expected.to delegate_method(:exists?).to(:file) } it { is_expected.to delegate_method(:exists?).to(:file) }
describe '.test_reports' do
subject { described_class.test_reports }
context 'when there is a test report' do
let!(:artifact) { create(:ci_job_artifact, :junit) }
it { is_expected.to eq([artifact]) }
end
context 'when there are no test reports' do
let!(:artifact) { create(:ci_job_artifact, :archive) }
it { is_expected.to be_empty }
end
end
describe 'callbacks' do describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) } subject { create(:ci_job_artifact, :archive) }
...@@ -87,6 +103,40 @@ describe Ci::JobArtifact do ...@@ -87,6 +103,40 @@ describe Ci::JobArtifact do
end end
end end
describe 'validates file format' do
subject { artifact }
context 'when archive type with zip format' do
let(:artifact) { build(:ci_job_artifact, :archive, file_format: :zip) }
it { is_expected.to be_valid }
end
context 'when archive type with gzip format' do
let(:artifact) { build(:ci_job_artifact, :archive, file_format: :gzip) }
it { is_expected.not_to be_valid }
end
context 'when archive type without format specification' do
let(:artifact) { build(:ci_job_artifact, :archive, file_format: nil) }
it { is_expected.not_to be_valid }
end
context 'when junit type with zip format' do
let(:artifact) { build(:ci_job_artifact, :junit, file_format: :zip) }
it { is_expected.not_to be_valid }
end
context 'when junit type with gzip format' do
let(:artifact) { build(:ci_job_artifact, :junit, file_format: :gzip) }
it { is_expected.to be_valid }
end
end
describe '#file' do describe '#file' do
subject { artifact.file } subject { artifact.file }
......
...@@ -6,13 +6,24 @@ describe Clusters::Applications::Helm do ...@@ -6,13 +6,24 @@ describe Clusters::Applications::Helm do
describe '.installed' do describe '.installed' do
subject { described_class.installed } subject { described_class.installed }
let!(:cluster) { create(:clusters_applications_helm, :installed) } let!(:installed_cluster) { create(:clusters_applications_helm, :installed) }
before do before do
create(:clusters_applications_helm, :errored) create(:clusters_applications_helm, :errored)
end end
it { is_expected.to contain_exactly(cluster) } it { is_expected.to contain_exactly(installed_cluster) }
end
describe '#issue_client_cert' do
let(:application) { create(:clusters_applications_helm) }
subject { application.issue_client_cert }
it 'returns a new cert' do
is_expected.to be_kind_of(Gitlab::Kubernetes::Helm::Certificate)
expect(subject.cert_string).not_to eq(application.ca_cert)
expect(subject.key_string).not_to eq(application.ca_key)
end
end end
describe '#install_command' do describe '#install_command' do
...@@ -25,5 +36,16 @@ describe Clusters::Applications::Helm do ...@@ -25,5 +36,16 @@ describe Clusters::Applications::Helm do
it 'should be initialized with 1 argument' do it 'should be initialized with 1 argument' do
expect(subject.name).to eq('helm') expect(subject.name).to eq('helm')
end end
it 'should have cert files' do
expect(subject.files[:'ca.pem']).to be_present
expect(subject.files[:'ca.pem']).to eq(helm.ca_cert)
expect(subject.files[:'cert.pem']).to be_present
expect(subject.files[:'key.pem']).to be_present
cert = OpenSSL::X509::Certificate.new(subject.files[:'cert.pem'])
expect(cert.not_after).to be > 999.years.from_now
end
end end
end end
...@@ -74,18 +74,43 @@ describe Clusters::Applications::Ingress do ...@@ -74,18 +74,43 @@ describe Clusters::Applications::Ingress do
expect(subject.name).to eq('ingress') expect(subject.name).to eq('ingress')
expect(subject.chart).to eq('stable/nginx-ingress') expect(subject.chart).to eq('stable/nginx-ingress')
expect(subject.version).to be_nil expect(subject.version).to be_nil
expect(subject.values).to eq(ingress.values) expect(subject.files).to eq(ingress.files)
end end
end end
describe '#values' do describe '#files' do
subject { ingress.values } let(:application) { ingress }
subject { application.files }
let(:values) { subject[:'values.yaml'] }
it 'should include ingress valid keys' do it 'should include ingress valid keys in values' do
is_expected.to include('image') expect(values).to include('image')
is_expected.to include('repository') expect(values).to include('repository')
is_expected.to include('stats') expect(values).to include('stats')
is_expected.to include('podAnnotations') expect(values).to include('podAnnotations')
end
context 'when the helm application does not have a ca_cert' do
before do
application.cluster.application_helm.ca_cert = nil
end
it 'should not include cert files' do
expect(subject[:'ca.pem']).not_to be_present
expect(subject[:'cert.pem']).not_to be_present
expect(subject[:'key.pem']).not_to be_present
end
end
it 'should include cert files' do
expect(subject[:'ca.pem']).to be_present
expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
expect(subject[:'cert.pem']).to be_present
expect(subject[:'key.pem']).to be_present
cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
expect(cert.not_after).to be < 60.minutes.from_now
end end
end end
end end
...@@ -38,23 +38,46 @@ describe Clusters::Applications::Jupyter do ...@@ -38,23 +38,46 @@ describe Clusters::Applications::Jupyter do
expect(subject.chart).to eq('jupyter/jupyterhub') expect(subject.chart).to eq('jupyter/jupyterhub')
expect(subject.version).to be_nil expect(subject.version).to be_nil
expect(subject.repository).to eq('https://jupyterhub.github.io/helm-chart/') expect(subject.repository).to eq('https://jupyterhub.github.io/helm-chart/')
expect(subject.values).to eq(jupyter.values) expect(subject.files).to eq(jupyter.files)
end end
end end
describe '#values' do describe '#files' do
let(:jupyter) { create(:clusters_applications_jupyter) } let(:application) { create(:clusters_applications_jupyter) }
subject { application.files }
let(:values) { subject[:'values.yaml'] }
subject { jupyter.values } it 'should include cert files' do
expect(subject[:'ca.pem']).to be_present
expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
expect(subject[:'cert.pem']).to be_present
expect(subject[:'key.pem']).to be_present
cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
expect(cert.not_after).to be < 60.minutes.from_now
end
context 'when the helm application does not have a ca_cert' do
before do
application.cluster.application_helm.ca_cert = nil
end
it 'should not include cert files' do
expect(subject[:'ca.pem']).not_to be_present
expect(subject[:'cert.pem']).not_to be_present
expect(subject[:'key.pem']).not_to be_present
end
end
it 'should include valid values' do it 'should include valid values' do
is_expected.to include('ingress') expect(values).to include('ingress')
is_expected.to include('hub') expect(values).to include('hub')
is_expected.to include('rbac') expect(values).to include('rbac')
is_expected.to include('proxy') expect(values).to include('proxy')
is_expected.to include('auth') expect(values).to include('auth')
is_expected.to include("clientId: #{jupyter.oauth_application.uid}") expect(values).to match(/clientId: '?#{application.oauth_application.uid}/)
is_expected.to include("callbackUrl: #{jupyter.callback_url}") expect(values).to match(/callbackUrl: '?#{application.callback_url}/)
end end
end end
end end
...@@ -153,21 +153,44 @@ describe Clusters::Applications::Prometheus do ...@@ -153,21 +153,44 @@ describe Clusters::Applications::Prometheus do
expect(command.name).to eq('prometheus') expect(command.name).to eq('prometheus')
expect(command.chart).to eq('stable/prometheus') expect(command.chart).to eq('stable/prometheus')
expect(command.version).to eq('6.7.3') expect(command.version).to eq('6.7.3')
expect(command.values).to eq(prometheus.values) expect(command.files).to eq(prometheus.files)
end end
end end
describe '#values' do describe '#files' do
let(:prometheus) { create(:clusters_applications_prometheus) } let(:application) { create(:clusters_applications_prometheus) }
subject { application.files }
let(:values) { subject[:'values.yaml'] }
it 'should include cert files' do
expect(subject[:'ca.pem']).to be_present
expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
expect(subject[:'cert.pem']).to be_present
expect(subject[:'key.pem']).to be_present
cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
expect(cert.not_after).to be < 60.minutes.from_now
end
subject { prometheus.values } context 'when the helm application does not have a ca_cert' do
before do
application.cluster.application_helm.ca_cert = nil
end
it 'should not include cert files' do
expect(subject[:'ca.pem']).not_to be_present
expect(subject[:'cert.pem']).not_to be_present
expect(subject[:'key.pem']).not_to be_present
end
end
it 'should include prometheus valid values' do it 'should include prometheus valid values' do
is_expected.to include('alertmanager') expect(values).to include('alertmanager')
is_expected.to include('kubeStateMetrics') expect(values).to include('kubeStateMetrics')
is_expected.to include('nodeExporter') expect(values).to include('nodeExporter')
is_expected.to include('pushgateway') expect(values).to include('pushgateway')
is_expected.to include('serverFiles') expect(values).to include('serverFiles')
end end
end end
end end
...@@ -33,31 +33,55 @@ describe Clusters::Applications::Runner do ...@@ -33,31 +33,55 @@ describe Clusters::Applications::Runner do
expect(subject.chart).to eq('runner/gitlab-runner') expect(subject.chart).to eq('runner/gitlab-runner')
expect(subject.version).to be_nil expect(subject.version).to be_nil
expect(subject.repository).to eq('https://charts.gitlab.io') expect(subject.repository).to eq('https://charts.gitlab.io')
expect(subject.values).to eq(gitlab_runner.values) expect(subject.files).to eq(gitlab_runner.files)
end end
end end
describe '#values' do describe '#files' do
let(:gitlab_runner) { create(:clusters_applications_runner, runner: ci_runner) } let(:application) { create(:clusters_applications_runner, runner: ci_runner) }
subject { application.files }
let(:values) { subject[:'values.yaml'] }
it 'should include cert files' do
expect(subject[:'ca.pem']).to be_present
expect(subject[:'ca.pem']).to eq(application.cluster.application_helm.ca_cert)
expect(subject[:'cert.pem']).to be_present
expect(subject[:'key.pem']).to be_present
cert = OpenSSL::X509::Certificate.new(subject[:'cert.pem'])
expect(cert.not_after).to be < 60.minutes.from_now
end
subject { gitlab_runner.values } context 'when the helm application does not have a ca_cert' do
before do
application.cluster.application_helm.ca_cert = nil
end
it 'should not include cert files' do
expect(subject[:'ca.pem']).not_to be_present
expect(subject[:'cert.pem']).not_to be_present
expect(subject[:'key.pem']).not_to be_present
end
end
it 'should include runner valid values' do it 'should include runner valid values' do
is_expected.to include('concurrent') expect(values).to include('concurrent')
is_expected.to include('checkInterval') expect(values).to include('checkInterval')
is_expected.to include('rbac') expect(values).to include('rbac')
is_expected.to include('runners') expect(values).to include('runners')
is_expected.to include('privileged: true') expect(values).to include('privileged: true')
is_expected.to include('image: ubuntu:16.04') expect(values).to include('image: ubuntu:16.04')
is_expected.to include('resources') expect(values).to include('resources')
is_expected.to include("runnerToken: #{ci_runner.token}") expect(values).to match(/runnerToken: '?#{ci_runner.token}/)
is_expected.to include("gitlabUrl: #{Gitlab::Routing.url_helpers.root_url}") expect(values).to match(/gitlabUrl: '?#{Gitlab::Routing.url_helpers.root_url}/)
end end
context 'without a runner' do context 'without a runner' do
let(:project) { create(:project) } let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) } let(:cluster) { create(:cluster, :with_installed_helm, projects: [project]) }
let(:gitlab_runner) { create(:clusters_applications_runner, cluster: cluster) } let(:application) { create(:clusters_applications_runner, cluster: cluster) }
it 'creates a runner' do it 'creates a runner' do
expect do expect do
...@@ -66,18 +90,18 @@ describe Clusters::Applications::Runner do ...@@ -66,18 +90,18 @@ describe Clusters::Applications::Runner do
end end
it 'uses the new runner token' do it 'uses the new runner token' do
expect(subject).to include("runnerToken: #{gitlab_runner.reload.runner.token}") expect(values).to match(/runnerToken: '?#{application.reload.runner.token}/)
end end
it 'assigns the new runner to runner' do it 'assigns the new runner to runner' do
subject subject
expect(gitlab_runner.reload.runner).to be_project_type expect(application.reload.runner).to be_project_type
end end
end end
context 'with duplicated values on vendor/runner/values.yaml' do context 'with duplicated values on vendor/runner/values.yaml' do
let(:values) do let(:stub_values) do
{ {
"concurrent" => 4, "concurrent" => 4,
"checkInterval" => 3, "checkInterval" => 3,
...@@ -96,11 +120,11 @@ describe Clusters::Applications::Runner do ...@@ -96,11 +120,11 @@ describe Clusters::Applications::Runner do
end end
before do before do
allow(gitlab_runner).to receive(:chart_values).and_return(values) allow(application).to receive(:chart_values).and_return(stub_values)
end end
it 'should overwrite values.yaml' do it 'should overwrite values.yaml' do
is_expected.to include("privileged: #{gitlab_runner.privileged}") expect(values).to match(/privileged: '?#{application.privileged}/)
end end
end end
end end
......
...@@ -312,4 +312,24 @@ describe Milestone do ...@@ -312,4 +312,24 @@ describe Milestone do
expect(milestone.participants).to eq [user] expect(milestone.participants).to eq [user]
end end
end end
describe '.sort_by_attribute' do
set(:milestone_1) { create(:milestone, title: 'Foo') }
set(:milestone_2) { create(:milestone, title: 'Bar') }
set(:milestone_3) { create(:milestone, title: 'Zoo') }
context 'ordering by name ascending' do
it 'sorts by title ascending' do
expect(described_class.sort_by_attribute('name_asc'))
.to eq([milestone_2, milestone_1, milestone_3])
end
end
context 'ordering by name descending' do
it 'sorts by title descending' do
expect(described_class.sort_by_attribute('name_desc'))
.to eq([milestone_3, milestone_1, milestone_2])
end
end
end
end end
...@@ -4249,6 +4249,16 @@ describe Project do ...@@ -4249,6 +4249,16 @@ describe Project do
end end
end end
context '#commits_by' do
let(:project) { create(:project, :repository) }
let(:commits) { project.repository.commits('HEAD', limit: 3).commits }
let(:commit_shas) { commits.map(&:id) }
it 'retrieves several commits from the repository by oid' do
expect(project.commits_by(oids: commit_shas)).to eq commits
end
end
def rugged_config def rugged_config
Gitlab::GitalyClient::StorageSettings.allow_disk_access do Gitlab::GitalyClient::StorageSettings.allow_disk_access do
project.repository.rugged.config project.repository.rugged.config
......
...@@ -977,6 +977,7 @@ describe User do ...@@ -977,6 +977,7 @@ describe User do
user = create(:user, email: 'foo@example.com') user = create(:user, email: 'foo@example.com')
expect(described_class.find_by_any_email(user.email)).to eq user expect(described_class.find_by_any_email(user.email)).to eq user
expect(described_class.find_by_any_email(user.email, confirmed: true)).to eq user
end end
it 'finds by secondary email' do it 'finds by secondary email' do
...@@ -984,11 +985,19 @@ describe User do ...@@ -984,11 +985,19 @@ describe User do
user = email.user user = email.user
expect(described_class.find_by_any_email(email.email)).to eq user expect(described_class.find_by_any_email(email.email)).to eq user
expect(described_class.find_by_any_email(email.email, confirmed: true)).to eq user
end end
it 'returns nil when nothing found' do it 'returns nil when nothing found' do
expect(described_class.find_by_any_email('')).to be_nil expect(described_class.find_by_any_email('')).to be_nil
end end
it 'returns nil when user is not confirmed' do
user = create(:user, email: 'foo@example.com', confirmed_at: nil)
expect(described_class.find_by_any_email(user.email, confirmed: false)).to eq(user)
expect(described_class.find_by_any_email(user.email, confirmed: true)).to be_nil
end
end end
describe '.by_any_email' do describe '.by_any_email' do
...@@ -1002,6 +1011,12 @@ describe User do ...@@ -1002,6 +1011,12 @@ describe User do
expect(described_class.by_any_email(user.email)).to eq([user]) expect(described_class.by_any_email(user.email)).to eq([user])
end end
it 'returns a relation of users for confirmed users' do
user = create(:user)
expect(described_class.by_any_email(user.email, confirmed: true)).to eq([user])
end
end end
describe '.search' do describe '.search' do
......