Commit 68f4b26e authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-08-14

# Conflicts:
#	app/services/notification_recipient_service.rb
#	doc/user/project/issue_board.md
#	locale/gitlab.pot

[ci skip]
parents 3ff95c3c e610b41e
<script>
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
export default {
components: {
TimeagoTooltip,
},
props: {
erasedByUser: {
type: Boolean,
required: true,
},
username: {
type: String,
required: false,
default: null,
},
linkToUser: {
type: String,
required: false,
default: null,
},
erasedAt: {
type: String,
required: true,
},
},
};
</script>
<template>
<div class="prepend-top-default js-build-erased">
<div class="erased alert alert-warning">
<template v-if="erasedByUser">
{{ s__("Job|Job has been erased by") }}
<a :href="linkToUser">
{{ username }}
</a>
</template>
<template v-else>
{{ s__("Job|Job has been erased") }}
</template>
<timeago-tooltip
:time="erasedAt"
/>
</div>
</div>
</template>
<script>
export default {
name: 'JobLog',
props: {
trace: {
type: String,
required: true,
},
isReceivingBuildTrace: {
type: Boolean,
required: true,
},
},
};
</script>
<template>
<pre class="build-trace">
<code
class="bash"
v-html="trace"
>
</code>
<div
v-if="isReceivingBuildTrace"
class="js-log-animation build-loader-animation"
>
<div class="dot"></div>
<div class="dot"></div>
<div class="dot"></div>
</div>
</pre>
</template>
# frozen_string_literal: true
module Ci
class BuildRunnerPresenter < SimpleDelegator
def artifacts
......
# frozen_string_literal: true
class ProjectMirrorSerializer < BaseSerializer
entity ProjectMirrorEntity
end
# frozen_string_literal: true
class TestCaseEntity < Grape::Entity
expose :status
expose :name
......
# frozen_string_literal: true
class TestReportsComparerEntity < Grape::Entity
expose :total_status, as: :status
......
# frozen_string_literal: true
class TestReportsComparerSerializer < BaseSerializer
entity TestReportsComparerEntity
end
# frozen_string_literal: true
class TestSuiteComparerEntity < Grape::Entity
expose :name
expose :total_status, as: :status
......
......@@ -220,8 +220,11 @@ module NotificationRecipientService
end
class Default < Base
<<<<<<< HEAD
prepend ::EE::NotificationRecipientBuilders::Default
=======
>>>>>>> upstream/master
MENTION_TYPE_ACTIONS = [:new_issue, :new_merge_request].freeze
attr_reader :target
......
# frozen_string_literal: true
module Projects
class DetectRepositoryLanguagesService < BaseService
attr_reader :detected_repository_languages, :programming_languages
......
# frozen_string_literal: true
module Todos
module Destroy
class BaseService
......
# frozen_string_literal: true
module Todos
module Destroy
class ConfidentialIssueService < ::Todos::Destroy::BaseService
......
# frozen_string_literal: true
module Todos
module Destroy
class EntityLeaveService < ::Todos::Destroy::BaseService
......
# frozen_string_literal: true
module Todos
module Destroy
class GroupPrivateService < ::Todos::Destroy::BaseService
......
# frozen_string_literal: true
module Todos
module Destroy
class PrivateFeaturesService < ::Todos::Destroy::BaseService
......
# frozen_string_literal: true
module Todos
module Destroy
class ProjectPrivateService < ::Todos::Destroy::BaseService
......
......@@ -17,6 +17,6 @@
%th Primary Action
%th
= render @spam_logs
= paginate @spam_logs
= paginate @spam_logs, theme: 'gitlab'
- else
%h4 There are no Spam Logs
# frozen_string_literal: true
class DetectRepositoryLanguagesWorker
include ApplicationWorker
include ExceptionBacktrace
......
# frozen_string_literal: true
module TodosDestroyer
class ConfidentialIssueWorker
include ApplicationWorker
......
# frozen_string_literal: true
module TodosDestroyer
class EntityLeaveWorker
include ApplicationWorker
......
# frozen_string_literal: true
module TodosDestroyer
class GroupPrivateWorker
include ApplicationWorker
......
# frozen_string_literal: true
module TodosDestroyer
class PrivateFeaturesWorker
include ApplicationWorker
......
# frozen_string_literal: true
module TodosDestroyer
class ProjectPrivateWorker
include ApplicationWorker
......
---
title: disable_statement_timeout no longer leaks to other migrations
merge_request: 20503
author:
type: fixed
---
title: Add gitlab theme to spam logs pagination
merge_request: 21145
author:
type: fixed
---
title: Creates vue component for erased block on job view
merge_request:
author:
type: other
---
title: Creates vue component for job log trace
merge_request:
author:
type: other
---
title: 'Auto-DevOps.gitlab-ci.yml: Update glibc package signing key URL'
merge_request: 21182
author: sgerrand
type: fixed
---
title: Enable frozen string in vestigial app files
merge_request:
author: gfyoung
type: performance
---
title: Remove storage path dependency of gitaly install task
merge_request: 21101
author:
type: changed
......@@ -106,11 +106,11 @@ class ProjectForeignKeysWithCascadingDeletes < ActiveRecord::Migration
# Disables statement timeouts for the current connection. This is
# necessary as removing orphaned data might otherwise exceed the
# statement timeout.
disable_statement_timeout
disable_statement_timeout do
remove_orphans(*queue.pop) until queue.empty?
remove_orphans(*queue.pop) until queue.empty?
steal_from_queues(queues - [queue])
steal_from_queues(queues - [queue])
end
end
end
end
......
......@@ -25,8 +25,9 @@ class AddLowerPathIndexToRedirectRoutes < ActiveRecord::Migration
# trivial to write a query that checks for an index. BUT there is a
# convenient `IF EXISTS` parameter for `DROP INDEX`.
if supports_drop_index_concurrently?
disable_statement_timeout
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME};"
disable_statement_timeout do
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME};"
end
else
execute "DROP INDEX IF EXISTS #{INDEX_NAME};"
end
......
......@@ -8,25 +8,25 @@ class AddIndexOnNamespacesLowerName < ActiveRecord::Migration
def up
return unless Gitlab::Database.postgresql?
disable_statement_timeout
if Gitlab::Database.version.to_f >= 9.5
# Allow us to hot-patch the index manually ahead of the migration
execute "CREATE INDEX CONCURRENTLY IF NOT EXISTS #{INDEX_NAME} ON namespaces (lower(name));"
else
execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON namespaces (lower(name));"
disable_statement_timeout do
if Gitlab::Database.version.to_f >= 9.5
# Allow us to hot-patch the index manually ahead of the migration
execute "CREATE INDEX CONCURRENTLY IF NOT EXISTS #{INDEX_NAME} ON namespaces (lower(name));"
else
execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON namespaces (lower(name));"
end
end
end
def down
return unless Gitlab::Database.postgresql?
disable_statement_timeout
if Gitlab::Database.version.to_f >= 9.2
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME};"
else
execute "DROP INDEX IF EXISTS #{INDEX_NAME};"
disable_statement_timeout do
if Gitlab::Database.version.to_f >= 9.2
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME};"
else
execute "DROP INDEX IF EXISTS #{INDEX_NAME};"
end
end
end
end
......@@ -18,51 +18,51 @@ class ReworkRedirectRoutesIndexes < ActiveRecord::Migration
OLD_INDEX_NAME_PATH_LOWER = "index_on_redirect_routes_lower_path"
def up
disable_statement_timeout
# this is a plain btree on a single boolean column. It'll never be
# selective enough to be valuable. This class is called by
# setup_postgresql.rake so it needs to be able to handle this
# index not existing.
if index_exists?(:redirect_routes, :permanent)
remove_concurrent_index(:redirect_routes, :permanent)
end
disable_statement_timeout do
# this is a plain btree on a single boolean column. It'll never be
# selective enough to be valuable. This class is called by
# setup_postgresql.rake so it needs to be able to handle this
# index not existing.
if index_exists?(:redirect_routes, :permanent)
remove_concurrent_index(:redirect_routes, :permanent)
end
# If we're on MySQL then the existing index on path is ok. But on
# Postgres we need to clean things up:
return unless Gitlab::Database.postgresql?
# If we're on MySQL then the existing index on path is ok. But on
# Postgres we need to clean things up:
break unless Gitlab::Database.postgresql?
if_not_exists = Gitlab::Database.version.to_f >= 9.5 ? "IF NOT EXISTS" : ""
if_not_exists = Gitlab::Database.version.to_f >= 9.5 ? "IF NOT EXISTS" : ""
# Unique index on lower(path) across both types of redirect_routes:
execute("CREATE UNIQUE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_UNIQUE} ON redirect_routes (lower(path) varchar_pattern_ops);")
# Unique index on lower(path) across both types of redirect_routes:
execute("CREATE UNIQUE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_UNIQUE} ON redirect_routes (lower(path) varchar_pattern_ops);")
# Make two indexes on path -- one for permanent and one for temporary routes:
execute("CREATE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_PERM} ON redirect_routes (lower(path) varchar_pattern_ops) where (permanent);")
execute("CREATE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_TEMP} ON redirect_routes (lower(path) varchar_pattern_ops) where (not permanent or permanent is null) ;")
# Make two indexes on path -- one for permanent and one for temporary routes:
execute("CREATE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_PERM} ON redirect_routes (lower(path) varchar_pattern_ops) where (permanent);")
execute("CREATE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_TEMP} ON redirect_routes (lower(path) varchar_pattern_ops) where (not permanent or permanent is null) ;")
# Remove the old indexes:
# Remove the old indexes:
# This one needed to be on lower(path) but wasn't so it's replaced with the two above
execute "DROP INDEX CONCURRENTLY IF EXISTS #{OLD_INDEX_NAME_PATH_TPOPS};"
# This one needed to be on lower(path) but wasn't so it's replaced with the two above
execute "DROP INDEX CONCURRENTLY IF EXISTS #{OLD_INDEX_NAME_PATH_TPOPS};"
# This one isn't needed because we only ever do = and LIKE on this
# column so the varchar_pattern_ops index is sufficient
execute "DROP INDEX CONCURRENTLY IF EXISTS #{OLD_INDEX_NAME_PATH_LOWER};"
# This one isn't needed because we only ever do = and LIKE on this
# column so the varchar_pattern_ops index is sufficient
execute "DROP INDEX CONCURRENTLY IF EXISTS #{OLD_INDEX_NAME_PATH_LOWER};"
end
end
def down
disable_statement_timeout
disable_statement_timeout do
add_concurrent_index(:redirect_routes, :permanent)
add_concurrent_index(:redirect_routes, :permanent)
break unless Gitlab::Database.postgresql?
return unless Gitlab::Database.postgresql?
execute("CREATE INDEX CONCURRENTLY #{OLD_INDEX_NAME_PATH_TPOPS} ON redirect_routes (path varchar_pattern_ops);")
execute("CREATE INDEX CONCURRENTLY #{OLD_INDEX_NAME_PATH_LOWER} ON redirect_routes (LOWER(path));")
execute("CREATE INDEX CONCURRENTLY #{OLD_INDEX_NAME_PATH_TPOPS} ON redirect_routes (path varchar_pattern_ops);")
execute("CREATE INDEX CONCURRENTLY #{OLD_INDEX_NAME_PATH_LOWER} ON redirect_routes (LOWER(path));")
execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_UNIQUE};")
execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};")
execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};")
execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_UNIQUE};")
execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};")
execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};")
end
end
end
......@@ -13,16 +13,16 @@ class CreateProjectCiCdSettings < ActiveRecord::Migration
end
end
disable_statement_timeout
disable_statement_timeout do
# This particular INSERT will take between 10 and 20 seconds.
execute 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects'
# This particular INSERT will take between 10 and 20 seconds.
execute 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects'
# We add the index and foreign key separately so the above INSERT statement
# takes as little time as possible.
add_concurrent_index(:project_ci_cd_settings, :project_id, unique: true)
# We add the index and foreign key separately so the above INSERT statement
# takes as little time as possible.
add_concurrent_index(:project_ci_cd_settings, :project_id, unique: true)
add_foreign_key_with_retry
add_foreign_key_with_retry
end
end
def down
......
......@@ -14,48 +14,50 @@ class CleanupBuildStageMigration < ActiveRecord::Migration
end
def up
disable_statement_timeout
##
# We steal from the background migrations queue to catch up with the
# scheduled migrations set.
#
Gitlab::BackgroundMigration.steal('MigrateBuildStage')
##
# We add temporary index, to make iteration over batches more performant.
# Conditional here is to avoid the need of doing that in a separate
# migration file to make this operation idempotent.
#
unless index_exists_by_name?(:ci_builds, TMP_INDEX)
add_concurrent_index(:ci_builds, :id, where: 'stage_id IS NULL', name: TMP_INDEX)
end
##
# We check if there are remaining rows that should be migrated (for example
# if Sidekiq / Redis fails / is restarted, which could result in not all
# background migrations being executed correctly).
#
# We migrate remaining rows synchronously in a blocking way, to make sure
# that when this migration is done we are confident that all rows are
# already migrated.
#
Build.where('stage_id IS NULL').each_batch(of: 50) do |batch|
range = batch.pluck('MIN(id)', 'MAX(id)').first
Gitlab::BackgroundMigration::MigrateBuildStage.new.perform(*range)
disable_statement_timeout do
##
# We steal from the background migrations queue to catch up with the
# scheduled migrations set.
#
Gitlab::BackgroundMigration.steal('MigrateBuildStage')
##
# We add temporary index, to make iteration over batches more performant.
# Conditional here is to avoid the need of doing that in a separate
# migration file to make this operation idempotent.
#
unless index_exists_by_name?(:ci_builds, TMP_INDEX)
add_concurrent_index(:ci_builds, :id, where: 'stage_id IS NULL', name: TMP_INDEX)
end
##
# We check if there are remaining rows that should be migrated (for example
# if Sidekiq / Redis fails / is restarted, which could result in not all
# background migrations being executed correctly).
#
# We migrate remaining rows synchronously in a blocking way, to make sure
# that when this migration is done we are confident that all rows are
# already migrated.
#
Build.where('stage_id IS NULL').each_batch(of: 50) do |batch|
range = batch.pluck('MIN(id)', 'MAX(id)').first
Gitlab::BackgroundMigration::MigrateBuildStage.new.perform(*range)
end
##
# We remove temporary index, because it is not required during standard
# operations and runtime.
#
remove_concurrent_index_by_name(:ci_builds, TMP_INDEX)
end
##
# We remove temporary index, because it is not required during standard
# operations and runtime.
#
remove_concurrent_index_by_name(:ci_builds, TMP_INDEX)
end
def down
if index_exists_by_name?(:ci_builds, TMP_INDEX)
remove_concurrent_index_by_name(:ci_builds, TMP_INDEX)
disable_statement_timeout do
remove_concurrent_index_by_name(:ci_builds, TMP_INDEX)
end
end
end
end
......@@ -13,20 +13,20 @@ class ProjectNameLowerIndex < ActiveRecord::Migration
def up
return unless Gitlab::Database.postgresql?
disable_statement_timeout
execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON projects (LOWER(name))"
disable_statement_timeout do
execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON projects (LOWER(name))"
end
end
def down
return unless Gitlab::Database.postgresql?
disable_statement_timeout
if supports_drop_index_concurrently?
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME}"
else
execute "DROP INDEX IF EXISTS #{INDEX_NAME}"
disable_statement_timeout do
if supports_drop_index_concurrently?
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME}"
else
execute "DROP INDEX IF EXISTS #{INDEX_NAME}"
end
end
end
end
......@@ -28,16 +28,16 @@ class RemoveOrphanedRoutes < ActiveRecord::Migration
# which is pretty close to our 15 second statement timeout. To ensure a
# smooth deployment procedure we disable the statement timeouts for this
# migration, just in case.
disable_statement_timeout
# On GitLab.com there are around 4000 orphaned project routes, and around
# 150 orphaned namespace routes.
[
Route.orphaned_project_routes,
Route.orphaned_namespace_routes
].each do |relation|
relation.each_batch(of: 1_000) do |batch|
batch.delete_all
disable_statement_timeout do
# On GitLab.com there are around 4000 orphaned project routes, and around
# 150 orphaned namespace routes.
[
Route.orphaned_project_routes,
Route.orphaned_namespace_routes
].each do |relation|
relation.each_batch(of: 1_000) do |batch|
batch.delete_all
end
end
end
end
......
......@@ -29,18 +29,20 @@ class CompositePrimaryKeysMigration < ActiveRecord::Migration
def up
return unless Gitlab::Database.postgresql?
disable_statement_timeout
TABLES.each do |index|
add_primary_key(index)
disable_statement_timeout do
TABLES.each do |index|
add_primary_key(index)
end
end
end
def down
return unless Gitlab::Database.postgresql?
disable_statement_timeout
TABLES.each do |index|
remove_primary_key(index)
disable_statement_timeout do
TABLES.each do |index|
remove_primary_key(index)
end
end
end
......
......@@ -8,9 +8,9 @@ class EnableAutoCancelPendingPipelinesForAll < ActiveRecord::Migration
DOWNTIME = false
def up
disable_statement_timeout
update_column_in_batches(:projects, :auto_cancel_pending_pipelines, 1)
disable_statement_timeout do
update_column_in_batches(:projects, :auto_cancel_pending_pipelines, 1)
end
end
def down
......
......@@ -7,12 +7,12 @@ class UpdateRetriedForCiBuild < ActiveRecord::Migration
disable_ddl_transaction!
def up
disable_statement_timeout
if Gitlab::Database.mysql?
up_mysql
else
up_postgres
disable_statement_timeout do
up_postgres
end
end
end
......
......@@ -7,20 +7,20 @@ class AddHeadPipelineForEachMergeRequest < ActiveRecord::Migration
disable_ddl_transaction!
def up
disable_statement_timeout
pipelines = Arel::Table.new(:ci_pipelines)
merge_requests = Arel::Table.new(:merge_requests)
head_id = pipelines
.project(Arel::Nodes::NamedFunction.new('max', [pipelines[:id]]))
.from(pipelines)
.where(pipelines[:ref].eq(merge_requests[:source_branch]))
.where(pipelines[:project_id].eq(merge_requests[:source_project_id]))
disable_statement_timeout do
head_id = pipelines
.project(Arel::Nodes::NamedFunction.new('max', [pipelines[:id]]))
.from(pipelines)
.where(pipelines[:ref].eq(merge_requests[:source_branch]))
.where(pipelines[:project_id].eq(merge_requests[:source_project_id]))
sub_query = Arel::Nodes::SqlLiteral.new(Arel::Nodes::Grouping.new(head_id).to_sql)
sub_query = Arel::Nodes::SqlLiteral.new(Arel::Nodes::Grouping.new(head_id).to_sql)
update_column_in_batches(:merge_requests, :head_pipeline_id, sub_query)
update_column_in_batches(:merge_requests, :head_pipeline_id, sub_query)
end
end
def down
......
......@@ -87,16 +87,16 @@ class RenameAllReservedPathsAgain < ActiveRecord::Migration
].freeze
def up
disable_statement_timeout
TOP_LEVEL_ROUTES.each { |route| rename_root_paths(route) }
PROJECT_WILDCARD_ROUTES.each { |route| rename_wildcard_paths(route) }
GROUP_ROUTES.each { |route| rename_child_paths(route) }
disable_statement_timeout do
TOP_LEVEL_ROUTES.each { |route| rename_root_paths(route) }
PROJECT_WILDCARD_ROUTES.each { |route| rename_wildcard_paths(route) }
GROUP_ROUTES.each { |route| rename_child_paths(route) }
end
end
def down
disable_statement_timeout
revert_renames
disable_statement_timeout do
revert_renames
end
end
end
......@@ -6,17 +6,17 @@ class MigratePipelineStages < ActiveRecord::Migration
disable_ddl_transaction!
def up
disable_statement_timeout
execute <<-SQL.strip_heredoc
INSERT INTO ci_stages (project_id, pipeline_id, name)
SELECT project_id, commit_id, stage FROM ci_builds
WHERE stage IS NOT NULL
AND stage_id IS NULL
AND EXISTS (SELECT 1 FROM projects WHERE projects.id = ci_builds.project_id)
AND EXISTS (SELECT 1 FROM ci_pipelines WHERE ci_pipelines.id = ci_builds.commit_id)
GROUP BY project_id, commit_id, stage
ORDER BY MAX(stage_idx)
SQL
disable_statement_timeout do
execute <<-SQL.strip_heredoc
INSERT INTO ci_stages (project_id, pipeline_id, name)
SELECT project_id, commit_id, stage FROM ci_builds
WHERE stage IS NOT NULL
AND stage_id IS NULL
AND EXISTS (SELECT 1 FROM projects WHERE projects.id = ci_builds.project_id)
AND EXISTS (SELECT 1 FROM ci_pipelines WHERE ci_pipelines.id = ci_builds.commit_id)
GROUP BY project_id, commit_id, stage
ORDER BY MAX(stage_idx)
SQL
end
end
end
......@@ -7,22 +7,22 @@ class MigrateBuildStageReferenceAgain < ActiveRecord::Migration
disable_ddl_transaction!
def up
disable_statement_timeout
stage_id = Arel.sql <<-SQL.strip_heredoc
(SELECT id FROM ci_stages
WHERE ci_stages.pipeline_id = ci_builds.commit_id
AND ci_stages.name = ci_builds.stage)
SQL
update_column_in_batches(:ci_builds, :stage_id, stage_id) do |table, query|
query.where(table[:stage_id].eq(nil))
disable_statement_timeout do
update_column_in_batches(:ci_builds, :stage_id, stage_id) do |table, query|
query.where(table[:stage_id].eq(nil))
end
end
end
def down
disable_statement_timeout
update_column_in_batches(:ci_builds, :stage_id, nil)
disable_statement_timeout do
update_column_in_batches(:ci_builds, :stage_id, nil)
end
end
end
......@@ -26,9 +26,9 @@ class MigrateStagesStatuses < ActiveRecord::Migration
end
def down
disable_statement_timeout
# rubocop:disable Migration/UpdateLargeTable
update_column_in_batches(:ci_stages, :status, nil)
disable_statement_timeout do
# rubocop:disable Migration/UpdateLargeTable
update_column_in_batches(:ci_stages, :status, nil)
end
end
end
......@@ -78,12 +78,12 @@ class RemoveSoftRemovedObjects < ActiveRecord::Migration
MODELS = [Issue, MergeRequest, CiPipelineSchedule, CiTrigger].freeze
def up
disable_statement_timeout
remove_personal_routes
remove_personal_namespaces
remove_group_namespaces
remove_simple_soft_removed_rows
disable_statement_timeout do
remove_personal_routes
remove_personal_namespaces
remove_group_namespaces
remove_simple_soft_removed_rows
end
end
def down
......
......@@ -38,29 +38,29 @@ class RemoveRedundantPipelineStages < ActiveRecord::Migration
end
def remove_redundant_pipeline_stages!
disable_statement_timeout
redundant_stages_ids = <<~SQL
SELECT id FROM ci_stages WHERE (pipeline_id, name) IN (
SELECT pipeline_id, name FROM ci_stages
GROUP BY pipeline_id, name HAVING COUNT(*) > 1
)
SQL
execute <<~SQL
UPDATE ci_builds SET stage_id = NULL WHERE stage_id IN (#{redundant_stages_ids})
SQL
if Gitlab::Database.postgresql?
execute <<~SQL
DELETE FROM ci_stages WHERE id IN (#{redundant_stages_ids})
disable_statement_timeout do
redundant_stages_ids = <<~SQL
SELECT id FROM ci_stages WHERE (pipeline_id, name) IN (
SELECT pipeline_id, name FROM ci_stages
GROUP BY pipeline_id, name HAVING COUNT(*) > 1
)
SQL
else # We can't modify a table we are selecting from on MySQL
execute <<~SQL
DELETE a FROM ci_stages AS a, ci_stages AS b
WHERE a.pipeline_id = b.pipeline_id AND a.name = b.name
AND a.id <> b.id
UPDATE ci_builds SET stage_id = NULL WHERE stage_id IN (#{redundant_stages_ids})
SQL
if Gitlab::Database.postgresql?
execute <<~SQL
DELETE FROM ci_stages WHERE id IN (#{redundant_stages_ids})
SQL
else # We can't modify a table we are selecting from on MySQL
execute <<~SQL
DELETE a FROM ci_stages AS a, ci_stages AS b
WHERE a.pipeline_id = b.pipeline_id AND a.name = b.name
AND a.id <> b.id
SQL
end
end
end
end
......@@ -15,10 +15,10 @@ class RemovePermanentFromRedirectRoutes < ActiveRecord::Migration
# ReworkRedirectRoutesIndexes:
# https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/16211
if Gitlab::Database.postgresql?
disable_statement_timeout
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};"
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};"
disable_statement_timeout do
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};"
execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};"
end
end
remove_column(:redirect_routes, :permanent)
......@@ -28,10 +28,10 @@ class RemovePermanentFromRedirectRoutes < ActiveRecord::Migration
add_column(:redirect_routes, :permanent, :boolean)
if Gitlab::Database.postgresql?
disable_statement_timeout
execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_PERM} ON redirect_routes (lower(path) varchar_pattern_ops) where (permanent);")
execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_TEMP} ON redirect_routes (lower(path) varchar_pattern_ops) where (not permanent or permanent is null) ;")
disable_statement_timeout do
execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_PERM} ON redirect_routes (lower(path) varchar_pattern_ops) where (permanent);")
execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_TEMP} ON redirect_routes (lower(path) varchar_pattern_ops) where (not permanent or permanent is null) ;")
end
end
end
end
......@@ -20,10 +20,10 @@ class AddPathIndexToRedirectRoutes < ActiveRecord::Migration
def up
return unless Gitlab::Database.postgresql?
disable_statement_timeout
unless index_exists_by_name?(:redirect_routes, INDEX_NAME)
execute("CREATE UNIQUE INDEX CONCURRENTLY #{INDEX_NAME} ON redirect_routes (lower(path) varchar_pattern_ops);")
disable_statement_timeout do
unless index_exists_by_name?(:redirect_routes, INDEX_NAME)
execute("CREATE UNIQUE INDEX CONCURRENTLY #{INDEX_NAME} ON redirect_routes (lower(path) varchar_pattern_ops);")
end
end
end
......
......@@ -17,13 +17,13 @@ class RescheduleBuildsStagesMigration < ActiveRecord::Migration
end
def up
disable_statement_timeout
Build.where('stage_id IS NULL').tap do |relation|
queue_background_migration_jobs_by_range_at_intervals(relation,
MIGRATION,
5.minutes,
batch_size: BATCH_SIZE)
disable_statement_timeout do
Build.where('stage_id IS NULL').tap do |relation|
queue_background_migration_jobs_by_range_at_intervals(relation,
MIGRATION,
5.minutes,
batch_size: BATCH_SIZE)
end
end
end
......
......@@ -13,13 +13,13 @@ class ScheduleStagesIndexMigration < ActiveRecord::Migration
end
def up
disable_statement_timeout
Stage.all.tap do |relation|
queue_background_migration_jobs_by_range_at_intervals(relation,
MIGRATION,
5.minutes,
batch_size: BATCH_SIZE)
disable_statement_timeout do
Stage.all.tap do |relation|
queue_background_migration_jobs_by_range_at_intervals(relation,
MIGRATION,
5.minutes,
batch_size: BATCH_SIZE)
end
end
end
......
......@@ -12,32 +12,34 @@ class CleanupStagesPositionMigration < ActiveRecord::Migration
end
def up
disable_statement_timeout
disable_statement_timeout do
Gitlab::BackgroundMigration.steal('MigrateStageIndex')
Gitlab::BackgroundMigration.steal('MigrateStageIndex')
unless index_exists_by_name?(:ci_stages, TMP_INDEX_NAME)
add_concurrent_index(:ci_stages, :id, where: 'position IS NULL', name: TMP_INDEX_NAME)
end
unless index_exists_by_name?(:ci_stages, TMP_INDEX_NAME)
add_concurrent_index(:ci_stages, :id, where: 'position IS NULL', name: TMP_INDEX_NAME)
end
migratable = <<~SQL
position IS NULL AND EXISTS (
SELECT 1 FROM ci_builds WHERE stage_id = ci_stages.id AND stage_idx IS NOT NULL
)
SQL
migratable = <<~SQL
position IS NULL AND EXISTS (
SELECT 1 FROM ci_builds WHERE stage_id = ci_stages.id AND stage_idx IS NOT NULL
)
SQL
Stages.where(migratable).each_batch(of: 1000) do |batch|
batch.pluck(:id).each do |stage|
Gitlab::BackgroundMigration::MigrateStageIndex.new.perform(stage, stage)
Stages.where(migratable).each_batch(of: 1000) do |batch|
batch.pluck(:id).each do |stage|
Gitlab::BackgroundMigration::MigrateStageIndex.new.perform(stage, stage)
end
end
end
remove_concurrent_index_by_name(:ci_stages, TMP_INDEX_NAME)
remove_concurrent_index_by_name(:ci_stages, TMP_INDEX_NAME)
end
end
def down
if index_exists_by_name?(:ci_stages, TMP_INDEX_NAME)
remove_concurrent_index_by_name(:ci_stages, TMP_INDEX_NAME)
disable_statement_timeout do
remove_concurrent_index_by_name(:ci_stages, TMP_INDEX_NAME)
end
end
end
end
......@@ -255,7 +255,7 @@ Example response:
Get a list of visible events for a particular project.
```
GET /:project_id/events
GET /projects/:project_id/events
```
Parameters:
......
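The path fix above makes the endpoint addressable under the usual `/projects` prefix. As an illustration only (the host, project ID, and token are placeholders, and the `/api/v4` prefix is assumed rather than taken from this diff), a request against the corrected path could look like this in Ruby:
```
require 'net/http'
require 'json'

# Hypothetical values: replace with a real instance, project ID, and token.
uri = URI('https://gitlab.example.com/api/v4/projects/42/events')
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV['GITLAB_TOKEN']

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
  http.request(request)
end

# Each event exposes fields such as action_name and author_username.
JSON.parse(response.body).each { |event| puts event['action_name'] }
```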
......@@ -153,7 +153,7 @@ page](https://golang.org/dl).
# Remove former Go installation folder
sudo rm -rf /usr/local/go
curl --remote-name --progress https://dl.google.com/go/go1.10.3.linux-amd64.tar.gz
echo 'fa1b0e45d3b647c252f51f5e1204aba049cde4af177ef9f2181f43004f901035 go1.10.3.linux-amd64.tar.gz' | shasum -a256 -c - && \
sudo tar -C /usr/local -xzf go1.10.3.linux-amd64.tar.gz
......@@ -494,11 +494,11 @@ Make GitLab start on boot:
### Install Gitaly
# Fetch Gitaly source with Git and compile with Go
sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly]" RAILS_ENV=production
sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly,/home/git/repositories]" RAILS_ENV=production
You can specify a different Git repository by providing it as an extra parameter:
sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly,https://example.com/gitaly.git]" RAILS_ENV=production
sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly,/home/git/repositories,https://example.com/gitaly.git]" RAILS_ENV=production
Next, make sure Gitaly is configured:
......
......@@ -122,7 +122,10 @@ Issue Board, that is, create or delete lists and drag issues from one list to an
- **List** - A column on the issue board that displays issues matching certain attributes. In addition to the default lists of 'Open' and 'Closed' issues, each additional list will show issues matching your chosen label or assignee. At the top of that list you can see the number of issues that belong to it.
- **Label list**: a list based on a label. It shows all opened issues with that label.
- **Assignee list**: a list which includes all issues assigned to a user.
<<<<<<< HEAD
- **Milestone list**: a list which includes all issues with that milestone.
=======
>>>>>>> upstream/master
- **Open** (default): shows all open issues that do not belong to one of the other lists. Always appears as the leftmost list.
- **Closed** (default): shows all closed issues. Always appears as the rightmost list.
- **Card** - A box in the list that represents an individual issue. The information you can see on a card consists of the issue number, the issue title, the assignee, and the labels associated with the issue. You can drag cards from one list to another to change their label or assignee from that of the source list to that of the destination list.
......@@ -380,6 +383,7 @@ As on another list types, click on the trash icon to remove it.
When dragging issues between lists, different behavior occurs depending on the source list and the target list.
<<<<<<< HEAD
| | To Open | To Closed | To label `B` list | To assignee `Bob` list | To milestone `2.0` list |
| --- | --- | --- | --- | --- | --- |
| From Open | - | Issue closed | `B` added | `Bob` assigned | `2.0` added |
......@@ -387,6 +391,14 @@ When dragging issues between lists, different behavior occurs depending on the s
| From label `A` list | `A` removed | Issue closed | `A` removed<br/>`B` added | `Bob` assigned | `2.0` added |
| From assignee `Alice` list | `Alice` unassigned | Issue closed | `B` added | `Alice` unassigned<br/>`Bob` assigned | `2.0` added |
| From milestone `1.0` list | `1.0` removed | Issue closed | `B` added | `Bob` assigned<br/> | `1.0` removed<br/>`2.0` added |
=======
| | To Open | To Closed | To label `B` list | To assignee `Bob` list |
| --- | --- | --- | --- | --- |
| From Open | - | Issue closed | `B` added | `Bob` assigned |
| From Closed | Issue reopened | - | Issue reopened<br/>`B` added | Issue reopened<br/>`Bob` assigned |
| From label `A` list | `A` removed | Issue closed | `A` removed<br/>`B` added | `Bob` assigned |
| From assignee `Alice` list | `Alice` unassigned | Issue closed | `B` added | `Alice` unassigned<br/>`Bob` assigned |
>>>>>>> upstream/master
## Features per tier
......
......@@ -58,7 +58,6 @@ module Gitlab
if Database.postgresql?
options = options.merge({ algorithm: :concurrently })
disable_statement_timeout
end
if index_exists?(table_name, column_name, options)
......@@ -66,7 +65,9 @@ module Gitlab
return
end
add_index(table_name, column_name, options)
disable_statement_timeout do
add_index(table_name, column_name, options)
end
end
# Removes an existed index, concurrently when supported
......@@ -87,7 +88,6 @@ module Gitlab
if supports_drop_index_concurrently?
options = options.merge({ algorithm: :concurrently })
disable_statement_timeout
end
unless index_exists?(table_name, column_name, options)
......@@ -95,7 +95,9 @@ module Gitlab
return
end
remove_index(table_name, options.merge({ column: column_name }))
disable_statement_timeout do
remove_index(table_name, options.merge({ column: column_name }))
end
end
# Removes an existing index, concurrently when supported
......@@ -116,7 +118,6 @@ module Gitlab
if supports_drop_index_concurrently?
options = options.merge({ algorithm: :concurrently })
disable_statement_timeout
end
unless index_exists_by_name?(table_name, index_name)
......@@ -124,7 +125,9 @@ module Gitlab
return
end
remove_index(table_name, options.merge({ name: index_name }))
disable_statement_timeout do
remove_index(table_name, options.merge({ name: index_name }))
end
end
# Only available on Postgresql >= 9.2
......@@ -171,8 +174,6 @@ module Gitlab
on_delete = 'SET NULL' if on_delete == :nullify
end
disable_statement_timeout
key_name = concurrent_foreign_key_name(source, column)
unless foreign_key_exists?(source, target, column: column)
......@@ -199,7 +200,9 @@ module Gitlab
# while running.
#
# Note this is a no-op in case the constraint is VALID already
execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{key_name};")
disable_statement_timeout do
execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{key_name};")
end
end
def foreign_key_exists?(source, target = nil, column: nil)
......@@ -224,8 +227,48 @@ module Gitlab
# Long-running migrations may take more than the timeout allowed by
# the database. Disable the session's statement timeout to ensure
# migrations don't get killed prematurely. (PostgreSQL only)
#
# There are two possible ways to disable the statement timeout:
#
# - Per transaction (this is the preferred and default mode)
# - Per connection (requires a cleanup after the execution)
#
# When using the per connection mode, the code must be wrapped in a block
# so we can automatically execute `RESET ALL` after the block finishes;
# otherwise the timeout stays disabled until the connection is dropped
# or `RESET ALL` is executed
def disable_statement_timeout
execute('SET statement_timeout TO 0') if Database.postgresql?
# Bypass the statement timeout logic when not using PostgreSQL, but still execute the block when one is given
unless Database.postgresql?
if block_given?
yield
end
return
end
if block_given?
begin
execute('SET statement_timeout TO 0')
yield
ensure
execute('RESET ALL')
end
else
unless transaction_open?
raise <<~ERROR
Cannot call disable_statement_timeout() without a transaction open or outside of a transaction block.
If you don't want to use a transaction, wrap your code in a block call:
disable_statement_timeout { # code that requires disabled statement here }
This will make sure statement_timeout is disabled before and reset after the block execution is finished.
ERROR
end
execute('SET LOCAL statement_timeout TO 0')
end
end
def true_value
......@@ -367,30 +410,30 @@ module Gitlab
'in the body of your migration class'
end
disable_statement_timeout
transaction do
if limit
add_column(table, column, type, default: nil, limit: limit)
else
add_column(table, column, type, default: nil)
disable_statement_timeout do
transaction do
if limit
add_column(table, column, type, default: nil, limit: limit)
else
add_column(table, column, type, default: nil)
end
# Changing the default before the update ensures any newly inserted
# rows already use the proper default value.
change_column_default(table, column, default)
end
# Changing the default before the update ensures any newly inserted
# rows already use the proper default value.
change_column_default(table, column, default)
end
begin
update_column_in_batches(table, column, default, &block)
begin
update_column_in_batches(table, column, default, &block)
change_column_null(table, column, false) unless allow_null
# We want to rescue _all_ exceptions here, even those that don't inherit
# from StandardError.
rescue Exception => error # rubocop: disable all
remove_column(table, column)
change_column_null(table, column, false) unless allow_null
# We want to rescue _all_ exceptions here, even those that don't inherit
# from StandardError.
rescue Exception => error # rubocop: disable all
remove_column(table, column)
raise error
raise error
end
end
end
......
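Taken together, the migration changes above switch every caller to the reworked helper: the bare call now uses `SET LOCAL` and therefore must run inside a transaction, while the block form disables the timeout for the whole session and issues `RESET ALL` afterwards, which is what `CREATE INDEX CONCURRENTLY` and similar statements need. A minimal sketch of the two call styles in a hypothetical migration (the class and index names are illustrative, not part of this commit):
```
# Hypothetical migration showing both modes of disable_statement_timeout.
class ExampleMigration < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  disable_ddl_transaction!

  def up
    # Block form: disable the timeout for the session, run the block, then
    # RESET ALL. Needed for statements that cannot run inside a transaction,
    # such as CREATE INDEX CONCURRENTLY.
    disable_statement_timeout do
      execute 'CREATE INDEX CONCURRENTLY tmp_example_index ON projects (LOWER(name))'
    end
  end

  def down
    # Bare form: SET LOCAL only affects the enclosing transaction, so the
    # disabled timeout cannot leak into later migrations.
    transaction do
      disable_statement_timeout
      execute 'DROP INDEX IF EXISTS tmp_example_index'
    end
  end
end
```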
......@@ -366,18 +366,9 @@ module Gitlab
end
end
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/1233
def new_commits(newrev)
gitaly_migrate(:new_commits) do |is_enabled|
if is_enabled
gitaly_ref_client.list_new_commits(newrev)
else
refs = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
rev_list(including: newrev, excluding: :all).split("\n").map(&:strip)
end
Gitlab::Git::Commit.batch_by_oid(self, refs)
end
wrapped_gitaly_errors do
gitaly_ref_client.list_new_commits(newrev)
end
end
......
require 'toml-rb'
module Gitlab
module SetupHelper
class << self
......@@ -9,7 +11,7 @@ module Gitlab
# because it uses a Unix socket.
# For development and testing purposes, an extra storage is added to gitaly,
# which is not known to Rails, but must be explicitly stubbed.
def gitaly_configuration_toml(gitaly_dir, gitaly_ruby: true)
def gitaly_configuration_toml(gitaly_dir, storage_paths, gitaly_ruby: true)
storages = []
address = nil
......@@ -24,10 +26,7 @@ module Gitlab
address = val['gitaly_address']
end
# https://gitlab.com/gitlab-org/gitaly/issues/1238
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
storages << { name: key, path: val.legacy_disk_path }
end
storages << { name: key, path: storage_paths[key] }
end
if Rails.env.test?
......@@ -44,12 +43,12 @@ module Gitlab
end
# rubocop:disable Rails/Output
def create_gitaly_configuration(dir, force: false)
def create_gitaly_configuration(dir, storage_paths, force: false)
config_path = File.join(dir, 'config.toml')
FileUtils.rm_f(config_path) if force
File.open(config_path, File::WRONLY | File::CREAT | File::EXCL) do |f|
f.puts gitaly_configuration_toml(dir)
f.puts gitaly_configuration_toml(dir, storage_paths)
end
rescue Errno::EEXIST
puts "Skipping config.toml generation:"
......
namespace :gitlab do
namespace :gitaly do
desc "GitLab | Install or upgrade gitaly"
task :install, [:dir, :repo] => :gitlab_environment do |t, args|
require 'toml-rb'
task :install, [:dir, :storage_path, :repo] => :gitlab_environment do |t, args|
warn_user_is_not_gitlab
unless args.dir.present?
abort %(Please specify the directory where you want to install gitaly:\n rake "gitlab:gitaly:install[/home/git/gitaly]")
unless args.dir.present? && args.storage_path.present?
abort %(Please specify the directory where you want to install gitaly and the path for the default storage
Usage: rake "gitlab:gitaly:install[/installation/dir,/storage/path]")
end
args.with_defaults(repo: 'https://gitlab.com/gitlab-org/gitaly.git')
......@@ -27,7 +26,8 @@ namespace :gitlab do
"BUNDLE_PATH=#{Bundler.bundle_path}")
end
Gitlab::SetupHelper.create_gitaly_configuration(args.dir)
storage_paths = { 'default' => args.storage_path }
Gitlab::SetupHelper.create_gitaly_configuration(args.dir, storage_paths)
Dir.chdir(args.dir) do
# In CI we run scripts/gitaly-test-build instead of this command
unless ENV['CI'].present?
......@@ -35,17 +35,5 @@ namespace :gitlab do
end
end
end
desc "GitLab | Print storage configuration in TOML format"
task storage_config: :environment do
require 'toml-rb'
puts "# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}"
puts "# This is in TOML format suitable for use in Gitaly's config.toml file."
# Exclude gitaly-ruby configuration because that depends on the gitaly
# installation directory.
puts Gitlab::SetupHelper.gitaly_configuration_toml('', gitaly_ruby: false)
end
end
end
......@@ -4067,7 +4067,14 @@ msgstr ""
msgid "Jobs"
msgstr ""
<<<<<<< HEAD
msgid "Job|This job is stuck, because the project doesn't have any runners online assigned to it."
=======
msgid "Job|Job has been erased"
msgstr ""
msgid "Job|Job has been erased by"
>>>>>>> upstream/master
msgstr ""
msgid "Jul"
......
......@@ -50,7 +50,7 @@ module QA
Page::Project::Pipeline::Show.perform do |pipeline|
expect(pipeline).to have_build('build', status: :success, wait: 600)
expect(pipeline).to have_build('test', status: :success, wait: 600)
expect(pipeline).to have_build('production', status: :success, wait: 600)
expect(pipeline).to have_build('production', status: :success, wait: 1200)
end
end
end
......
import Vue from 'vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
import component from '~/jobs/components/erased_block.vue';
import mountComponent from '../helpers/vue_mount_component_helper';
describe('Erased block', () => {
const Component = Vue.extend(component);
let vm;
const erasedAt = '2016-11-07T11:11:16.525Z';
const timeago = getTimeago();
const formatedDate = timeago.format(erasedAt);
afterEach(() => {
vm.$destroy();
});
describe('with job erased by user', () => {
beforeEach(() => {
vm = mountComponent(Component, {
erasedByUser: true,
username: 'root',
linkToUser: 'gitlab.com/root',
erasedAt,
});
});
it('renders username and link', () => {
expect(vm.$el.querySelector('a').getAttribute('href')).toEqual('gitlab.com/root');
expect(vm.$el.textContent).toContain('Job has been erased by');
expect(vm.$el.textContent).toContain('root');
});
it('renders erasedAt', () => {
expect(vm.$el.textContent).toContain(formatedDate);
});
});
describe('with erased job', () => {
beforeEach(() => {
vm = mountComponent(Component, {
erasedByUser: false,
erasedAt,
});
});
it('renders job erased message', () => {
expect(vm.$el.textContent).toContain('Job has been erased');
});
it('renders erasedAt', () => {
expect(vm.$el.textContent).toContain(formatedDate);
});
});
});
import Vue from 'vue';
import component from '~/jobs/components/job_log.vue';
import mountComponent from '../helpers/vue_mount_component_helper';
describe('Job Log', () => {
const Component = Vue.extend(component);
let vm;
const trace = 'Running with gitlab-runner 11.1.0 (081978aa)<br> on docker-auto-scale-com d5ae8d25<br>Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.4-golang-1.9-git-2.18-chrome-67.0-node-8.x-yarn-1.2-postgresql-9.6-graphicsmagick-1.3.29 ...<br>';
afterEach(() => {
vm.$destroy();
});
it('renders provided trace', () => {
vm = mountComponent(Component, {
trace,
isReceivingBuildTrace: true,
});
expect(vm.$el.querySelector('code').textContent).toContain('Running with gitlab-runner 11.1.0 (081978aa)');
});
describe('while receiving trace', () => {
it('renders animation', () => {
vm = mountComponent(Component, {
trace,
isReceivingBuildTrace: true,
});
expect(vm.$el.querySelector('.js-log-animation')).not.toBeNull();
});
});
describe('when build trace has finished', () => {
it('does not render animation', () => {
vm = mountComponent(Component, {
trace,
isReceivingBuildTrace: false,
});
expect(vm.$el.querySelector('.js-log-animation')).toBeNull();
});
});
});
......@@ -48,10 +48,10 @@ describe Gitlab::Database::MigrationHelpers do
allow(model).to receive(:transaction_open?).and_return(false)
end
context 'using PostgreSQL' do
context 'using PostgreSQL', :postgresql do
before do
allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
allow(model).to receive(:disable_statement_timeout)
allow(model).to receive(:disable_statement_timeout).and_call_original
end
it 'creates the index concurrently' do
......@@ -114,12 +114,12 @@ describe Gitlab::Database::MigrationHelpers do
before do
allow(model).to receive(:transaction_open?).and_return(false)
allow(model).to receive(:index_exists?).and_return(true)
allow(model).to receive(:disable_statement_timeout).and_call_original
end
context 'using PostgreSQL' do
before do
allow(model).to receive(:supports_drop_index_concurrently?).and_return(true)
allow(model).to receive(:disable_statement_timeout)
end
describe 'by column name' do
......@@ -162,7 +162,7 @@ describe Gitlab::Database::MigrationHelpers do
context 'using MySQL' do
it 'removes an index' do
expect(Gitlab::Database).to receive(:postgresql?).and_return(false)
expect(Gitlab::Database).to receive(:postgresql?).and_return(false).twice
expect(model).to receive(:remove_index)
.with(:users, { column: :foo })
......@@ -224,21 +224,26 @@ describe Gitlab::Database::MigrationHelpers do
context 'using PostgreSQL' do
before do
allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
allow(Gitlab::Database).to receive(:mysql?).and_return(false)
end
it 'creates a concurrent foreign key and validates it' do
expect(model).to receive(:disable_statement_timeout)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).ordered.with(/NOT VALID/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).with(/RESET ALL/)
model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
end
it 'appends a valid ON DELETE statement' do
expect(model).to receive(:disable_statement_timeout)
expect(model).to receive(:disable_statement_timeout).and_call_original
expect(model).to receive(:execute).with(/statement_timeout/)
expect(model).to receive(:execute).with(/ON DELETE SET NULL/)
expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
expect(model).to receive(:execute).with(/RESET ALL/)
model.add_concurrent_foreign_key(:projects, :users,
column: :user_id,
......@@ -291,13 +296,68 @@ describe Gitlab::Database::MigrationHelpers do
describe '#disable_statement_timeout' do
context 'using PostgreSQL' do
it 'disables statement timeouts' do
it 'disables statement timeout for the current transaction only' do
expect(Gitlab::Database).to receive(:postgresql?).and_return(true)
expect(model).to receive(:execute).with('SET statement_timeout TO 0')
expect(model).to receive(:execute).with('SET LOCAL statement_timeout TO 0')
model.disable_statement_timeout
end
# this spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
context 'with real environment', :postgresql, :delete do
before do
model.execute("SET statement_timeout TO '20000'")
end
after do
model.execute('RESET ALL')
end
it 'sets statement_timeout to 0 only for the current transaction' do
expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
model.connection.transaction do
model.disable_statement_timeout
expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
end
expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
end
end
context 'when passing a block' do
it 'disables statement timeouts on session level and executes the block' do
expect(Gitlab::Database).to receive(:postgresql?).and_return(true)
expect(model).to receive(:execute).with('SET statement_timeout TO 0')
expect(model).to receive(:execute).with('RESET ALL')
expect { |block| model.disable_statement_timeout(&block) }.to yield_control
end
# this spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
context 'with real environment', :postgresql, :delete do
before do
model.execute("SET statement_timeout TO '20000'")
end
after do
model.execute('RESET ALL')
end
it 'sets statement_timeout to 0 for any code run inside the block' do
expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
model.disable_statement_timeout do
model.connection.transaction do
expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
end
expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
end
end
end
end
end
context 'using MySQL' do
......@@ -308,6 +368,16 @@ describe Gitlab::Database::MigrationHelpers do
model.disable_statement_timeout
end
context 'when passing a block' do
it 'executes the block of code' do
expect(Gitlab::Database).to receive(:postgresql?).and_return(false)
expect(model).not_to receive(:execute)
expect { |block| model.disable_statement_timeout(&block) }.to yield_control
end
end
end
end
......
......@@ -296,41 +296,31 @@ describe Repository do
end
describe '#new_commits' do
shared_examples 'finding unreferenced commits' do
set(:project) { create(:project, :repository) }
let(:repository) { project.repository }
set(:project) { create(:project, :repository) }
let(:repository) { project.repository }
subject { repository.new_commits(rev) }
subject { repository.new_commits(rev) }
context 'when there are no new commits' do
let(:rev) { repository.commit.id }
context 'when there are no new commits' do
let(:rev) { repository.commit.id }
it 'returns an empty array' do
expect(subject).to eq([])
end
it 'returns an empty array' do
expect(subject).to eq([])
end
end
context 'when new commits are found' do
let(:branch) { 'orphaned-branch' }
let!(:rev) { repository.commit(branch).id }
context 'when new commits are found' do
let(:branch) { 'orphaned-branch' }
let!(:rev) { repository.commit(branch).id }
it 'returns the commits' do
repository.delete_branch(branch)
it 'returns the commits' do
repository.delete_branch(branch)
expect(subject).not_to be_empty
expect(subject).to all( be_a(::Commit) )
expect(subject.size).to eq(1)
end
expect(subject).not_to be_empty
expect(subject).to all( be_a(::Commit) )
expect(subject.size).to eq(1)
end
end
context 'when Gitaly handles the request' do
it_behaves_like 'finding unreferenced commits'
end
context 'when Gitaly is disabled', :disable_gitaly do
it_behaves_like 'finding unreferenced commits'
end
end
describe '#commits_by' do
......
......@@ -67,6 +67,7 @@ module TestEnv
TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**')
REPOS_STORAGE = 'default'.freeze
BROKEN_STORAGE = 'broken'.freeze
# Test environment
#
......@@ -157,10 +158,11 @@ module TestEnv
component_timed_setup('Gitaly',
install_dir: gitaly_dir,
version: Gitlab::GitalyClient.expected_server_version,
task: "gitlab:gitaly:install[#{gitaly_dir}]") do
task: "gitlab:gitaly:install[#{gitaly_dir},#{repos_path}]") do
# Always re-create config, in case it's outdated. This is fast anyway.
Gitlab::SetupHelper.create_gitaly_configuration(gitaly_dir, force: true)
# Re-create config, to specify the broken storage path
storage_paths = { 'default' => repos_path, 'broken' => broken_path }
Gitlab::SetupHelper.create_gitaly_configuration(gitaly_dir, storage_paths, force: true)
start_gitaly(gitaly_dir)
end
......@@ -256,6 +258,10 @@ module TestEnv
@repos_path ||= Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path
end
def broken_path
@broken_path ||= Gitlab.config.repositories.storages[BROKEN_STORAGE].legacy_disk_path
end
def backup_path
Gitlab.config.backup.path
end
......
......@@ -8,13 +8,23 @@ describe 'gitlab:gitaly namespace rake task' do
describe 'install' do
let(:repo) { 'https://gitlab.com/gitlab-org/gitaly.git' }
let(:clone_path) { Rails.root.join('tmp/tests/gitaly').to_s }
let(:storage_path) { Rails.root.join('tmp/tests/repositories').to_s }
let(:version) { File.read(Rails.root.join(Gitlab::GitalyClient::SERVER_VERSION_FILE)).chomp }
subject { run_rake_task('gitlab:gitaly:install', clone_path, storage_path) }
context 'no dir given' do
it 'aborts and displays a help message' do
# avoid writing task output to spec progress
allow($stderr).to receive :write
expect { run_rake_task('gitlab:gitaly:install') }.to raise_error /Please specify the directory where you want to install gitaly/
expect { run_rake_task('gitlab:gitaly:install') }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/
end
end
context 'no storage path given' do
it 'aborts and displays a help message' do
allow($stderr).to receive :write
expect { run_rake_task('gitlab:gitaly:install', clone_path) }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/
end
end
......@@ -23,7 +33,7 @@ describe 'gitlab:gitaly namespace rake task' do
expect(main_object)
.to receive(:checkout_or_clone_version).and_raise 'Git error'
expect { run_rake_task('gitlab:gitaly:install', clone_path) }.to raise_error 'Git error'
expect { subject }.to raise_error 'Git error'
end
end
......@@ -36,7 +46,7 @@ describe 'gitlab:gitaly namespace rake task' do
expect(main_object)
.to receive(:checkout_or_clone_version).with(version: version, repo: repo, target_dir: clone_path)
run_rake_task('gitlab:gitaly:install', clone_path)
subject
end
end
......@@ -59,7 +69,7 @@ describe 'gitlab:gitaly namespace rake task' do
expect(Gitlab::Popen).to receive(:popen).with(%w[which gmake]).and_return(['/usr/bin/gmake', 0])
expect(main_object).to receive(:run_command!).with(command_preamble + %w[gmake]).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path)
subject
end
end
......@@ -72,7 +82,7 @@ describe 'gitlab:gitaly namespace rake task' do
it 'calls make in the gitaly directory' do
expect(main_object).to receive(:run_command!).with(command_preamble + %w[make]).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path)
subject
end
context 'when Rails.env is test' do
......@@ -89,55 +99,10 @@ describe 'gitlab:gitaly namespace rake task' do
it 'calls make in the gitaly directory with --no-deployment flag for bundle' do
expect(main_object).to receive(:run_command!).with(command_preamble + command).and_return(true)
run_rake_task('gitlab:gitaly:install', clone_path)
subject
end
end
end
end
end
describe 'storage_config' do
it 'prints storage configuration in a TOML format' do
config = {
'default' => Gitlab::GitalyClient::StorageSettings.new(
'path' => '/path/to/default',
'gitaly_address' => 'unix:/path/to/my.socket'
),
'nfs_01' => Gitlab::GitalyClient::StorageSettings.new(
'path' => '/path/to/nfs_01',
'gitaly_address' => 'unix:/path/to/my.socket'
)
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(config)
allow(Rails.env).to receive(:test?).and_return(false)
expected_output = ''
Timecop.freeze do
expected_output = <<~TOML
# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}
# This is in TOML format suitable for use in Gitaly's config.toml file.
bin_dir = "tmp/tests/gitaly"
socket_path = "/path/to/my.socket"
[gitlab-shell]
dir = "#{Gitlab.config.gitlab_shell.path}"
[[storage]]
name = "default"
path = "/path/to/default"
[[storage]]
name = "nfs_01"
path = "/path/to/nfs_01"
TOML
end
expect { run_rake_task('gitlab:gitaly:storage_config')}
.to output(expected_output).to_stdout
parsed_output = TomlRB.parse(expected_output)
config.each do |name, params|
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params.legacy_disk_path })
end
end
end
end
end
......@@ -641,7 +641,7 @@ rollout 100%:
function install_dependencies() {
apk add -U openssl curl tar gzip bash ca-certificates git
wget -q -O /etc/apk/keys/sgerrand.rsa.pub https://raw.githubusercontent.com/sgerrand/alpine-pkg-glibc/master/sgerrand.rsa.pub
wget -q -O /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub
wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/2.23-r3/glibc-2.23-r3.apk
apk add glibc-2.23-r3.apk
rm glibc-2.23-r3.apk
......
# Full project: https://gitlab.com/pages/middleman
image: ruby:2.3
image: ruby:2.4
variables:
LANG: "C.UTF-8"
cache:
paths:
- vendor
test:
script:
before_script:
- apt-get update -yqqq
- apt-get install -y nodejs
- bundle install --path vendor
test:
script:
- bundle exec middleman build
except:
- master
pages:
script:
- apt-get update -yqqq
- apt-get install -y nodejs
- bundle install --path vendor
- bundle exec middleman build
artifacts:
paths:
......