Commit 68f4b26e authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-08-14

# Conflicts:
#	app/services/notification_recipient_service.rb
#	doc/user/project/issue_board.md
#	locale/gitlab.pot

[ci skip]
parents 3ff95c3c e610b41e
<script>
import TimeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';

export default {
  components: {
    TimeagoTooltip,
  },
  props: {
    erasedByUser: {
      type: Boolean,
      required: true,
    },
    username: {
      type: String,
      required: false,
      default: null,
    },
    linkToUser: {
      type: String,
      required: false,
      default: null,
    },
    erasedAt: {
      type: String,
      required: true,
    },
  },
};
</script>

<template>
  <div class="prepend-top-default js-build-erased">
    <div class="erased alert alert-warning">
      <template v-if="erasedByUser">
        {{ s__("Job|Job has been erased by") }}
        <a :href="linkToUser">
          {{ username }}
        </a>
      </template>
      <template v-else>
        {{ s__("Job|Job has been erased") }}
      </template>

      <timeago-tooltip
        :time="erasedAt"
      />
    </div>
  </div>
</template>
<script>
export default {
  name: 'JobLog',
  props: {
    trace: {
      type: String,
      required: true,
    },
    isReceivingBuildTrace: {
      type: Boolean,
      required: true,
    },
  },
};
</script>

<template>
  <pre class="build-trace">
    <code
      class="bash"
      v-html="trace"
    >
    </code>

    <div
      v-if="isReceivingBuildTrace"
      class="js-log-animation build-loader-animation"
    >
      <div class="dot"></div>
      <div class="dot"></div>
      <div class="dot"></div>
    </div>
  </pre>
</template>
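For orientation, here is a minimal sketch of how a parent job view could render these two new components. The parent markup and the shape of the `job` object are assumptions for illustration, not part of this merge:

```html
<!-- Hypothetical parent template; the `job` object shape is an assumption. -->
<erased-block
  v-if="job.erased_at"
  :erased-by-user="Boolean(job.erased_by)"
  :username="job.erased_by ? job.erased_by.username : null"
  :link-to-user="job.erased_by ? job.erased_by.path : null"
  :erased-at="job.erased_at"
/>
<job-log
  :trace="trace"
  :is-receiving-build-trace="isReceivingBuildTrace"
/>
```

The kebab-case attributes map onto the camelCase props declared above (`erasedByUser`, `linkToUser`, and so on), which is standard Vue prop binding.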
# frozen_string_literal: true
module Ci
  class BuildRunnerPresenter < SimpleDelegator
    def artifacts
......
# frozen_string_literal: true

class ProjectMirrorSerializer < BaseSerializer
  entity ProjectMirrorEntity
end
# frozen_string_literal: true

class TestCaseEntity < Grape::Entity
  expose :status
  expose :name
......
# frozen_string_literal: true

class TestReportsComparerEntity < Grape::Entity
  expose :total_status, as: :status
......
# frozen_string_literal: true

class TestReportsComparerSerializer < BaseSerializer
  entity TestReportsComparerEntity
end
# frozen_string_literal: true

class TestSuiteComparerEntity < Grape::Entity
  expose :name
  expose :total_status, as: :status
......
@@ -220,8 +220,11 @@ module NotificationRecipientService
  end

  class Default < Base
<<<<<<< HEAD
    prepend ::EE::NotificationRecipientBuilders::Default
=======
>>>>>>> upstream/master
    MENTION_TYPE_ACTIONS = [:new_issue, :new_merge_request].freeze

    attr_reader :target
......
# frozen_string_literal: true
module Projects
  class DetectRepositoryLanguagesService < BaseService
    attr_reader :detected_repository_languages, :programming_languages
......
# frozen_string_literal: true

module Todos
  module Destroy
    class BaseService
......
# frozen_string_literal: true

module Todos
  module Destroy
    class ConfidentialIssueService < ::Todos::Destroy::BaseService
......
# frozen_string_literal: true

module Todos
  module Destroy
    class EntityLeaveService < ::Todos::Destroy::BaseService
......
# frozen_string_literal: true

module Todos
  module Destroy
    class GroupPrivateService < ::Todos::Destroy::BaseService
......
# frozen_string_literal: true

module Todos
  module Destroy
    class PrivateFeaturesService < ::Todos::Destroy::BaseService
......
# frozen_string_literal: true

module Todos
  module Destroy
    class ProjectPrivateService < ::Todos::Destroy::BaseService
......
@@ -17,6 +17,6 @@
        %th Primary Action
        %th
    = render @spam_logs
    = paginate @spam_logs, theme: 'gitlab'
- else
  %h4 There are no Spam Logs
# frozen_string_literal: true
class DetectRepositoryLanguagesWorker
  include ApplicationWorker
  include ExceptionBacktrace
......
# frozen_string_literal: true

module TodosDestroyer
  class ConfidentialIssueWorker
    include ApplicationWorker
......
# frozen_string_literal: true

module TodosDestroyer
  class EntityLeaveWorker
    include ApplicationWorker
......
# frozen_string_literal: true

module TodosDestroyer
  class GroupPrivateWorker
    include ApplicationWorker
......
# frozen_string_literal: true

module TodosDestroyer
  class PrivateFeaturesWorker
    include ApplicationWorker
......
# frozen_string_literal: true

module TodosDestroyer
  class ProjectPrivateWorker
    include ApplicationWorker
......
---
title: disable_statement_timeout no longer leaks to other migrations
merge_request: 20503
author:
type: fixed
---
title: Add gitlab theme to spam logs pagination
merge_request: 21145
author:
type: fixed
---
title: Creates vue component for erased block on job view
merge_request:
author:
type: other
---
title: Creates vue component for job log trace
merge_request:
author:
type: other
---
title: 'Auto-DevOps.gitlab-ci.yml: Update glibc package signing key URL'
merge_request: 21182
author: sgerrand
type: fixed
---
title: Enable frozen string in vestigial app files
merge_request:
author: gfyoung
type: performance
---
title: Remove storage path dependency of gitaly install task
merge_request: 21101
author:
type: changed
@@ -106,11 +106,11 @@ class ProjectForeignKeysWithCascadingDeletes < ActiveRecord::Migration
      # Disables statement timeouts for the current connection. This is
      # necessary as removing of orphaned data might otherwise exceed the
      # statement timeout.
      disable_statement_timeout do
        remove_orphans(*queue.pop) until queue.empty?

        steal_from_queues(queues - [queue])
      end
    end
  end
end
......
@@ -25,8 +25,9 @@ class AddLowerPathIndexToRedirectRoutes < ActiveRecord::Migration
    # trivial to write a query that checks for an index. BUT there is a
    # convenient `IF EXISTS` parameter for `DROP INDEX`.
    if supports_drop_index_concurrently?
      disable_statement_timeout do
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME};"
      end
    else
      execute "DROP INDEX IF EXISTS #{INDEX_NAME};"
    end
......
@@ -8,25 +8,25 @@ class AddIndexOnNamespacesLowerName < ActiveRecord::Migration
  def up
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      if Gitlab::Database.version.to_f >= 9.5
        # Allow us to hot-patch the index manually ahead of the migration
        execute "CREATE INDEX CONCURRENTLY IF NOT EXISTS #{INDEX_NAME} ON namespaces (lower(name));"
      else
        execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON namespaces (lower(name));"
      end
    end
  end

  def down
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      if Gitlab::Database.version.to_f >= 9.2
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME};"
      else
        execute "DROP INDEX IF EXISTS #{INDEX_NAME};"
      end
    end
  end
end
@@ -18,51 +18,51 @@ class ReworkRedirectRoutesIndexes < ActiveRecord::Migration
  OLD_INDEX_NAME_PATH_LOWER = "index_on_redirect_routes_lower_path"

  def up
    disable_statement_timeout do
      # this is a plain btree on a single boolean column. It'll never be
      # selective enough to be valuable. This class is called by
      # setup_postgresql.rake so it needs to be able to handle this
      # index not existing.
      if index_exists?(:redirect_routes, :permanent)
        remove_concurrent_index(:redirect_routes, :permanent)
      end

      # If we're on MySQL then the existing index on path is ok. But on
      # Postgres we need to clean things up:
      break unless Gitlab::Database.postgresql?

      if_not_exists = Gitlab::Database.version.to_f >= 9.5 ? "IF NOT EXISTS" : ""

      # Unique index on lower(path) across both types of redirect_routes:
      execute("CREATE UNIQUE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_UNIQUE} ON redirect_routes (lower(path) varchar_pattern_ops);")

      # Make two indexes on path -- one for permanent and one for temporary routes:
      execute("CREATE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_PERM} ON redirect_routes (lower(path) varchar_pattern_ops) where (permanent);")
      execute("CREATE INDEX CONCURRENTLY #{if_not_exists} #{INDEX_NAME_TEMP} ON redirect_routes (lower(path) varchar_pattern_ops) where (not permanent or permanent is null) ;")

      # Remove the old indexes:
      # This one needed to be on lower(path) but wasn't so it's replaced with the two above
      execute "DROP INDEX CONCURRENTLY IF EXISTS #{OLD_INDEX_NAME_PATH_TPOPS};"

      # This one isn't needed because we only ever do = and LIKE on this
      # column so the varchar_pattern_ops index is sufficient
      execute "DROP INDEX CONCURRENTLY IF EXISTS #{OLD_INDEX_NAME_PATH_LOWER};"
    end
  end

  def down
    disable_statement_timeout do
      add_concurrent_index(:redirect_routes, :permanent)

      break unless Gitlab::Database.postgresql?

      execute("CREATE INDEX CONCURRENTLY #{OLD_INDEX_NAME_PATH_TPOPS} ON redirect_routes (path varchar_pattern_ops);")
      execute("CREATE INDEX CONCURRENTLY #{OLD_INDEX_NAME_PATH_LOWER} ON redirect_routes (LOWER(path));")

      execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_UNIQUE};")
      execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};")
      execute("DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};")
    end
  end
end
@@ -13,16 +13,16 @@ class CreateProjectCiCdSettings < ActiveRecord::Migration
      end
    end

    disable_statement_timeout do
      # This particular INSERT will take between 10 and 20 seconds.
      execute 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects'

      # We add the index and foreign key separately so the above INSERT statement
      # takes as little time as possible.
      add_concurrent_index(:project_ci_cd_settings, :project_id, unique: true)

      add_foreign_key_with_retry
    end
  end

  def down
......
@@ -14,48 +14,50 @@ class CleanupBuildStageMigration < ActiveRecord::Migration
  end

  def up
    disable_statement_timeout do
      ##
      # We steal from the background migrations queue to catch up with the
      # scheduled migrations set.
      #
      Gitlab::BackgroundMigration.steal('MigrateBuildStage')

      ##
      # We add temporary index, to make iteration over batches more performant.
      # Conditional here is to avoid the need of doing that in a separate
      # migration file to make this operation idempotent.
      #
      unless index_exists_by_name?(:ci_builds, TMP_INDEX)
        add_concurrent_index(:ci_builds, :id, where: 'stage_id IS NULL', name: TMP_INDEX)
      end

      ##
      # We check if there are remaining rows that should be migrated (for example
      # if Sidekiq / Redis fails / is restarted, what could result in not all
      # background migrations being executed correctly.
      #
      # We migrate remaining rows synchronously in a blocking way, to make sure
      # that when this migration is done we are confident that all rows are
      # already migrated.
      #
      Build.where('stage_id IS NULL').each_batch(of: 50) do |batch|
        range = batch.pluck('MIN(id)', 'MAX(id)').first

        Gitlab::BackgroundMigration::MigrateBuildStage.new.perform(*range)
      end

      ##
      # We remove temporary index, because it is not required during standard
      # operations and runtime.
      #
      remove_concurrent_index_by_name(:ci_builds, TMP_INDEX)
    end
  end

  def down
    if index_exists_by_name?(:ci_builds, TMP_INDEX)
      disable_statement_timeout do
        remove_concurrent_index_by_name(:ci_builds, TMP_INDEX)
      end
    end
  end
end
@@ -13,20 +13,20 @@ class ProjectNameLowerIndex < ActiveRecord::Migration
  def up
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON projects (LOWER(name))"
    end
  end

  def down
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      if supports_drop_index_concurrently?
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME}"
      else
        execute "DROP INDEX IF EXISTS #{INDEX_NAME}"
      end
    end
  end
end
@@ -28,16 +28,16 @@ class RemoveOrphanedRoutes < ActiveRecord::Migration
    # which is pretty close to our 15 second statement timeout. To ensure a
    # smooth deployment procedure we disable the statement timeouts for this
    # migration, just in case.
    disable_statement_timeout do
      # On GitLab.com there are around 4000 orphaned project routes, and around
      # 150 orphaned namespace routes.
      [
        Route.orphaned_project_routes,
        Route.orphaned_namespace_routes
      ].each do |relation|
        relation.each_batch(of: 1_000) do |batch|
          batch.delete_all
        end
      end
    end
  end
......
@@ -29,18 +29,20 @@ class CompositePrimaryKeysMigration < ActiveRecord::Migration
  def up
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      TABLES.each do |index|
        add_primary_key(index)
      end
    end
  end

  def down
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      TABLES.each do |index|
        remove_primary_key(index)
      end
    end
  end
......
@@ -8,9 +8,9 @@ class EnableAutoCancelPendingPipelinesForAll < ActiveRecord::Migration
  DOWNTIME = false

  def up
    disable_statement_timeout do
      update_column_in_batches(:projects, :auto_cancel_pending_pipelines, 1)
    end
  end

  def down
......
@@ -7,12 +7,12 @@ class UpdateRetriedForCiBuild < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
    if Gitlab::Database.mysql?
      up_mysql
    else
      disable_statement_timeout do
        up_postgres
      end
    end
  end
......
@@ -7,20 +7,20 @@ class AddHeadPipelineForEachMergeRequest < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
    pipelines = Arel::Table.new(:ci_pipelines)
    merge_requests = Arel::Table.new(:merge_requests)

    disable_statement_timeout do
      head_id = pipelines
        .project(Arel::Nodes::NamedFunction.new('max', [pipelines[:id]]))
        .from(pipelines)
        .where(pipelines[:ref].eq(merge_requests[:source_branch]))
        .where(pipelines[:project_id].eq(merge_requests[:source_project_id]))

      sub_query = Arel::Nodes::SqlLiteral.new(Arel::Nodes::Grouping.new(head_id).to_sql)

      update_column_in_batches(:merge_requests, :head_pipeline_id, sub_query)
    end
  end

  def down
......
@@ -87,16 +87,16 @@ class RenameAllReservedPathsAgain < ActiveRecord::Migration
  ].freeze

  def up
    disable_statement_timeout do
      TOP_LEVEL_ROUTES.each { |route| rename_root_paths(route) }
      PROJECT_WILDCARD_ROUTES.each { |route| rename_wildcard_paths(route) }
      GROUP_ROUTES.each { |route| rename_child_paths(route) }
    end
  end

  def down
    disable_statement_timeout do
      revert_renames
    end
  end
end
@@ -6,17 +6,17 @@ class MigratePipelineStages < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
    disable_statement_timeout do
      execute <<-SQL.strip_heredoc
        INSERT INTO ci_stages (project_id, pipeline_id, name)
        SELECT project_id, commit_id, stage FROM ci_builds
        WHERE stage IS NOT NULL
        AND stage_id IS NULL
        AND EXISTS (SELECT 1 FROM projects WHERE projects.id = ci_builds.project_id)
        AND EXISTS (SELECT 1 FROM ci_pipelines WHERE ci_pipelines.id = ci_builds.commit_id)
        GROUP BY project_id, commit_id, stage
        ORDER BY MAX(stage_idx)
      SQL
    end
  end
end
@@ -7,22 +7,22 @@ class MigrateBuildStageReferenceAgain < ActiveRecord::Migration
  disable_ddl_transaction!

  def up
    stage_id = Arel.sql <<-SQL.strip_heredoc
      (SELECT id FROM ci_stages
        WHERE ci_stages.pipeline_id = ci_builds.commit_id
        AND ci_stages.name = ci_builds.stage)
    SQL

    disable_statement_timeout do
      update_column_in_batches(:ci_builds, :stage_id, stage_id) do |table, query|
        query.where(table[:stage_id].eq(nil))
      end
    end
  end

  def down
    disable_statement_timeout do
      update_column_in_batches(:ci_builds, :stage_id, nil)
    end
  end
end
@@ -26,9 +26,9 @@ class MigrateStagesStatuses < ActiveRecord::Migration
  end

  def down
    disable_statement_timeout do
      # rubocop:disable Migration/UpdateLargeTable
      update_column_in_batches(:ci_stages, :status, nil)
    end
  end
end
@@ -78,12 +78,12 @@ class RemoveSoftRemovedObjects < ActiveRecord::Migration
  MODELS = [Issue, MergeRequest, CiPipelineSchedule, CiTrigger].freeze

  def up
    disable_statement_timeout do
      remove_personal_routes
      remove_personal_namespaces
      remove_group_namespaces
      remove_simple_soft_removed_rows
    end
  end

  def down
......
@@ -38,29 +38,29 @@ class RemoveRedundantPipelineStages < ActiveRecord::Migration
  end

  def remove_redundant_pipeline_stages!
    disable_statement_timeout do
      redundant_stages_ids = <<~SQL
        SELECT id FROM ci_stages WHERE (pipeline_id, name) IN (
          SELECT pipeline_id, name FROM ci_stages
          GROUP BY pipeline_id, name HAVING COUNT(*) > 1
        )
      SQL

      execute <<~SQL
        UPDATE ci_builds SET stage_id = NULL WHERE stage_id IN (#{redundant_stages_ids})
      SQL

      if Gitlab::Database.postgresql?
        execute <<~SQL
          DELETE FROM ci_stages WHERE id IN (#{redundant_stages_ids})
        SQL
      else # We can't modify a table we are selecting from on MySQL
        execute <<~SQL
          DELETE a FROM ci_stages AS a, ci_stages AS b
          WHERE a.pipeline_id = b.pipeline_id AND a.name = b.name
          AND a.id <> b.id
        SQL
      end
    end
  end
end
@@ -15,10 +15,10 @@ class RemovePermanentFromRedirectRoutes < ActiveRecord::Migration
    # ReworkRedirectRoutesIndexes:
    # https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/16211
    if Gitlab::Database.postgresql?
      disable_statement_timeout do
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_PERM};"
        execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME_TEMP};"
      end
    end

    remove_column(:redirect_routes, :permanent)
@@ -28,10 +28,10 @@ class RemovePermanentFromRedirectRoutes < ActiveRecord::Migration
    add_column(:redirect_routes, :permanent, :boolean)

    if Gitlab::Database.postgresql?
      disable_statement_timeout do
        execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_PERM} ON redirect_routes (lower(path) varchar_pattern_ops) where (permanent);")
        execute("CREATE INDEX CONCURRENTLY #{INDEX_NAME_TEMP} ON redirect_routes (lower(path) varchar_pattern_ops) where (not permanent or permanent is null) ;")
      end
    end
  end
end
@@ -20,10 +20,10 @@ class AddPathIndexToRedirectRoutes < ActiveRecord::Migration
  def up
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      unless index_exists_by_name?(:redirect_routes, INDEX_NAME)
        execute("CREATE UNIQUE INDEX CONCURRENTLY #{INDEX_NAME} ON redirect_routes (lower(path) varchar_pattern_ops);")
      end
    end
  end
......
@@ -17,13 +17,13 @@ class RescheduleBuildsStagesMigration < ActiveRecord::Migration
  end

  def up
    disable_statement_timeout do
      Build.where('stage_id IS NULL').tap do |relation|
        queue_background_migration_jobs_by_range_at_intervals(relation,
                                                              MIGRATION,
                                                              5.minutes,
                                                              batch_size: BATCH_SIZE)
      end
    end
  end
......
@@ -13,13 +13,13 @@ class ScheduleStagesIndexMigration < ActiveRecord::Migration
  end

  def up
    disable_statement_timeout do
      Stage.all.tap do |relation|
        queue_background_migration_jobs_by_range_at_intervals(relation,
                                                              MIGRATION,
                                                              5.minutes,
                                                              batch_size: BATCH_SIZE)
      end
    end
  end
......
@@ -12,32 +12,34 @@ class CleanupStagesPositionMigration < ActiveRecord::Migration
  end

  def up
    disable_statement_timeout do
      Gitlab::BackgroundMigration.steal('MigrateStageIndex')

      unless index_exists_by_name?(:ci_stages, TMP_INDEX_NAME)
        add_concurrent_index(:ci_stages, :id, where: 'position IS NULL', name: TMP_INDEX_NAME)
      end

      migratable = <<~SQL
        position IS NULL AND EXISTS (
          SELECT 1 FROM ci_builds WHERE stage_id = ci_stages.id AND stage_idx IS NOT NULL
        )
      SQL

      Stages.where(migratable).each_batch(of: 1000) do |batch|
        batch.pluck(:id).each do |stage|
          Gitlab::BackgroundMigration::MigrateStageIndex.new.perform(stage, stage)
        end
      end

      remove_concurrent_index_by_name(:ci_stages, TMP_INDEX_NAME)
    end
  end

  def down
    if index_exists_by_name?(:ci_stages, TMP_INDEX_NAME)
      disable_statement_timeout do
        remove_concurrent_index_by_name(:ci_stages, TMP_INDEX_NAME)
      end
    end
  end
end
@@ -255,7 +255,7 @@ Example response:

Get a list of visible events for a particular project.

```
GET /projects/:project_id/events
```

Parameters:
......
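For quick reference, a request against this endpoint typically looks like the following; the host, token, and project ID are placeholders:

```sh
curl --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/projects/5/events"
```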
@@ -153,7 +153,7 @@ page](https://golang.org/dl).
    # Remove former Go installation folder
    sudo rm -rf /usr/local/go

    curl --remote-name --progress https://dl.google.com/go/go1.10.3.linux-amd64.tar.gz
    echo 'fa1b0e45d3b647c252f51f5e1204aba049cde4af177ef9f2181f43004f901035 go1.10.3.linux-amd64.tar.gz' | shasum -a256 -c - && \
      sudo tar -C /usr/local -xzf go1.10.3.linux-amd64.tar.gz
@@ -494,11 +494,11 @@ Make GitLab start on boot:

### Install Gitaly

    # Fetch Gitaly source with Git and compile with Go
    sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly,/home/git/repositories]" RAILS_ENV=production

You can specify a different Git repository by providing it as an extra parameter:

    sudo -u git -H bundle exec rake "gitlab:gitaly:install[/home/git/gitaly,/home/git/repositories,https://example.com/gitaly.git]" RAILS_ENV=production

Next, make sure that Gitaly is configured:
......
@@ -122,7 +122,10 @@ Issue Board, that is, create or delete lists and drag issues from one list to another.

- **List** - A column on the issue board that displays issues matching certain attributes. In addition to the default lists of 'Open' and 'Closed' issue, each additional list will show issues matching your chosen label or assignee. On the top of that list you can see the number of issues that belong to it.
  - **Label list**: a list based on a label. It shows all opened issues with that label.
  - **Assignee list**: a list which includes all issues assigned to a user.
<<<<<<< HEAD
  - **Milestone list**: a list which includes all issues with that milestone.
=======
>>>>>>> upstream/master
  - **Open** (default): shows all open issues that do not belong to one of the other lists. Always appears as the leftmost list.
  - **Closed** (default): shows all closed issues. Always appears as the rightmost list.
- **Card** - A box in the list that represents an individual issue. The information you can see on a card consists of the issue number, the issue title, the assignee, and the labels associated with the issue. You can drag cards from one list to another to change their label or assignee from that of the source list to that of the destination list.

@@ -380,6 +383,7 @@ As on another list types, click on the trash icon to remove it.

When dragging issues between lists, different behavior occurs depending on the source list and the target list.

<<<<<<< HEAD
| | To Open | To Closed | To label `B` list | To assignee `Bob` list | To milestone `2.0` list |
| --- | --- | --- | --- | --- | --- |
| From Open | - | Issue closed | `B` added | `Bob` assigned | `2.0` added |

@@ -387,6 +391,14 @@ When dragging issues between lists, different behavior occurs depending on the source list and the target list.

| From label `A` list | `A` removed | Issue closed | `A` removed<br/>`B` added | `Bob` assigned | `2.0` added |
| From assignee `Alice` list | `Alice` unassigned | Issue closed | `B` added | `Alice` unassigned<br/>`Bob` assigned | `2.0` added |
| From milestone `1.0` list | `1.0` removed | Issue closed | `B` added | `Bob` assigned<br/> | `1.0` removed<br/>`2.0` added |
=======
| | To Open | To Closed | To label `B` list | To assignee `Bob` list |
| --- | --- | --- | --- | --- |
| From Open | - | Issue closed | `B` added | `Bob` assigned |
| From Closed | Issue reopened | - | Issue reopened<br/>`B` added | Issue reopened<br/>`Bob` assigned |
| From label `A` list | `A` removed | Issue closed | `A` removed<br/>`B` added | `Bob` assigned |
| From assignee `Alice` list | `Alice` unassigned | Issue closed | `B` added | `Alice` unassigned<br/>`Bob` assigned |
>>>>>>> upstream/master

## Features per tier
......
@@ -58,7 +58,6 @@ module Gitlab
      if Database.postgresql?
        options = options.merge({ algorithm: :concurrently })
      end

      if index_exists?(table_name, column_name, options)
@@ -66,7 +65,9 @@ module Gitlab
        return
      end

      disable_statement_timeout do
        add_index(table_name, column_name, options)
      end
    end

    # Removes an existed index, concurrently when supported
@@ -87,7 +88,6 @@ module Gitlab
      if supports_drop_index_concurrently?
        options = options.merge({ algorithm: :concurrently })
      end

      unless index_exists?(table_name, column_name, options)
@@ -95,7 +95,9 @@ module Gitlab
        return
      end

      disable_statement_timeout do
        remove_index(table_name, options.merge({ column: column_name }))
      end
    end

    # Removes an existing index, concurrently when supported
@@ -116,7 +118,6 @@ module Gitlab
      if supports_drop_index_concurrently?
        options = options.merge({ algorithm: :concurrently })
      end

      unless index_exists_by_name?(table_name, index_name)
@@ -124,7 +125,9 @@ module Gitlab
        return
      end

      disable_statement_timeout do
        remove_index(table_name, options.merge({ name: index_name }))
      end
    end

    # Only available on Postgresql >= 9.2
@@ -171,8 +174,6 @@ module Gitlab
        on_delete = 'SET NULL' if on_delete == :nullify
      end

      key_name = concurrent_foreign_key_name(source, column)

      unless foreign_key_exists?(source, target, column: column)
@@ -199,7 +200,9 @@ module Gitlab
      # while running.
      #
      # Note this is a no-op in case the constraint is VALID already
      disable_statement_timeout do
        execute("ALTER TABLE #{source} VALIDATE CONSTRAINT #{key_name};")
      end
    end

    def foreign_key_exists?(source, target = nil, column: nil)
@@ -224,8 +227,48 @@ module Gitlab
    # Long-running migrations may take more than the timeout allowed by
    # the database. Disable the session's statement timeout to ensure
    # migrations don't get killed prematurely. (PostgreSQL only)
    #
    # There are two possible ways to disable the statement timeout:
    #
    # - Per transaction (this is the preferred and default mode)
    # - Per connection (requires a cleanup after the execution)
    #
    # When using the per-connection mode, the code must be passed in a block so
    # that `RESET ALL` can be executed automatically once the block finishes;
    # otherwise the statement timeout stays disabled until the connection is
    # dropped or `RESET ALL` is executed manually.
    def disable_statement_timeout
      # Bypass the disable_statement_timeout logic when not using PostgreSQL,
      # but still execute the block when one is given.
      unless Database.postgresql?
        if block_given?
          yield
        end

        return
      end

      if block_given?
        begin
          execute('SET statement_timeout TO 0')

          yield
        ensure
          execute('RESET ALL')
        end
      else
        unless transaction_open?
          raise <<~ERROR
            Cannot call disable_statement_timeout() without a transaction open or outside of a transaction block.
            If you don't want to use a transaction wrap your code in a block call:

            disable_statement_timeout { # code that requires disabled statement here }

            This will make sure statement_timeout is disabled before and reset after the block execution is finished.
          ERROR
        end

        execute('SET LOCAL statement_timeout TO 0')
      end
    end

    def true_value
@@ -367,30 +410,30 @@ module Gitlab
          'in the body of your migration class'
      end

      disable_statement_timeout do
        transaction do
          if limit
            add_column(table, column, type, default: nil, limit: limit)
          else
            add_column(table, column, type, default: nil)
          end

          # Changing the default before the update ensures any newly inserted
          # rows already use the proper default value.
          change_column_default(table, column, default)
        end

        begin
          update_column_in_batches(table, column, default, &block)

          change_column_null(table, column, false) unless allow_null
        # We want to rescue _all_ exceptions here, even those that don't inherit
        # from StandardError.
        rescue Exception => error # rubocop: disable all
          remove_column(table, column)

          raise error
        end
      end
    end
......
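To illustrate the block form of `disable_statement_timeout` introduced above, here is a minimal sketch of a migration using it; the class, table, and index names are hypothetical and only meant to mirror the pattern used by the migrations in this merge:

```ruby
# frozen_string_literal: true

# Hypothetical example migration; table and index names are illustrative only.
class AddLowerPathIndexToExamples < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false
  INDEX_NAME = 'index_examples_on_lower_path'

  disable_ddl_transaction!

  def up
    return unless Gitlab::Database.postgresql?

    # The timeout is disabled only around this block and reset afterwards,
    # so it no longer leaks into later migrations on the same connection.
    disable_statement_timeout do
      execute "CREATE INDEX CONCURRENTLY #{INDEX_NAME} ON examples (LOWER(path))"
    end
  end

  def down
    return unless Gitlab::Database.postgresql?

    disable_statement_timeout do
      execute "DROP INDEX CONCURRENTLY IF EXISTS #{INDEX_NAME}"
    end
  end
end
```

When no block is given, the helper now requires an open transaction and issues `SET LOCAL statement_timeout TO 0`, which PostgreSQL scopes to that transaction.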
@@ -366,18 +366,9 @@ module Gitlab
      end
    end

-    # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/1233
    def new_commits(newrev)
-      gitaly_migrate(:new_commits) do |is_enabled|
-        if is_enabled
-          gitaly_ref_client.list_new_commits(newrev)
-        else
-          refs = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
-            rev_list(including: newrev, excluding: :all).split("\n").map(&:strip)
-          end
-          Gitlab::Git::Commit.batch_by_oid(self, refs)
-        end
+      wrapped_gitaly_errors do
+        gitaly_ref_client.list_new_commits(newrev)
      end
    end
......
require 'toml-rb'

module Gitlab
  module SetupHelper
    class << self
@@ -9,7 +11,7 @@ module Gitlab
      # because it uses a Unix socket.
      # For development and testing purposes, an extra storage is added to gitaly,
      # which is not known to Rails, but must be explicitly stubbed.
-      def gitaly_configuration_toml(gitaly_dir, gitaly_ruby: true)
+      def gitaly_configuration_toml(gitaly_dir, storage_paths, gitaly_ruby: true)
        storages = []
        address = nil
@@ -24,10 +26,7 @@ module Gitlab
          address = val['gitaly_address']
        end

-        # https://gitlab.com/gitlab-org/gitaly/issues/1238
-        Gitlab::GitalyClient::StorageSettings.allow_disk_access do
-          storages << { name: key, path: val.legacy_disk_path }
-        end
+        storages << { name: key, path: storage_paths[key] }
      end

      if Rails.env.test?
@@ -44,12 +43,12 @@ module Gitlab
      end

      # rubocop:disable Rails/Output
-      def create_gitaly_configuration(dir, force: false)
+      def create_gitaly_configuration(dir, storage_paths, force: false)
        config_path = File.join(dir, 'config.toml')
        FileUtils.rm_f(config_path) if force

        File.open(config_path, File::WRONLY | File::CREAT | File::EXCL) do |f|
-          f.puts gitaly_configuration_toml(dir)
+          f.puts gitaly_configuration_toml(dir, storage_paths)
        end
      rescue Errno::EEXIST
        puts "Skipping config.toml generation:"
......
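For context, a minimal sketch of how the updated helper might be invoked with the new `storage_paths` argument; the installation directory and storage path below are placeholders:

```ruby
# Hypothetical invocation; directory and storage path are placeholders.
storage_paths = { 'default' => '/home/git/repositories' }

# Writes /home/git/gitaly/config.toml with one storage entry per name/path pair.
Gitlab::SetupHelper.create_gitaly_configuration('/home/git/gitaly', storage_paths)
```

This mirrors what the updated `gitlab:gitaly:install` rake task below does with its new `storage_path` argument.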
namespace :gitlab do
  namespace :gitaly do
    desc "GitLab | Install or upgrade gitaly"
-    task :install, [:dir, :repo] => :gitlab_environment do |t, args|
-      require 'toml-rb'
+    task :install, [:dir, :storage_path, :repo] => :gitlab_environment do |t, args|
      warn_user_is_not_gitlab

-      unless args.dir.present?
-        abort %(Please specify the directory where you want to install gitaly:\n  rake "gitlab:gitaly:install[/home/git/gitaly]")
+      unless args.dir.present? && args.storage_path.present?
+        abort %(Please specify the directory where you want to install gitaly and the path for the default storage
+Usage: rake "gitlab:gitaly:install[/installation/dir,/storage/path]")
      end

      args.with_defaults(repo: 'https://gitlab.com/gitlab-org/gitaly.git')
@@ -27,7 +26,8 @@ namespace :gitlab do
        "BUNDLE_PATH=#{Bundler.bundle_path}")
      end

-      Gitlab::SetupHelper.create_gitaly_configuration(args.dir)
+      storage_paths = { 'default' => args.storage_path }
+      Gitlab::SetupHelper.create_gitaly_configuration(args.dir, storage_paths)

      Dir.chdir(args.dir) do
        # In CI we run scripts/gitaly-test-build instead of this command
        unless ENV['CI'].present?
@@ -35,17 +35,5 @@ namespace :gitlab do
        end
      end
    end
-
-    desc "GitLab | Print storage configuration in TOML format"
-    task storage_config: :environment do
-      require 'toml-rb'
-
-      puts "# Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}"
-      puts "# This is in TOML format suitable for use in Gitaly's config.toml file."
-
-      # Exclude gitaly-ruby configuration because that depends on the gitaly
-      # installation directory.
-      puts Gitlab::SetupHelper.gitaly_configuration_toml('', gitaly_ruby: false)
-    end
  end
end
@@ -4067,7 +4067,14 @@ msgstr ""
msgid "Jobs"
msgstr ""

<<<<<<< HEAD
msgid "Job|This job is stuck, because the project doesn't have any runners online assigned to it."
=======
msgid "Job|Job has been erased"
msgstr ""

msgid "Job|Job has been erased by"
>>>>>>> upstream/master
msgstr ""

msgid "Jul"
......
@@ -50,7 +50,7 @@ module QA
      Page::Project::Pipeline::Show.perform do |pipeline|
        expect(pipeline).to have_build('build', status: :success, wait: 600)
        expect(pipeline).to have_build('test', status: :success, wait: 600)
        expect(pipeline).to have_build('production', status: :success, wait: 1200)
      end
    end
  end
......
import Vue from 'vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
import component from '~/jobs/components/erased_block.vue';
import mountComponent from '../helpers/vue_mount_component_helper';

describe('Erased block', () => {
  const Component = Vue.extend(component);
  let vm;

  const erasedAt = '2016-11-07T11:11:16.525Z';
  const timeago = getTimeago();
  const formatedDate = timeago.format(erasedAt);

  afterEach(() => {
    vm.$destroy();
  });

  describe('with job erased by user', () => {
    beforeEach(() => {
      vm = mountComponent(Component, {
        erasedByUser: true,
        username: 'root',
        linkToUser: 'gitlab.com/root',
        erasedAt,
      });
    });

    it('renders username and link', () => {
      expect(vm.$el.querySelector('a').getAttribute('href')).toEqual('gitlab.com/root');

      expect(vm.$el.textContent).toContain('Job has been erased by');
      expect(vm.$el.textContent).toContain('root');
    });

    it('renders erasedAt', () => {
      expect(vm.$el.textContent).toContain(formatedDate);
    });
  });

  describe('with erased job', () => {
    beforeEach(() => {
      vm = mountComponent(Component, {
        erasedByUser: false,
        erasedAt,
      });
    });

    it('renders username and link', () => {
      expect(vm.$el.textContent).toContain('Job has been erased');
    });

    it('renders erasedAt', () => {
      expect(vm.$el.textContent).toContain(formatedDate);
    });
  });
});
import Vue from 'vue';
import component from '~/jobs/components/job_log.vue';
import mountComponent from '../helpers/vue_mount_component_helper';

describe('Job Log', () => {
  const Component = Vue.extend(component);
  let vm;

  const trace = 'Running with gitlab-runner 11.1.0 (081978aa)<br> on docker-auto-scale-com d5ae8d25<br>Using Docker executor with image dev.gitlab.org:5005/gitlab/gitlab-build-images:ruby-2.4.4-golang-1.9-git-2.18-chrome-67.0-node-8.x-yarn-1.2-postgresql-9.6-graphicsmagick-1.3.29 ...<br>';

  afterEach(() => {
    vm.$destroy();
  });

  it('renders provided trace', () => {
    vm = mountComponent(Component, {
      trace,
      isReceivingBuildTrace: true,
    });

    expect(vm.$el.querySelector('code').textContent).toContain('Running with gitlab-runner 11.1.0 (081978aa)');
  });

  describe('while receiving trace', () => {
    it('renders animation', () => {
      vm = mountComponent(Component, {
        trace,
        isReceivingBuildTrace: true,
      });

      expect(vm.$el.querySelector('.js-log-animation')).not.toBeNull();
    });
  });

  describe('when build trace has finished', () => {
    it('does not render animation', () => {
      vm = mountComponent(Component, {
        trace,
        isReceivingBuildTrace: false,
      });

      expect(vm.$el.querySelector('.js-log-animation')).toBeNull();
    });
  });
});
@@ -48,10 +48,10 @@ describe Gitlab::Database::MigrationHelpers do
         allow(model).to receive(:transaction_open?).and_return(false)
       end

-      context 'using PostgreSQL' do
+      context 'using PostgreSQL', :postgresql do
         before do
           allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
-          allow(model).to receive(:disable_statement_timeout)
+          allow(model).to receive(:disable_statement_timeout).and_call_original
         end

         it 'creates the index concurrently' do
@@ -114,12 +114,12 @@ describe Gitlab::Database::MigrationHelpers do
       before do
         allow(model).to receive(:transaction_open?).and_return(false)
         allow(model).to receive(:index_exists?).and_return(true)
+        allow(model).to receive(:disable_statement_timeout).and_call_original
       end

       context 'using PostgreSQL' do
         before do
           allow(model).to receive(:supports_drop_index_concurrently?).and_return(true)
-          allow(model).to receive(:disable_statement_timeout)
         end

         describe 'by column name' do
@@ -162,7 +162,7 @@ describe Gitlab::Database::MigrationHelpers do
       context 'using MySQL' do
         it 'removes an index' do
-          expect(Gitlab::Database).to receive(:postgresql?).and_return(false)
+          expect(Gitlab::Database).to receive(:postgresql?).and_return(false).twice
           expect(model).to receive(:remove_index)
             .with(:users, { column: :foo })
@@ -224,21 +224,26 @@ describe Gitlab::Database::MigrationHelpers do
       context 'using PostgreSQL' do
         before do
+          allow(Gitlab::Database).to receive(:postgresql?).and_return(true)
           allow(Gitlab::Database).to receive(:mysql?).and_return(false)
         end

         it 'creates a concurrent foreign key and validates it' do
-          expect(model).to receive(:disable_statement_timeout)
+          expect(model).to receive(:disable_statement_timeout).and_call_original
+          expect(model).to receive(:execute).with(/statement_timeout/)
           expect(model).to receive(:execute).ordered.with(/NOT VALID/)
           expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+          expect(model).to receive(:execute).with(/RESET ALL/)

           model.add_concurrent_foreign_key(:projects, :users, column: :user_id)
         end

         it 'appends a valid ON DELETE statement' do
-          expect(model).to receive(:disable_statement_timeout)
+          expect(model).to receive(:disable_statement_timeout).and_call_original
+          expect(model).to receive(:execute).with(/statement_timeout/)
           expect(model).to receive(:execute).with(/ON DELETE SET NULL/)
           expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+          expect(model).to receive(:execute).with(/RESET ALL/)

           model.add_concurrent_foreign_key(:projects, :users,
                                            column: :user_id,
@@ -291,13 +296,68 @@ describe Gitlab::Database::MigrationHelpers do
   describe '#disable_statement_timeout' do
     context 'using PostgreSQL' do
-      it 'disables statement timeouts' do
+      it 'disables statement timeouts for the current transaction only' do
         expect(Gitlab::Database).to receive(:postgresql?).and_return(true)
-        expect(model).to receive(:execute).with('SET statement_timeout TO 0')
+        expect(model).to receive(:execute).with('SET LOCAL statement_timeout TO 0')

         model.disable_statement_timeout
       end
+
+      # this spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
+      context 'with real environment', :postgresql, :delete do
+        before do
+          model.execute("SET statement_timeout TO '20000'")
+        end
+
+        after do
+          model.execute('RESET ALL')
+        end
+
+        it 'sets statement_timeout to 0 only for the current transaction' do
+          expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+
+          model.connection.transaction do
+            model.disable_statement_timeout
+            expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+          end
+
+          expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+        end
+      end
+
+      context 'when passing a block' do
+        it 'disables statement timeouts at session level and executes the block' do
+          expect(Gitlab::Database).to receive(:postgresql?).and_return(true)
+          expect(model).to receive(:execute).with('SET statement_timeout TO 0')
+          expect(model).to receive(:execute).with('RESET ALL')
+
+          expect { |block| model.disable_statement_timeout(&block) }.to yield_control
+        end
+
+        # this spec runs without an enclosing transaction (:delete truncation method for db_cleaner)
+        context 'with real environment', :postgresql, :delete do
+          before do
+            model.execute("SET statement_timeout TO '20000'")
+          end
+
+          after do
+            model.execute('RESET ALL')
+          end
+
+          it 'sets statement_timeout to 0 for any code run inside the block' do
+            expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('20s')
+
+            model.disable_statement_timeout do
+              model.connection.transaction do
+                expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+              end
+
+              expect(model.execute('SHOW statement_timeout').first['statement_timeout']).to eq('0')
+            end
+          end
+        end
+      end
     end

     context 'using MySQL' do
@@ -308,6 +368,16 @@ describe Gitlab::Database::MigrationHelpers do
         model.disable_statement_timeout
       end
+
+      context 'when passing a block' do
+        it 'executes the block of code' do
+          expect(Gitlab::Database).to receive(:postgresql?).and_return(false)
+          expect(model).not_to receive(:execute)
+
+          expect { |block| model.disable_statement_timeout(&block) }.to yield_control
+        end
+      end
     end
   end
......
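The expectations above pin down the new contract for disable_statement_timeout: without a block it only disables the timeout for the current transaction (SET LOCAL), with a block it disables it for the whole session and resets it afterwards, and on MySQL it never touches statement_timeout and only yields when given a block. A minimal sketch consistent with those expectations follows; the real helper lives in Gitlab::Database::MigrationHelpers and may differ in detail.

# Sketch only: mirrors the behaviour exercised by the specs above, not the exact upstream implementation.
def disable_statement_timeout
  if Gitlab::Database.postgresql?
    if block_given?
      begin
        # Session-level override, reverted once the block finishes.
        execute('SET statement_timeout TO 0')
        yield
      ensure
        execute('RESET ALL')
      end
    else
      # Transaction-local override; PostgreSQL discards it at COMMIT/ROLLBACK.
      execute('SET LOCAL statement_timeout TO 0')
    end
  else
    # MySQL has no statement_timeout, so only run the block if one was given.
    yield if block_given?
  end
end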
@@ -296,41 +296,31 @@ describe Repository do
   end

   describe '#new_commits' do
-    shared_examples 'finding unreferenced commits' do
     set(:project) { create(:project, :repository) }
     let(:repository) { project.repository }

     subject { repository.new_commits(rev) }

     context 'when there are no new commits' do
       let(:rev) { repository.commit.id }

       it 'returns an empty array' do
         expect(subject).to eq([])
       end
     end

     context 'when new commits are found' do
       let(:branch) { 'orphaned-branch' }
       let!(:rev) { repository.commit(branch).id }

       it 'returns the commits' do
         repository.delete_branch(branch)

         expect(subject).not_to be_empty
         expect(subject).to all( be_a(::Commit) )
         expect(subject.size).to eq(1)
       end
     end
-    end
-
-    context 'when Gitaly handles the request' do
-      it_behaves_like 'finding unreferenced commits'
-    end
-
-    context 'when Gitaly is disabled', :disable_gitaly do
-      it_behaves_like 'finding unreferenced commits'
-    end
   end

   describe '#commits_by' do
......
@@ -67,6 +67,7 @@ module TestEnv
   TMP_TEST_PATH = Rails.root.join('tmp', 'tests', '**')
   REPOS_STORAGE = 'default'.freeze
+  BROKEN_STORAGE = 'broken'.freeze

   # Test environment
   #
@@ -157,10 +158,11 @@ module TestEnv
     component_timed_setup('Gitaly',
       install_dir: gitaly_dir,
       version: Gitlab::GitalyClient.expected_server_version,
-      task: "gitlab:gitaly:install[#{gitaly_dir}]") do
+      task: "gitlab:gitaly:install[#{gitaly_dir},#{repos_path}]") do

-      # Always re-create config, in case it's outdated. This is fast anyway.
-      Gitlab::SetupHelper.create_gitaly_configuration(gitaly_dir, force: true)
+      # Re-create config, to specify the broken storage path
+      storage_paths = { 'default' => repos_path, 'broken' => broken_path }
+      Gitlab::SetupHelper.create_gitaly_configuration(gitaly_dir, storage_paths, force: true)

       start_gitaly(gitaly_dir)
     end
@@ -256,6 +258,10 @@ module TestEnv
     @repos_path ||= Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path
   end

+  def broken_path
+    @broken_path ||= Gitlab.config.repositories.storages[BROKEN_STORAGE].legacy_disk_path
+  end
+
   def backup_path
     Gitlab.config.backup.path
   end
......
@@ -8,13 +8,23 @@ describe 'gitlab:gitaly namespace rake task' do
   describe 'install' do
     let(:repo) { 'https://gitlab.com/gitlab-org/gitaly.git' }
     let(:clone_path) { Rails.root.join('tmp/tests/gitaly').to_s }
+    let(:storage_path) { Rails.root.join('tmp/tests/repositories').to_s }
     let(:version) { File.read(Rails.root.join(Gitlab::GitalyClient::SERVER_VERSION_FILE)).chomp }

+    subject { run_rake_task('gitlab:gitaly:install', clone_path, storage_path) }
+
     context 'no dir given' do
       it 'aborts and displays a help message' do
         # avoid writing task output to spec progress
         allow($stderr).to receive :write

-        expect { run_rake_task('gitlab:gitaly:install') }.to raise_error /Please specify the directory where you want to install gitaly/
+        expect { run_rake_task('gitlab:gitaly:install') }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/
       end
     end

+    context 'no storage path given' do
+      it 'aborts and displays a help message' do
+        allow($stderr).to receive :write
+
+        expect { run_rake_task('gitlab:gitaly:install', clone_path) }.to raise_error /Please specify the directory where you want to install gitaly and the path for the default storage/
+      end
+    end
@@ -23,7 +33,7 @@ describe 'gitlab:gitaly namespace rake task' do
         expect(main_object)
           .to receive(:checkout_or_clone_version).and_raise 'Git error'

-        expect { run_rake_task('gitlab:gitaly:install', clone_path) }.to raise_error 'Git error'
+        expect { subject }.to raise_error 'Git error'
       end
     end
@@ -36,7 +46,7 @@ describe 'gitlab:gitaly namespace rake task' do
         expect(main_object)
           .to receive(:checkout_or_clone_version).with(version: version, repo: repo, target_dir: clone_path)

-        run_rake_task('gitlab:gitaly:install', clone_path)
+        subject
       end
     end
@@ -59,7 +69,7 @@ describe 'gitlab:gitaly namespace rake task' do
         expect(Gitlab::Popen).to receive(:popen).with(%w[which gmake]).and_return(['/usr/bin/gmake', 0])
         expect(main_object).to receive(:run_command!).with(command_preamble + %w[gmake]).and_return(true)

-        run_rake_task('gitlab:gitaly:install', clone_path)
+        subject
       end
     end
@@ -72,7 +82,7 @@ describe 'gitlab:gitaly namespace rake task' do
       it 'calls make in the gitaly directory' do
         expect(main_object).to receive(:run_command!).with(command_preamble + %w[make]).and_return(true)

-        run_rake_task('gitlab:gitaly:install', clone_path)
+        subject
       end

       context 'when Rails.env is test' do
@@ -89,55 +99,10 @@ describe 'gitlab:gitaly namespace rake task' do
         it 'calls make in the gitaly directory with --no-deployment flag for bundle' do
           expect(main_object).to receive(:run_command!).with(command_preamble + command).and_return(true)

-          run_rake_task('gitlab:gitaly:install', clone_path)
+          subject
         end
       end
     end
   end
 end

-  describe 'storage_config' do
-    it 'prints storage configuration in a TOML format' do
-      config = {
-        'default' => Gitlab::GitalyClient::StorageSettings.new(
-          'path' => '/path/to/default',
-          'gitaly_address' => 'unix:/path/to/my.socket'
-        ),
-        'nfs_01' => Gitlab::GitalyClient::StorageSettings.new(
-          'path' => '/path/to/nfs_01',
-          'gitaly_address' => 'unix:/path/to/my.socket'
-        )
-      }
-      allow(Gitlab.config.repositories).to receive(:storages).and_return(config)
-      allow(Rails.env).to receive(:test?).and_return(false)
-
-      expected_output = ''
-      Timecop.freeze do
-        expected_output = <<~TOML
-          # Gitaly storage configuration generated from #{Gitlab.config.source} on #{Time.current.to_s(:long)}
-          # This is in TOML format suitable for use in Gitaly's config.toml file.
-          bin_dir = "tmp/tests/gitaly"
-          socket_path = "/path/to/my.socket"
-          [gitlab-shell]
-          dir = "#{Gitlab.config.gitlab_shell.path}"
-          [[storage]]
-          name = "default"
-          path = "/path/to/default"
-          [[storage]]
-          name = "nfs_01"
-          path = "/path/to/nfs_01"
-        TOML
-      end
-
-      expect { run_rake_task('gitlab:gitaly:storage_config')}
-        .to output(expected_output).to_stdout
-
-      parsed_output = TomlRB.parse(expected_output)
-      config.each do |name, params|
-        Gitlab::GitalyClient::StorageSettings.allow_disk_access do
-          expect(parsed_output['storage']).to include({ 'name' => name, 'path' => params.legacy_disk_path })
-        end
-      end
-    end
-  end
 end
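The spec changes above reflect that gitlab:gitaly:install now takes two arguments, the installation directory and the path of the default storage, and aborts with a combined help message when either is missing. A hedged sketch of that argument check; the task body and names here are illustrative, not the exact upstream code.

# Illustrative sketch of the two-argument task signature the specs exercise.
namespace :gitlab do
  namespace :gitaly do
    desc 'GitLab | Install or upgrade gitaly'
    task :install, [:dir, :storage_path] do |_t, args|
      # Both arguments are required; aborting produces the message the specs match against.
      if args.dir.nil? || args.storage_path.nil?
        abort 'Please specify the directory where you want to install gitaly and the path for the default storage'
      end

      # checkout_or_clone_version and the make/gmake build steps follow,
      # as exercised by the examples above.
    end
  end
end

TestEnv drives the same task as gitlab:gitaly:install[#{gitaly_dir},#{repos_path}], matching the component_timed_setup change shown earlier.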
@@ -641,7 +641,7 @@ rollout 100%:
   function install_dependencies() {
     apk add -U openssl curl tar gzip bash ca-certificates git
-    wget -q -O /etc/apk/keys/sgerrand.rsa.pub https://raw.githubusercontent.com/sgerrand/alpine-pkg-glibc/master/sgerrand.rsa.pub
+    wget -q -O /etc/apk/keys/sgerrand.rsa.pub https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub
     wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/2.23-r3/glibc-2.23-r3.apk
     apk add glibc-2.23-r3.apk
     rm glibc-2.23-r3.apk
......
 # Full project: https://gitlab.com/pages/middleman
-image: ruby:2.3
+image: ruby:2.4
+
+variables:
+  LANG: "C.UTF-8"

 cache:
   paths:
     - vendor

-test:
-  script:
-    - apt-get update -yqqq
-    - apt-get install -y nodejs
-    - bundle install --path vendor
+before_script:
+  - apt-get update -yqqq
+  - apt-get install -y nodejs
+  - bundle install --path vendor
+
+test:
+  script:
     - bundle exec middleman build
   except:
     - master

 pages:
   script:
-    - apt-get update -yqqq
-    - apt-get install -y nodejs
-    - bundle install --path vendor
     - bundle exec middleman build
   artifacts:
     paths:
......