Commit 43626526 authored by Robert Speicher

Merge branch '52442-initial-mysql-support-removal' into 'master'

Remove dead mysql code

Closes #63191

See merge request gitlab-org/gitlab-ce!29608
parents d77bd32c 4aa76ddd
@@ -16,7 +16,6 @@ gem 'sprockets', '~> 3.7.0'
 gem 'default_value_for', '~> 3.2.0'
 # Supported DBs
-gem 'mysql2', '~> 0.4.10', group: :mysql
 gem 'pg', '~> 1.1', group: :postgres
 gem 'rugged', '~> 0.28'
@@ -298,7 +297,6 @@ gem 'batch-loader', '~> 1.4.0'
 # Perf bar
 gem 'peek', '~> 1.0.1'
 gem 'peek-gc', '~> 0.0.2'
-gem 'peek-mysql2', '~> 1.2.0', group: :mysql
 gem 'peek-pg', '~> 1.3.0', group: :postgres
 gem 'peek-rblineprof', '~> 0.2.0'
......
@@ -536,7 +536,6 @@ GEM
     mustermann (1.0.3)
     mustermann-grape (1.0.0)
       mustermann (~> 1.0.0)
-    mysql2 (0.4.10)
     nakayoshi_fork (0.0.4)
     net-ldap (0.16.0)
     net-ssh (5.2.0)
@@ -644,11 +643,6 @@ GEM
       railties (>= 4.0.0)
     peek-gc (0.0.2)
       peek
-    peek-mysql2 (1.2.0)
-      concurrent-ruby
-      concurrent-ruby-ext
-      mysql2
-      peek
     peek-pg (1.3.0)
       concurrent-ruby
       concurrent-ruby-ext
@@ -1163,7 +1157,6 @@ DEPENDENCIES
   mimemagic (~> 0.3.2)
   mini_magick
   minitest (~> 5.11.0)
-  mysql2 (~> 0.4.10)
   nakayoshi_fork (~> 0.0.4)
   net-ldap
   net-ssh (~> 5.2)
@@ -1191,7 +1184,6 @@ DEPENDENCIES
   org-ruby (~> 0.9.12)
   peek (~> 1.0.1)
   peek-gc (~> 0.0.2)
-  peek-mysql2 (~> 1.2.0)
   peek-pg (~> 1.3.0)
   peek-rblineprof (~> 0.2.0)
   pg (~> 1.1)
......
@@ -236,8 +236,6 @@ module Ci
       if limit
         ids = relation.limit(limit).select(:id)
-        # MySQL does not support limit in subquery
-        ids = ids.pluck(:id) if Gitlab::Database.mysql?
         relation = relation.where(id: ids)
       end
......
@@ -40,14 +40,10 @@ module CaseSensitivity
     end

     def lower_value(value)
-      return value if Gitlab::Database.mysql?
-
       Arel::Nodes::NamedFunction.new('LOWER', [Arel::Nodes.build_quoted(value)])
     end

     def lower_column(column)
-      return column if Gitlab::Database.mysql?
-
       column.lower
     end
   end
......
@@ -33,29 +33,12 @@ module Routable
     #
     # Returns a single object, or nil.
     def find_by_full_path(path, follow_redirects: false)
-      # On MySQL we want to ensure the ORDER BY uses a case-sensitive match so
-      # any literal matches come first, for this we have to use "BINARY".
-      # Without this there's still no guarantee in what order MySQL will return
-      # rows.
-      #
-      # Why do we do this?
-      #
-      # Even though we have Rails validation on Route for unique paths
-      # (case-insensitive), there are old projects in our DB (and possibly
-      # clients' DBs) that have the same path with different cases.
-      # See https://gitlab.com/gitlab-org/gitlab-ce/issues/18603. Also note that
-      # our unique index is case-sensitive in Postgres.
-      binary = Gitlab::Database.mysql? ? 'BINARY' : ''
-      order_sql = Arel.sql("(CASE WHEN #{binary} routes.path = #{connection.quote(path)} THEN 0 ELSE 1 END)")
+      order_sql = Arel.sql("(CASE WHEN routes.path = #{connection.quote(path)} THEN 0 ELSE 1 END)")
       found = where_full_path_in([path]).reorder(order_sql).take
       return found if found

       if follow_redirects
-        if Gitlab::Database.postgresql?
-          joins(:redirect_routes).find_by("LOWER(redirect_routes.path) = LOWER(?)", path)
-        else
-          joins(:redirect_routes).find_by(redirect_routes: { path: path })
-        end
+        joins(:redirect_routes).find_by("LOWER(redirect_routes.path) = LOWER(?)", path)
       end
     end
@@ -67,27 +50,13 @@ module Routable
     #
     # Returns an ActiveRecord::Relation.
     def where_full_path_in(paths)
-      wheres = []
-      cast_lower = Gitlab::Database.postgresql?
+      return none if paths.empty?

-      paths.each do |path|
-        path = connection.quote(path)
-
-        where =
-          if cast_lower
-            "(LOWER(routes.path) = LOWER(#{path}))"
-          else
-            "(routes.path = #{path})"
-          end
-
-        wheres << where
+      wheres = paths.map do |path|
+        "(LOWER(routes.path) = LOWER(#{connection.quote(path)}))"
       end

-      if wheres.empty?
-        none
-      else
-        joins(:route).where(wheres.join(' OR '))
-      end
+      joins(:route).where(wheres.join(' OR '))
     end
   end
......
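For readers following the simplified lookup, a rough usage sketch; the model and path used here are illustrative and not part of the change:

  # Case-insensitive lookup; exact-case matches sort first via the CASE expression.
  project = Project.find_by_full_path('gitlab-org/GitLab-CE', follow_redirects: true)
  # Ordering applied by the simplified code (illustrative):
  #   ORDER BY (CASE WHEN routes.path = 'gitlab-org/GitLab-CE' THEN 0 ELSE 1 END)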
@@ -128,17 +128,8 @@ class Deployment < ApplicationRecord
       merge_requests = merge_requests.where("merge_request_metrics.merged_at >= ?", previous_deployment.finished_at)
     end

-    # Need to use `map` instead of `select` because MySQL doesn't allow `SELECT`ing from the same table
-    # that we're updating.
-    merge_request_ids =
-      if Gitlab::Database.postgresql?
-        merge_requests.select(:id)
-      elsif Gitlab::Database.mysql?
-        merge_requests.map(&:id)
-      end
-
     MergeRequest::Metrics
-      .where(merge_request_id: merge_request_ids, first_deployed_to_production_at: nil)
+      .where(merge_request_id: merge_requests.select(:id), first_deployed_to_production_at: nil)
       .update_all(first_deployed_to_production_at: finished_at)
   end
......
@@ -23,15 +23,7 @@ module RecordsUploads
       return unless model
       return unless file && file.exists?

-      # MySQL InnoDB may encounter a deadlock if a deletion and an
-      # insert is in the same transaction due to its next-key locking
-      # algorithm, so we need to skip the transaction.
-      # https://gitlab.com/gitlab-org/gitlab-ce/issues/55161#note_131556351
-      if Gitlab::Database.mysql?
-        readd_upload
-      else
-        Upload.transaction { readd_upload }
-      end
+      Upload.transaction { readd_upload }
     end

     def readd_upload
......
@@ -22,11 +22,6 @@ module Gitlab
     require_dependency Rails.root.join('lib/gitlab/middleware/read_only')
     require_dependency Rails.root.join('lib/gitlab/middleware/basic_health_check')

-    # This needs to be loaded before DB connection is made
-    # to make sure that all connections have NO_ZERO_DATE
-    # setting disabled
-    require_dependency Rails.root.join('lib/mysql_zero_date')
-
     # Settings in config/environments/* take precedence over those specified here.
     # Application configuration should go into files in config/initializers
     # -- all .rb files in that directory are automatically loaded.
......
#
# PRODUCTION
#
production:
adapter: mysql2
encoding: utf8
collation: utf8_general_ci
reconnect: false
database: gitlabhq_production
pool: 10
username: git
password: "secure password"
host: localhost
# socket: /tmp/mysql.sock
#
# Development specific
#
development:
adapter: mysql2
encoding: utf8
collation: utf8_general_ci
reconnect: false
database: gitlabhq_development
pool: 5
username: root
password: "secure password"
host: localhost
# socket: /tmp/mysql.sock
#
# Staging specific
#
staging:
adapter: mysql2
encoding: utf8
collation: utf8_general_ci
reconnect: false
database: gitlabhq_staging
pool: 10
username: git
password: "secure password"
host: localhost
# socket: /tmp/mysql.sock
# Warning: The database defined as "test" will be erased and
# re-generated from your development database when you run "rake".
# Do not set this db to the same as development or production.
test: &test
adapter: mysql2
encoding: utf8mb4
collation: utf8mb4_general_ci
reconnect: false
database: gitlabhq_test
pool: 5
username: root
password:
host: localhost
# socket: /tmp/mysql.sock
prepared_statements: false
 # ActiveRecord custom data type for storing datetimes with timezone information.
 # See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11229

-if Gitlab::Database.postgresql?
-  require 'active_record/connection_adapters/postgresql_adapter'
+require 'active_record/connection_adapters/postgresql_adapter'

 module ActiveRecord::ConnectionAdapters::PostgreSQL::OID
   # Add the class `DateTimeWithTimeZone` so we can map `timestamptz` to it.
   class DateTimeWithTimeZone < DateTime
     def type
       :datetime_with_timezone
     end
   end
 end

 module RegisterDateTimeWithTimeZone
   # Run original `initialize_type_map` and then register `timestamptz` as a
   # `DateTimeWithTimeZone`.
   #
   # Apparently it does not matter that the original `initialize_type_map`
   # aliases `timestamptz` to `timestamp`.
   #
   # When schema dumping, `timestamptz` columns will be output as
   # `t.datetime_with_timezone`.
   def initialize_type_map(mapping = type_map)
     super mapping

     mapping.register_type 'timestamptz' do |_, _, sql_type|
       precision = extract_precision(sql_type)
       ActiveRecord::ConnectionAdapters::PostgreSQLAdapter::OID::DateTimeWithTimeZone.new(precision: precision)
     end
   end
 end

 class ActiveRecord::ConnectionAdapters::PostgreSQLAdapter
   prepend RegisterDateTimeWithTimeZone

   # Add column type `datetime_with_timezone` so we can do this in
   # migrations:
   #
   #   add_column(:users, :datetime_with_timezone)
   #
   NATIVE_DATABASE_TYPES[:datetime_with_timezone] = { name: 'timestamptz' }
 end
-elsif Gitlab::Database.mysql?
-  require 'active_record/connection_adapters/mysql2_adapter'
-
-  module RegisterDateTimeWithTimeZone
-    # Run original `initialize_type_map` and then register `timestamp` as a
-    # `MysqlDateTimeWithTimeZone`.
-    #
-    # When schema dumping, `timestamp` columns will be output as
-    # `t.datetime_with_timezone`.
-    def initialize_type_map(mapping = type_map)
-      super mapping
-
-      mapping.register_type(/timestamp/i) do |sql_type|
-        precision = extract_precision(sql_type)
-        ActiveRecord::ConnectionAdapters::AbstractMysqlAdapter::MysqlDateTimeWithTimeZone.new(precision: precision)
-      end
-    end
-  end
-
-  class ActiveRecord::ConnectionAdapters::AbstractMysqlAdapter
-    prepend RegisterDateTimeWithTimeZone
-
-    # Add the class `DateTimeWithTimeZone` so we can map `timestamp` to it.
-    class MysqlDateTimeWithTimeZone < ActiveRecord::Type::DateTime
-      def type
-        :datetime_with_timezone
-      end
-    end
-
-    # Add column type `datetime_with_timezone` so we can do this in
-    # migrations:
-    #
-    #   add_column(:users, :datetime_with_timezone)
-    #
-    NATIVE_DATABASE_TYPES[:datetime_with_timezone] = { name: 'timestamp' }
-  end
-end

 # Ensure `datetime_with_timezone` columns are correctly written to schema.rb
......
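As the comment in the initializer above notes, the registered type can then be used directly in migrations. A minimal sketch, assuming an illustrative table and column name:

  class AddDismissedAtToUserCallouts < ActiveRecord::Migration[5.0]
    DOWNTIME = false

    def change
      # Stored as PostgreSQL timestamptz; dumped to schema.rb as t.datetime_with_timezone
      add_column :user_callouts, :dismissed_at, :datetime_with_timezone
    end
  end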
# Make sure that MySQL won't try to use CURRENT_TIMESTAMP when the timestamp
# column is NOT NULL. See https://gitlab.com/gitlab-org/gitlab-ce/issues/36405
# And also: https://bugs.mysql.com/bug.php?id=75098
# This patch was based on:
# https://github.com/rails/rails/blob/15ef55efb591e5379486ccf53dd3e13f416564f6/activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb#L34-L36
if Gitlab::Database.mysql?
require 'active_record/connection_adapters/abstract/schema_creation'
module MySQLTimestampFix
def add_column_options!(sql, options)
# By default, TIMESTAMP columns are NOT NULL, cannot contain NULL values,
# and assigning NULL assigns the current timestamp. To permit a TIMESTAMP
# column to contain NULL, explicitly declare it with the NULL attribute.
# See http://dev.mysql.com/doc/refman/5.7/en/timestamp-initialization.html
if sql.end_with?('timestamp') && !options[:primary_key]
if options[:null] != false
sql << ' NULL'
elsif options[:column].default.nil?
sql << ' DEFAULT 0'
end
end
super
end
end
ActiveRecord::ConnectionAdapters::AbstractAdapter::SchemaCreation
.prepend(MySQLTimestampFix)
end
# frozen_string_literal: true
require 'active_record/connection_adapters/abstract_mysql_adapter'
require 'active_record/connection_adapters/mysql/schema_definitions'
# MySQL (5.6) and MariaDB (10.1) are currently supported versions within GitLab,
# Since they do not support native `json` datatype we force to emulate it as `text`
if Gitlab::Database.mysql?
module ActiveRecord
module ConnectionAdapters
class AbstractMysqlAdapter
JSON_DATASIZE = 1.megabyte
NATIVE_DATABASE_TYPES.merge!(
json: { name: "text", limit: JSON_DATASIZE },
jsonb: { name: "text", limit: JSON_DATASIZE }
)
end
module MySQL
module ColumnMethods
# We add `jsonb` helper, as `json` is already defined for `MySQL` since Rails 5
def jsonb(*args, **options)
args.each { |name| column(name, :json, options) }
end
end
end
end
end
end
require 'active_record/connection_adapters/abstract_mysql_adapter'
module ActiveRecord
module ConnectionAdapters
class AbstractMysqlAdapter
NATIVE_DATABASE_TYPES.merge!(
bigserial: { name: 'bigint(20) auto_increment PRIMARY KEY' },
serial: { name: 'int auto_increment PRIMARY KEY' }
)
end
end
end
# from http://gist.github.com/238999
#
# If your workers are inactive for a long period of time, they'll lose
# their MySQL connection.
#
# This hack ensures we re-connect whenever a connection is
# lost. Because, really. why not?
#
# Stick this in RAILS_ROOT/config/initializers/connection_fix.rb (or somewhere similar)
#
# From:
# http://coderrr.wordpress.com/2009/01/08/activerecord-threading-issues-and-resolutions/
if defined?(ActiveRecord::ConnectionAdapters::Mysql2Adapter)
module ActiveRecord::ConnectionAdapters
class Mysql2Adapter
alias_method :execute_without_retry, :execute
def execute(*args)
execute_without_retry(*args)
rescue ActiveRecord::StatementInvalid => e
if e.message =~ /server has gone away/i
warn "Lost connection to MySQL server during query"
reconnect!
retry
else
raise e
end
end
end
end
end
# This patches ActiveRecord so indexes created using the MySQL adapter ignore
# any PostgreSQL specific options (e.g. `using: :gin`).
#
# These patches do the following for MySQL:
#
# 1. Indexes created using the :opclasses option are ignored (as they serve no
# purpose on MySQL).
# 2. When creating an index with `using: :gin` the `using` option is discarded
# as :gin is not a valid value for MySQL.
# 3. The `:opclasses` option is stripped from add_index_options in case it's
# used anywhere other than in the add_index methods.
if defined?(ActiveRecord::ConnectionAdapters::Mysql2Adapter)
module ActiveRecord
module ConnectionAdapters
class Mysql2Adapter < AbstractMysqlAdapter
alias_method :__gitlab_add_index, :add_index
alias_method :__gitlab_add_index_options, :add_index_options
def add_index(table_name, column_name, options = {})
unless options[:opclasses]
__gitlab_add_index(table_name, column_name, options)
end
end
def add_index_options(table_name, column_name, options = {})
if options[:using] && options[:using] == :gin
options = options.dup
options.delete(:using)
end
if options[:opclasses]
options = options.dup
options.delete(:opclasses)
end
__gitlab_add_index_options(table_name, column_name, options)
end
end
end
end
end
# This patches ActiveRecord so indexes for binary columns created using the
# MySQL adapter apply a length of 20. Otherwise MySQL can't create an index on
# binary columns.
module MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema
# This method is used in Rails 5 schema loading as t.index
def index(column_names, options = {})
# Ignore indexes that use opclasses,
# also see config/initializers/mysql_ignore_postgresql_options.rb
if options[:opclasses]
warn "WARNING: index on columns #{column_names} uses unsupported option, skipping."
return
end
options[:length] ||= {}
Array(column_names).each do |column_name|
column = columns.find { |c| c.name == column_name }
if column&.type == :binary
options[:length][column_name] = 20
end
end
super(column_names, options)
end
end
if defined?(ActiveRecord::ConnectionAdapters::MySQL::TableDefinition)
ActiveRecord::ConnectionAdapters::MySQL::TableDefinition.send(:prepend, MysqlSetLengthForBinaryIndexAndIgnorePostgresOptionsForSchema)
end
@@ -2,11 +2,7 @@ Rails.application.config.peek.adapter = :redis, { client: ::Redis.new(Gitlab::Re
 Peek.into Peek::Views::Host

-if Gitlab::Database.mysql?
-  require 'peek-mysql2'
-  PEEK_DB_CLIENT = ::Mysql2::Client
-  PEEK_DB_VIEW = Peek::Views::Mysql2
-elsif Gitlab::Database.postgresql?
+if Gitlab::Database.postgresql?
   require 'peek-pg'
   PEEK_DB_CLIENT = ::PG::Connection
   PEEK_DB_VIEW = Peek::Views::PG
......
@@ -12,24 +12,10 @@ class ResetEventsPrimaryKeySequence < ActiveRecord::Migration[4.2]
   end

   def up
-    if Gitlab::Database.postgresql?
-      reset_primary_key_for_postgresql
-    else
-      reset_primary_key_for_mysql
-    end
+    reset_pk_sequence!(Event.table_name)
   end

   def down
     # No-op
   end
-
-  def reset_primary_key_for_postgresql
-    reset_pk_sequence!(Event.table_name)
-  end
-
-  def reset_primary_key_for_mysql
-    amount = Event.pluck('COALESCE(MAX(id), 1)').first
-
-    execute "ALTER TABLE #{Event.table_name} AUTO_INCREMENT = #{amount}"
-  end
 end
@@ -30,12 +30,6 @@ class CreateProjectCiCdSettings < ActiveRecord::Migration[4.2]
   end

   def add_foreign_key_with_retry
-    if Gitlab::Database.mysql?
-      # When using MySQL we don't support online upgrades, thus projects can't
-      # be deleted while we are running this migration.
-      return add_project_id_foreign_key
-    end
-
     # Between the initial INSERT and the addition of the foreign key some
     # projects may have been removed, leaving orphaned rows in our new settings
     # table.
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
require Rails.root.join('db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql')
class AddLimitsCiBuildTraceChunksRawDataForMysql < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
LimitsCiBuildTraceChunksRawDataForMysql.new.up
end
end
# frozen_string_literal: true
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
require Rails.root.join('db/migrate/prometheus_metrics_limits_to_mysql')
class FixPrometheusMetricQueryLimits < ActiveRecord::Migration[4.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
PrometheusMetricsLimitsToMysql.new.up
end
def down
# no-op
end
end
@@ -34,10 +34,6 @@ class AddMissingIndexesForForeignKeys < ActiveRecord::Migration[4.2]
   end

   def down
-    # MySQL requires index for FK,
-    # thus removal of indexes does fail
-    return if Gitlab::Database.mysql?
-
     remove_concurrent_index(:application_settings, :usage_stats_set_by_user_id)
     remove_concurrent_index(:ci_pipeline_schedules, :owner_id)
     remove_concurrent_index(:ci_trigger_requests, :trigger_id)
......
class IncreaseMysqlTextLimitForGpgKeys < ActiveRecord::Migration[4.2]
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def up
return unless Gitlab::Database.mysql?
change_column :gpg_keys, :key, :text, limit: 16.megabytes - 1
end
def down
# no-op
end
end
class LimitsCiBuildTraceChunksRawDataForMysql < ActiveRecord::Migration[4.2]
def up
return unless Gitlab::Database.mysql?
# Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB)
# Because 'raw_data' is always capped by Ci::BuildTraceChunk::CHUNK_SIZE, which is 128KB
change_column :ci_build_trace_chunks, :raw_data, :binary, limit: 16.megabytes - 1 # MEDIUMTEXT
end
end
class LimitsToMysql < ActiveRecord::Migration[4.2]
def up
return unless ActiveRecord::Base.configurations[Rails.env]['adapter'] =~ /^mysql/
change_column :snippets, :content, :text, limit: 2147483647
change_column :notes, :st_diff, :text, limit: 2147483647
end
end
class MarkdownCacheLimitsToMysql < ActiveRecord::Migration[4.2]
DOWNTIME = false
def up
return unless Gitlab::Database.mysql?
change_column :snippets, :content_html, :text, limit: 2147483647
end
def down
# no-op
end
end
class MergeRequestDiffFileLimitsToMysql < ActiveRecord::Migration[4.2]
DOWNTIME = false
def up
return unless Gitlab::Database.mysql?
change_column :merge_request_diff_files, :diff, :text, limit: 2147483647, default: nil
end
def down
end
end
class PrometheusMetricsLimitsToMysql < ActiveRecord::Migration[4.2]
DOWNTIME = false
def up
return unless Gitlab::Database.mysql?
change_column :prometheus_metrics, :query, :text, limit: 4096, default: nil
end
def down
end
end
@@ -9,10 +9,6 @@ class PopulateMissingProjectCiCdSettings < ActiveRecord::Migration[4.2]
   disable_ddl_transaction!

   def up
-    # MySQL does not support online upgrades, thus there can't be any missing
-    # rows.
-    return if Gitlab::Database.mysql?
-
     # Projects created after the initial migration but before the code started
     # using ProjectCiCdSetting won't have a corresponding row in
     # project_ci_cd_settings, so let's fix that.
......
@@ -49,7 +49,7 @@ class UpdateProjectImportVisibilityLevel < ActiveRecord::Migration[5.0]
   def update_projects_visibility(visibility)
     say_with_time("Updating project visibility to #{visibility} on #{Project::IMPORT_TYPE} imports.") do
       Project.with_group_visibility(visibility).select(:id).each_batch(of: BATCH_SIZE) do |batch, _index|
-        batch_sql = Gitlab::Database.mysql? ? batch.pluck(:id).join(', ') : batch.select(:id).to_sql
+        batch_sql = batch.select(:id).to_sql
         say("Updating #{batch.size} items.", true)
......
@@ -16,26 +16,6 @@ class MigrateAutoDevOpsDomainToClusterDomain < ActiveRecord::Migration[5.0]
   private

   def update_clusters_domain_query
-    if Gitlab::Database.mysql?
-      mysql_query
-    else
-      postgresql_query
-    end
-  end
-
-  def mysql_query
-    <<~HEREDOC
-      UPDATE clusters, project_auto_devops, cluster_projects
-      SET
-        clusters.domain = project_auto_devops.domain
-      WHERE
-        cluster_projects.cluster_id = clusters.id
-        AND project_auto_devops.project_id = cluster_projects.project_id
-        AND project_auto_devops.domain != ''
-    HEREDOC
-  end
-
-  def postgresql_query
     <<~HEREDOC
       UPDATE clusters
       SET domain = project_auto_devops.domain
......
@@ -21,34 +21,18 @@ class AddUniqueConstraintToApprovalsUserIdAndMergeRequestId < ActiveRecord::Migr
   def remove_duplicates
     add_concurrent_index :approvals, [:user_id, :merge_request_id, :id]

-    if Gitlab::Database.mysql?
-      execute <<-SQL
-        DELETE FROM a
-        USING approvals AS a
-          INNER JOIN (
-            SELECT user_id, merge_request_id, MIN(id) as min_id
-            FROM approvals
-            GROUP BY user_id, merge_request_id
-            HAVING COUNT(id) > 1
-          ) as approvals_with_duplicates
-          ON approvals_with_duplicates.user_id = a.user_id
-          AND approvals_with_duplicates.merge_request_id = a.merge_request_id
-        WHERE approvals_with_duplicates.min_id <> a.id;
-      SQL
-    else
-      execute <<-SQL
-        DELETE FROM approvals
-        USING (
-          SELECT user_id, merge_request_id, MIN(id) as min_id
-          FROM approvals
-          GROUP BY user_id, merge_request_id
-          HAVING COUNT(id) > 1
-        ) as approvals_with_duplicates
-        WHERE approvals_with_duplicates.user_id = approvals.user_id
-        AND approvals_with_duplicates.merge_request_id = approvals.merge_request_id
-        AND approvals_with_duplicates.min_id <> approvals.id;
-      SQL
-    end
+    execute <<-SQL
+      DELETE FROM approvals
+      USING (
+        SELECT user_id, merge_request_id, MIN(id) as min_id
+        FROM approvals
+        GROUP BY user_id, merge_request_id
+        HAVING COUNT(id) > 1
+      ) as approvals_with_duplicates
+      WHERE approvals_with_duplicates.user_id = approvals.user_id
+      AND approvals_with_duplicates.merge_request_id = approvals.merge_request_id
+      AND approvals_with_duplicates.min_id <> approvals.id;
+    SQL

     remove_concurrent_index :approvals, [:user_id, :merge_request_id, :id]
   end
......
@@ -1052,15 +1052,8 @@ module API
       # rubocop: disable CodeReuse/ActiveRecord
       def self.preload_relation(projects_relation, options = {})
         relation = super(projects_relation, options)

-        # MySQL doesn't support LIMIT inside an IN subquery
-        if Gitlab::Database.mysql?
-          project_ids = relation.pluck('projects.id')
-          namespace_ids = relation.pluck(:namespace_id)
-        else
-          project_ids = relation.select('projects.id')
-          namespace_ids = relation.select(:namespace_id)
-        end
+        project_ids = relation.select('projects.id')
+        namespace_ids = relation.select(:namespace_id)

         options[:project_members] = options[:current_user]
           .project_members
......
@@ -23,11 +23,6 @@ module Backup
       dump_pid =
         case config["adapter"]
-        when /^mysql/ then
-          progress.print "Dumping MySQL database #{config['database']} ... "
-          # Workaround warnings from MySQL 5.6 about passwords on cmd line
-          ENV['MYSQL_PWD'] = config["password"].to_s if config["password"]
-          spawn('mysqldump', *mysql_args, config['database'], out: compress_wr)
         when "postgresql" then
           progress.print "Dumping PostgreSQL database #{config['database']} ... "
           pg_env
@@ -57,11 +52,6 @@ module Backup
       restore_pid =
         case config["adapter"]
-        when /^mysql/ then
-          progress.print "Restoring MySQL database #{config['database']} ... "
-          # Workaround warnings from MySQL 5.6 about passwords on cmd line
-          ENV['MYSQL_PWD'] = config["password"].to_s if config["password"]
-          spawn('mysql', *mysql_args, config['database'], in: decompress_rd)
         when "postgresql" then
           progress.print "Restoring PostgreSQL database #{config['database']} ... "
           pg_env
@@ -80,23 +70,6 @@ module Backup
     protected

-    def mysql_args
-      args = {
-        'host'      => '--host',
-        'port'      => '--port',
-        'socket'    => '--socket',
-        'username'  => '--user',
-        'encoding'  => '--default-character-set',
-        # SSL
-        'sslkey'    => '--ssl-key',
-        'sslcert'   => '--ssl-cert',
-        'sslca'     => '--ssl-ca',
-        'sslcapath' => '--ssl-capath',
-        'sslcipher' => '--ssl-cipher'
-      }
-      args.map { |opt, arg| "#{arg}=#{config[opt]}" if config[opt] }.compact
-    end
-
     def pg_env
       args = {
         'username' => 'PGUSER',
......
 # frozen_string_literal: true

 class Forever
-  POSTGRESQL_DATE = DateTime.new(3000, 1, 1)
-  MYSQL_DATE = DateTime.new(2038, 01, 19)
+  DATE = DateTime.new(3000, 1, 1)

-  # MySQL timestamp has a range of '1970-01-01 00:00:01' UTC to '2038-01-19 03:14:07' UTC
   def self.date
-    if Gitlab::Database.postgresql?
-      POSTGRESQL_DATE
-    else
-      MYSQL_DATE
-    end
+    DATE
   end
 end
@@ -19,18 +19,11 @@ module Gitlab
       def perform(start_id, stop_id)
         PagesDomain.where(id: start_id..stop_id).find_each do |domain|
-          if Gitlab::Database.mysql?
-            domain.update_columns(
-              certificate_valid_not_before: domain.x509&.not_before,
-              certificate_valid_not_after: domain.x509&.not_after
-            )
-          else
-            # for some reason activerecord doesn't append timezone, iso8601 forces this
-            domain.update_columns(
-              certificate_valid_not_before: domain.x509&.not_before&.iso8601,
-              certificate_valid_not_after: domain.x509&.not_after&.iso8601
-            )
-          end
+          # for some reason activerecord doesn't append timezone, iso8601 forces this
+          domain.update_columns(
+            certificate_valid_not_before: domain.x509&.not_before&.iso8601,
+            certificate_valid_not_after: domain.x509&.not_after&.iso8601
+          )
         rescue => e
           Rails.logger.error "Failed to update pages domain certificate valid time. id: #{domain.id}, message: #{e.message}" # rubocop:disable Gitlab/RailsLogger
         end
......
@@ -176,23 +176,12 @@ module Gitlab
         self.table_name = 'projects'

         def self.find_by_full_path(path)
-          binary = Gitlab::Database.mysql? ? 'BINARY' : ''
-          order_sql = "(CASE WHEN #{binary} routes.path = #{connection.quote(path)} THEN 0 ELSE 1 END)"
+          order_sql = "(CASE WHEN routes.path = #{connection.quote(path)} THEN 0 ELSE 1 END)"
           where_full_path_in(path).reorder(order_sql).take
         end

         def self.where_full_path_in(path)
-          cast_lower = Gitlab::Database.postgresql?
-          path = connection.quote(path)
-
-          where =
-            if cast_lower
-              "(LOWER(routes.path) = LOWER(#{path}))"
-            else
-              "(routes.path = #{path})"
-            end
+          where = "(LOWER(routes.path) = LOWER(#{connection.quote(path)}))"

           joins("INNER JOIN routes ON routes.source_id = projects.id AND routes.source_type = 'Project'").where(where)
         end
       end
......
@@ -133,12 +133,9 @@ module Gitlab
       def insert_sql(file_paths)
         if postgresql_pre_9_5?
           "INSERT INTO #{table_columns_and_values_for_insert(file_paths)};"
-        elsif postgresql?
+        else
           "INSERT INTO #{table_columns_and_values_for_insert(file_paths)}"\
             " ON CONFLICT DO NOTHING;"
-        else # MySQL
-          "INSERT IGNORE INTO"\
-            " #{table_columns_and_values_for_insert(file_paths)};"
         end
       end
......
@@ -4,13 +4,13 @@ module Gitlab
   module Database
     include Gitlab::Metrics::Methods

-    # The max value of INTEGER type is the same between MySQL and PostgreSQL:
     # https://www.postgresql.org/docs/9.2/static/datatype-numeric.html
-    # http://dev.mysql.com/doc/refman/5.7/en/integer-types.html
     MAX_INT_VALUE = 2147483647

     # The max value between MySQL's TIMESTAMP and PostgreSQL's timestampz:
     # https://www.postgresql.org/docs/9.1/static/datatype-datetime.html
     # https://dev.mysql.com/doc/refman/5.7/en/datetime.html
+    # FIXME: this should just be the max value of timestampz
     MAX_TIMESTAMP_VALUE = Time.at((1 << 31) - 1).freeze

     # Minimum schema version from which migrations are supported
@@ -39,11 +39,11 @@ module Gitlab
     end

     def self.human_adapter_name
-      postgresql? ? 'PostgreSQL' : 'MySQL'
-    end
-
-    def self.mysql?
-      adapter_name.casecmp('mysql2').zero?
+      if postgresql?
+        'PostgreSQL'
+      else
+        'Unknown'
+      end
     end

     def self.postgresql?
@@ -60,15 +60,14 @@ module Gitlab
     # Check whether the underlying database is in read-only mode
     def self.db_read_only?
-      if postgresql?
-        pg_is_in_recovery =
-          ActiveRecord::Base.connection.execute('SELECT pg_is_in_recovery()')
-            .first.fetch('pg_is_in_recovery')
+      pg_is_in_recovery =
+        ActiveRecord::Base
+          .connection
+          .execute('SELECT pg_is_in_recovery()')
+          .first
+          .fetch('pg_is_in_recovery')

-        Gitlab::Utils.to_boolean(pg_is_in_recovery)
-      else
-        false
-      end
+      Gitlab::Utils.to_boolean(pg_is_in_recovery)
     end

     def self.db_read_write?
@@ -118,51 +117,23 @@ module Gitlab
     end

     def self.nulls_last_order(field, direction = 'ASC')
-      order = "#{field} #{direction}"
-
-      if postgresql?
-        order = "#{order} NULLS LAST"
-      else
-        # `field IS NULL` will be `0` for non-NULL columns and `1` for NULL
-        # columns. In the (default) ascending order, `0` comes first.
-        order = "#{field} IS NULL, #{order}" if direction == 'ASC'
-      end
-
-      Arel.sql(order)
+      Arel.sql("#{field} #{direction} NULLS LAST")
     end

     def self.nulls_first_order(field, direction = 'ASC')
-      order = "#{field} #{direction}"
-
-      if postgresql?
-        order = "#{order} NULLS FIRST"
-      else
-        # `field IS NULL` will be `0` for non-NULL columns and `1` for NULL
-        # columns. In the (default) ascending order, `0` comes first.
-        order = "#{field} IS NULL, #{order}" if direction == 'DESC'
-      end
-
-      Arel.sql(order)
+      Arel.sql("#{field} #{direction} NULLS FIRST")
     end

     def self.random
-      postgresql? ? "RANDOM()" : "RAND()"
+      "RANDOM()"
     end

     def self.true_value
-      if postgresql?
-        "'t'"
-      else
-        1
-      end
+      "'t'"
     end

     def self.false_value
-      if postgresql?
-        "'f'"
-      else
-        0
-      end
+      "'f'"
     end

     def self.with_connection_pool(pool_size)
@@ -182,7 +153,7 @@ module Gitlab
     # rows - An Array of Hash instances, each mapping the columns to their
     #        values.
     # return_ids - When set to true the return value will be an Array of IDs of
-    #              the inserted rows, this only works on PostgreSQL.
+    #              the inserted rows
     # disable_quote - A key or an Array of keys to exclude from quoting (You
     #                 become responsible for protection from SQL injection for
     #                 these keys!)
@@ -191,7 +162,6 @@ module Gitlab
       keys = rows.first.keys
       columns = keys.map { |key| connection.quote_column_name(key) }
-      return_ids = false if mysql?

       disable_quote = Array(disable_quote).to_set
       tuples = rows.map do |row|
@@ -258,11 +228,7 @@ module Gitlab
     def self.database_version
       row = connection.execute("SELECT VERSION()").first

-      if postgresql?
-        row['version']
-      else
-        row.first
-      end
+      row['version']
     end
     private_class_method :database_version
......
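A rough usage sketch for the now PostgreSQL-only ordering helpers; the model and column are illustrative:

  # ORDER BY due_date ASC NULLS LAST
  Issue.reorder(Gitlab::Database.nulls_last_order('due_date', 'ASC'))

  # Picks a random row via RANDOM()
  User.order(Arel.sql(Gitlab::Database.random)).first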
@@ -7,8 +7,7 @@ module Gitlab
       # the first of the `start_time_attrs` that isn't NULL. `SELECT` the resulting interval
       # along with an alias specified by the `as` parameter.
       #
-      # Note: For MySQL, the interval is returned in seconds.
-      #       For PostgreSQL, the interval is returned as an INTERVAL type.
+      # Note: the interval is returned as an INTERVAL type.
       def subtract_datetimes(query_so_far, start_time_attrs, end_time_attrs, as)
         diff_fn = subtract_datetimes_diff(query_so_far, start_time_attrs, end_time_attrs)
@@ -16,17 +15,10 @@ module Gitlab
       end

       def subtract_datetimes_diff(query_so_far, start_time_attrs, end_time_attrs)
-        if Gitlab::Database.postgresql?
-          Arel::Nodes::Subtraction.new(
-            Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
-            Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)))
-        elsif Gitlab::Database.mysql?
-          Arel::Nodes::NamedFunction.new(
-            "TIMESTAMPDIFF",
-            [Arel.sql('second'),
-             Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)),
-             Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs))])
-        end
+        Arel::Nodes::Subtraction.new(
+          Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
+          Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs))
+        )
       end
     end
   end
......
@@ -17,13 +17,9 @@ module Gitlab
       def extract_median(results)
         result = results.compact.first

-        if Gitlab::Database.postgresql?
-          result = result.first.presence
+        result = result.first.presence

-          result['median']&.to_f if result
-        elsif Gitlab::Database.mysql?
-          result.to_a.flatten.first
-        end
+        result['median']&.to_f if result
       end

       def extract_medians(results)
@@ -34,31 +30,6 @@ module Gitlab
         end
       end

-      def mysql_median_datetime_sql(arel_table, query_so_far, column_sym)
-        query = arel_table.from
-          .from(arel_table.project(Arel.sql('*')).order(arel_table[column_sym]).as(arel_table.table_name))
-          .project(average([arel_table[column_sym]], 'median'))
-          .where(
-            Arel::Nodes::Between.new(
-              Arel.sql("(select @row_id := @row_id + 1)"),
-              Arel::Nodes::And.new(
-                [Arel.sql('@ct/2.0'),
-                 Arel.sql('@ct/2.0 + 1')]
-              )
-            )
-          ).
-          # Disallow negative values
-          where(arel_table[column_sym].gteq(0))
-
-        [
-          Arel.sql("CREATE TEMPORARY TABLE IF NOT EXISTS #{query_so_far.to_sql}"),
-          Arel.sql("set @ct := (select count(1) from #{arel_table.table_name});"),
-          Arel.sql("set @row_id := 0;"),
-          query.to_sql,
-          Arel.sql("DROP TEMPORARY TABLE IF EXISTS #{arel_table.table_name};")
-        ]
-      end
-
       def pg_median_datetime_sql(arel_table, query_so_far, column_sym, partition_column = nil)
         # Create a CTE with the column we're operating on, row number (after sorting by the column
         # we're operating on), and count of the table we're operating on (duplicated across) all rows
@@ -113,18 +84,8 @@ module Gitlab
       private

-      def median_queries(arel_table, query_so_far, column_sym, partition_column = nil)
-        if Gitlab::Database.postgresql?
-          pg_median_datetime_sql(arel_table, query_so_far, column_sym, partition_column)
-        elsif Gitlab::Database.mysql?
-          raise NotSupportedError, "partition_column is not supported for MySQL" if partition_column
-
-          mysql_median_datetime_sql(arel_table, query_so_far, column_sym)
-        end
-      end
-
       def execute_queries(arel_table, query_so_far, column_sym, partition_column = nil)
-        queries = median_queries(arel_table, query_so_far, column_sym, partition_column)
+        queries = pg_median_datetime_sql(arel_table, query_so_far, column_sym, partition_column)

         Array.wrap(queries).map { |query| ActiveRecord::Base.connection.execute(query) }
       end
......
@@ -68,10 +68,7 @@ module Gitlab
       end
     end

-    # Creates a new index, concurrently when supported
-    #
-    # On PostgreSQL this method creates an index concurrently, on MySQL this
-    # creates a regular index.
+    # Creates a new index, concurrently
     #
     # Example:
     #
@@ -85,9 +82,7 @@ module Gitlab
           'in the body of your migration class'
       end

-      if Database.postgresql?
-        options = options.merge({ algorithm: :concurrently })
-      end
+      options = options.merge({ algorithm: :concurrently })

       if index_exists?(table_name, column_name, options)
         Rails.logger.warn "Index not created because it already exists (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}" # rubocop:disable Gitlab/RailsLogger
@@ -99,9 +94,7 @@ module Gitlab
       end
     end

-    # Removes an existed index, concurrently when supported
-    #
-    # On PostgreSQL this method removes an index concurrently.
+    # Removes an existed index, concurrently
     #
     # Example:
     #
@@ -129,9 +122,7 @@ module Gitlab
       end
     end

-    # Removes an existing index, concurrently when supported
-    #
-    # On PostgreSQL this method removes an index concurrently.
+    # Removes an existing index, concurrently
     #
     # Example:
     #
@@ -170,8 +161,7 @@ module Gitlab
     # Adds a foreign key with only minimal locking on the tables involved.
     #
-    # This method only requires minimal locking when using PostgreSQL. When
-    # using MySQL this method will use Rails' default `add_foreign_key`.
+    # This method only requires minimal locking
     #
     # source - The source table containing the foreign key.
     # target - The target table the key points to.
@@ -187,27 +177,7 @@ module Gitlab
         raise 'add_concurrent_foreign_key can not be run inside a transaction'
       end

-      # While MySQL does allow disabling of foreign keys it has no equivalent
-      # of PostgreSQL's "VALIDATE CONSTRAINT". As a result we'll just fall
-      # back to the normal foreign key procedure.
-      if Database.mysql?
-        if foreign_key_exists?(source, target, column: column)
-          Rails.logger.warn "Foreign key not created because it exists already " \
-            "(this may be due to an aborted migration or similar): " \
-            "source: #{source}, target: #{target}, column: #{column}"
-          return
-        end
-
-        key_options = { column: column, on_delete: on_delete }
-
-        # The MySQL adapter tries to create a foreign key without a name when
-        # `:name` is nil, instead of generating a name for us.
-        key_options[:name] = name if name
-
-        return add_foreign_key(source, target, key_options)
-      else
-        on_delete = 'SET NULL' if on_delete == :nullify
-      end
+      on_delete = 'SET NULL' if on_delete == :nullify

       key_name = name || concurrent_foreign_key_name(source, column)
@@ -265,7 +235,7 @@ module Gitlab
     # Long-running migrations may take more than the timeout allowed by
     # the database. Disable the session's statement timeout to ensure
-    # migrations don't get killed prematurely. (PostgreSQL only)
+    # migrations don't get killed prematurely.
     #
     # There are two possible ways to disable the statement timeout:
     #
@@ -277,15 +247,6 @@ module Gitlab
     # otherwise the statement will still be disabled until connection is dropped
     # or `RESET ALL` is executed
     def disable_statement_timeout
-      # bypass disabled_statement logic when not using postgres, but still execute block when one is given
-      unless Database.postgresql?
-        if block_given?
-          yield
-        end
-
-        return
-      end
-
       if block_given?
         begin
           execute('SET statement_timeout TO 0')
@@ -535,13 +496,12 @@ module Gitlab
       quoted_old = quote_column_name(old_column)
       quoted_new = quote_column_name(new_column)

-      if Database.postgresql?
-        install_rename_triggers_for_postgresql(trigger_name, quoted_table,
-                                                quoted_old, quoted_new)
-      else
-        install_rename_triggers_for_mysql(trigger_name, quoted_table,
-                                          quoted_old, quoted_new)
-      end
+      install_rename_triggers_for_postgresql(
+        trigger_name,
+        quoted_table,
+        quoted_old,
+        quoted_new
+      )
     end

     # Changes the type of a column concurrently.
@@ -584,11 +544,7 @@ module Gitlab
       check_trigger_permissions!(table)

-      if Database.postgresql?
-        remove_rename_triggers_for_postgresql(table, trigger_name)
-      else
-        remove_rename_triggers_for_mysql(trigger_name)
-      end
+      remove_rename_triggers_for_postgresql(table, trigger_name)

       remove_column(table, old)
     end
@@ -801,38 +757,12 @@ module Gitlab
       EOF
     end

-    # Installs the triggers necessary to perform a concurrent column rename on
-    # MySQL.
-    def install_rename_triggers_for_mysql(trigger, table, old, new)
-      execute <<-EOF.strip_heredoc
-      CREATE TRIGGER #{trigger}_insert
-      BEFORE INSERT
-      ON #{table}
-      FOR EACH ROW
-      SET NEW.#{new} = NEW.#{old}
-      EOF
-
-      execute <<-EOF.strip_heredoc
-      CREATE TRIGGER #{trigger}_update
-      BEFORE UPDATE
-      ON #{table}
-      FOR EACH ROW
-      SET NEW.#{new} = NEW.#{old}
-      EOF
-    end
-
     # Removes the triggers used for renaming a PostgreSQL column concurrently.
     def remove_rename_triggers_for_postgresql(table, trigger)
       execute("DROP TRIGGER IF EXISTS #{trigger} ON #{table}")
       execute("DROP FUNCTION IF EXISTS #{trigger}()")
     end

-    # Removes the triggers used for renaming a MySQL column concurrently.
-    def remove_rename_triggers_for_mysql(trigger)
-      execute("DROP TRIGGER IF EXISTS #{trigger}_insert")
-      execute("DROP TRIGGER IF EXISTS #{trigger}_update")
-    end
-
     # Returns the (base) name to use for triggers when renaming columns.
     def rename_trigger_name(table, old, new)
       'trigger_' + Digest::SHA256.hexdigest("#{table}_#{old}_#{new}").first(12)
@@ -882,8 +812,6 @@ module Gitlab
         order: index.orders
       }

-      # These options are not supported by MySQL, so we only add them if
-      # they were previously set.
       options[:using] = index.using if index.using
       options[:where] = index.where if index.where
@@ -923,26 +851,16 @@ module Gitlab
     end

     # This will replace the first occurrence of a string in a column with
-    # the replacement
-    # On postgresql we can use `regexp_replace` for that.
-    # On mysql we find the location of the pattern, and overwrite it
-    # with the replacement
+    # the replacement using `regexp_replace`
     def replace_sql(column, pattern, replacement)
       quoted_pattern = Arel::Nodes::Quoted.new(pattern.to_s)
       quoted_replacement = Arel::Nodes::Quoted.new(replacement.to_s)

-      if Database.mysql?
-        locate = Arel::Nodes::NamedFunction
-          .new('locate', [quoted_pattern, column])
-        insert_in_place = Arel::Nodes::NamedFunction
-          .new('insert', [column, locate, pattern.size, quoted_replacement])
-
-        Arel::Nodes::SqlLiteral.new(insert_in_place.to_sql)
-      else
-        replace = Arel::Nodes::NamedFunction
-          .new("regexp_replace", [column, quoted_pattern, quoted_replacement])
-
-        Arel::Nodes::SqlLiteral.new(replace.to_sql)
-      end
+      replace = Arel::Nodes::NamedFunction.new(
+        "regexp_replace", [column, quoted_pattern, quoted_replacement]
+      )
+
+      Arel::Nodes::SqlLiteral.new(replace.to_sql)
     end

     def remove_foreign_key_if_exists(*args)
@@ -984,11 +902,7 @@ database (#{dbname}) using a super user and running:
     ALTER #{user} WITH SUPERUSER

-For MySQL you instead need to run:
-
-    GRANT ALL PRIVILEGES ON #{dbname}.* TO #{user}@'%'
-
-Both queries will grant the user super user permissions, ensuring you don't run
+This query will grant the user super user permissions, ensuring you don't run
 into similar problems in the future (e.g. when new tables are created).
 EOF
     end
@@ -1091,10 +1005,6 @@ into similar problems in the future (e.g. when new tables are created).
     # This will include indexes using an expression on the column, for example:
     # `CREATE INDEX CONCURRENTLY index_name ON table (LOWER(column));`
     #
-    # For mysql, it falls back to the default ActiveRecord implementation that
-    # will not find custom indexes. But it will select by name without passing
-    # a column.
-    #
     # We can remove this when upgrading to Rails 5 with an updated `index_exists?`:
     # - https://github.com/rails/rails/commit/edc2b7718725016e988089b5fb6d6fb9d6e16882
     #
@@ -1105,10 +1015,8 @@ into similar problems in the future (e.g. when new tables are created).
       # does not find indexes without passing a column name.
       if indexes(table).map(&:name).include?(index.to_s)
         true
-      elsif Gitlab::Database.postgresql?
-        postgres_exists_by_name?(table, index)
       else
-        false
+        postgres_exists_by_name?(table, index)
       end
     end
@@ -1124,10 +1032,6 @@ into similar problems in the future (e.g. when new tables are created).
       connection.select_value(index_sql).to_i > 0
     end

-    def mysql_compatible_index_length
-      Gitlab::Database.mysql? ? 20 : nil
-    end
-
     private

     def validate_timestamp_column_name!(column_name)
......
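To recap how these helpers are invoked, a minimal sketch of a migration using the concurrent helpers; the table, column, and class names are illustrative:

  class AddExampleIndexAndForeignKey < ActiveRecord::Migration[5.0]
    include Gitlab::Database::MigrationHelpers

    DOWNTIME = false

    # add_concurrent_index / add_concurrent_foreign_key can not be run inside a transaction
    disable_ddl_transaction!

    def up
      add_concurrent_index :projects, :example_group_id
      add_concurrent_foreign_key :projects, :namespaces, column: :example_group_id, on_delete: :nullify
    end

    def down
      remove_foreign_key_if_exists :projects, column: :example_group_id
      remove_concurrent_index :projects, :example_group_id
    end
  end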
@@ -51,14 +51,10 @@ module Gitlab
         quoted_old_full_path = quote_string(old_full_path)
         quoted_old_wildcard_path = quote_string("#{old_full_path}/%")

-        filter = if Database.mysql?
-                   "lower(routes.path) = lower('#{quoted_old_full_path}') "\
-                   "OR routes.path LIKE '#{quoted_old_wildcard_path}'"
-                 else
-                   "routes.id IN "\
-                   "( SELECT routes.id FROM routes WHERE lower(routes.path) = lower('#{quoted_old_full_path}') "\
-                   "UNION SELECT routes.id FROM routes WHERE routes.path ILIKE '#{quoted_old_wildcard_path}' )"
-                 end
+        filter =
+          "routes.id IN "\
+          "( SELECT routes.id FROM routes WHERE lower(routes.path) = lower('#{quoted_old_full_path}') "\
+          "UNION SELECT routes.id FROM routes WHERE routes.path ILIKE '#{quoted_old_wildcard_path}' )"

         replace_statement = replace_sql(Route.arel_table[:path],
                                         old_full_path,
......
...@@ -6,9 +6,7 @@ module Gitlab ...@@ -6,9 +6,7 @@ module Gitlab
# Inserts a raw row and returns the ID of the inserted row. # Inserts a raw row and returns the ID of the inserted row.
# #
# attributes - The attributes/columns to set. # attributes - The attributes/columns to set.
# relation - An ActiveRecord::Relation to use for finding the ID of the row # relation - An ActiveRecord::Relation to use for finding the table name
# when using MySQL.
# rubocop: disable CodeReuse/ActiveRecord
def insert_and_return_id(attributes, relation) def insert_and_return_id(attributes, relation)
# We use bulk_insert here so we can bypass any queries executed by # We use bulk_insert here so we can bypass any queries executed by
# callbacks or validation rules, as doing this wouldn't scale when # callbacks or validation rules, as doing this wouldn't scale when
...@@ -16,12 +14,8 @@ module Gitlab ...@@ -16,12 +14,8 @@ module Gitlab
result = Gitlab::Database result = Gitlab::Database
.bulk_insert(relation.table_name, [attributes], return_ids: true) .bulk_insert(relation.table_name, [attributes], return_ids: true)
# MySQL doesn't support returning the IDs of a bulk insert in a way that result.first
# is not a pain, so in this case we'll issue an extra query instead.
result.first ||
relation.where(iid: attributes[:iid]).limit(1).pluck(:id).first
end end
# rubocop: enable CodeReuse/ActiveRecord
end end
end end
end end
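Because PostgreSQL supports INSERT ... RETURNING, Gitlab::Database.bulk_insert(..., return_ids: true) already hands back the generated primary keys, so the extra lookup by iid is no longer needed. A usage sketch with made-up attribute values (`project` is assumed to be an existing record):

# The relation is only used for its table name; the returned value is the
# ID PostgreSQL generated for the inserted row.
id = insert_and_return_id({ iid: 1, title: 'Imported issue' }, project.issues)
# => 10 (for example)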
# frozen_string_literal: true
# Disable NO_ZERO_DATE mode for mysql in rails 5.
# We use zero date as a default value
# (config/initializers/active_record_mysql_timestamp.rb), in
# Rails 5 using zero date fails by default (https://gitlab.com/gitlab-org/gitlab-ce/-/jobs/75450216)
# and NO_ZERO_DATE has to be explicitly disabled. Disabling strict mode
# is not sufficient.
require 'active_record/connection_adapters/abstract_mysql_adapter'
module MysqlZeroDate
def configure_connection
super
@connection.query "SET @@SESSION.sql_mode = REPLACE(@@SESSION.sql_mode, 'NO_ZERO_DATE', '');" # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
end
ActiveRecord::ConnectionAdapters::AbstractMysqlAdapter.prepend(MysqlZeroDate)
# frozen_string_literal: true # frozen_string_literal: true
module Serializers module Serializers
# This serializer exports data as JSON, # Make the resulting hash have deep indifferent access
# it is designed to be used with interwork compatibility between MySQL and PostgreSQL
# implementations, as used version of MySQL does not support native json type
#
# Secondly, the loader makes the resulting hash to have deep indifferent access
class JSON class JSON
class << self class << self
def dump(obj) def dump(obj)
# MySQL stores data as text
# look at ./config/initializers/ar_mysql_jsonb_support.rb
if Gitlab::Database.mysql?
obj = ActiveSupport::JSON.encode(obj)
end
obj obj
end end
def load(data) def load(data)
return if data.nil? return if data.nil?
# On MySQL we store data as text
# look at ./config/initializers/ar_mysql_jsonb_support.rb
if Gitlab::Database.mysql?
data = ActiveSupport::JSON.decode(data)
end
Gitlab::Utils.deep_indifferent_access(data) Gitlab::Utils.deep_indifferent_access(data)
end end
end end
......
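On PostgreSQL the backing column can be native jsonb, so the adapter already handles serialization; `dump` becomes a pass-through and `load` only wraps the result for indifferent access. An illustrative round trip (values made up):

attrs = { 'key' => 'value', 'variables' => [{ 'key' => 'VAR1', 'value' => 'VALUE1' }] }

Serializers::JSON.dump(attrs)      # => the same Hash, unchanged
data = Serializers::JSON.load(attrs)
data[:key]                         # => "value"
data['variables'].first[:key]      # => "VAR1"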
...@@ -28,7 +28,7 @@ sudo -u git -H git pull origin master ...@@ -28,7 +28,7 @@ sudo -u git -H git pull origin master
echo 'Deploy: Bundle and migrate' echo 'Deploy: Bundle and migrate'
# change it to your needs # change it to your needs
sudo -u git -H bundle --without aws development test mysql --deployment sudo -u git -H bundle --without aws development test --deployment
sudo -u git -H bundle exec rake db:migrate RAILS_ENV=production sudo -u git -H bundle exec rake db:migrate RAILS_ENV=production
sudo -u git -H bundle exec rake gitlab:assets:clean RAILS_ENV=production sudo -u git -H bundle exec rake gitlab:assets:clean RAILS_ENV=production
......
...@@ -26,12 +26,9 @@ namespace :gitlab do ...@@ -26,12 +26,9 @@ namespace :gitlab do
task drop_tables: :environment do task drop_tables: :environment do
connection = ActiveRecord::Base.connection connection = ActiveRecord::Base.connection
# If MySQL, turn off foreign key checks # In PostgreSQLAdapter, data_sources returns both views and tables, so use
connection.execute('SET FOREIGN_KEY_CHECKS=0') if Gitlab::Database.mysql? # #tables instead
tables = connection.tables
# connection.tables is deprecated in MySQLAdapter, but in PostgreSQLAdapter
# data_sources returns both views and tables, so use #tables instead
tables = Gitlab::Database.mysql? ? connection.data_sources : connection.tables
# Removes the entry from the array # Removes the entry from the array
tables.delete 'schema_migrations' tables.delete 'schema_migrations'
...@@ -40,12 +37,8 @@ namespace :gitlab do ...@@ -40,12 +37,8 @@ namespace :gitlab do
# Drop tables with cascade to avoid dependent table errors # Drop tables with cascade to avoid dependent table errors
# PG: http://www.postgresql.org/docs/current/static/ddl-depend.html # PG: http://www.postgresql.org/docs/current/static/ddl-depend.html
# MySQL: http://dev.mysql.com/doc/refman/5.7/en/drop-table.html
# Add `IF EXISTS` because cascade could have already deleted a table. # Add `IF EXISTS` because cascade could have already deleted a table.
tables.each { |t| connection.execute("DROP TABLE IF EXISTS #{connection.quote_table_name(t)} CASCADE") } tables.each { |t| connection.execute("DROP TABLE IF EXISTS #{connection.quote_table_name(t)} CASCADE") }
# If MySQL, re-enable foreign key checks
connection.execute('SET FOREIGN_KEY_CHECKS=1') if Gitlab::Database.mysql?
end end
desc 'Configures the database by running migrate, or by loading the schema and seeding if needed' desc 'Configures the database by running migrate, or by loading the schema and seeding if needed'
......
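DROP TABLE ... CASCADE also removes objects that depend on the table, such as foreign-key constraints, so PostgreSQL needs no equivalent of toggling foreign key checks and the tables can be dropped in any order. Roughly, for a couple of illustrative table names:

connection = ActiveRecord::Base.connection

%w[issues projects].each do |table|
  # IF EXISTS because an earlier CASCADE may already have removed the table.
  connection.execute("DROP TABLE IF EXISTS #{connection.quote_table_name(table)} CASCADE")
end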
...@@ -31,7 +31,6 @@ namespace :gitlab do ...@@ -31,7 +31,6 @@ namespace :gitlab do
terminate_all_connections unless Rails.env.production? terminate_all_connections unless Rails.env.production?
Rake::Task["db:reset"].invoke Rake::Task["db:reset"].invoke
Rake::Task["add_limits_mysql"].invoke
Rake::Task["setup_postgresql"].invoke Rake::Task["setup_postgresql"].invoke
Rake::Task["db:seed_fu"].invoke Rake::Task["db:seed_fu"].invoke
rescue Gitlab::TaskAbortedByUserError rescue Gitlab::TaskAbortedByUserError
......
require Rails.root.join('db/migrate/limits_to_mysql')
require Rails.root.join('db/migrate/markdown_cache_limits_to_mysql')
require Rails.root.join('db/migrate/merge_request_diff_file_limits_to_mysql')
require Rails.root.join('db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql')
require Rails.root.join('db/migrate/gpg_keys_limits_to_mysql')
require Rails.root.join('db/migrate/prometheus_metrics_limits_to_mysql')
desc "GitLab | Add limits to strings in mysql database"
task add_limits_mysql: :environment do
puts "Adding limits to schema.rb for mysql"
LimitsToMysql.new.up
MarkdownCacheLimitsToMysql.new.up
MergeRequestDiffFileLimitsToMysql.new.up
LimitsCiBuildTraceChunksRawDataForMysql.new.up
IncreaseMysqlTextLimitForGpgKeys.new.up
PrometheusMetricsLimitsToMysql.new.up
end
#!/bin/bash
mysql --user=root --host=mysql <<EOF
CREATE USER IF NOT EXISTS 'gitlab'@'%';
GRANT ALL PRIVILEGES ON gitlabhq_test.* TO 'gitlab'@'%';
FLUSH PRIVILEGES;
EOF
...@@ -62,13 +62,7 @@ describe 'New project' do ...@@ -62,13 +62,7 @@ describe 'New project' do
find('#import-project-tab').click find('#import-project-tab').click
end end
context 'when using postgres', :postgresql do it { expect(page).to have_link('Manifest file') }
it { expect(page).to have_link('Manifest file') }
end
context 'when using mysql', :mysql do
it { expect(page).not_to have_link('Manifest file') }
end
end end
context 'Visibility level selector', :js do context 'Visibility level selector', :js do
......
...@@ -4,19 +4,9 @@ describe Forever do ...@@ -4,19 +4,9 @@ describe Forever do
describe '.date' do describe '.date' do
subject { described_class.date } subject { described_class.date }
context 'when using PostgreSQL' do it 'returns Postgresql future date' do
it 'returns Postgresql future date' do Timecop.travel(Date.new(2999, 12, 31)) do
allow(Gitlab::Database).to receive(:postgresql?).and_return(true) is_expected.to be > Date.today
expect(subject).to eq(described_class::POSTGRESQL_DATE)
end
end
context 'when using MySQL' do
it 'returns MySQL future date' do
allow(Gitlab::Database).to receive(:postgresql?).and_return(false)
expect(subject).to eq(described_class::MYSQL_DATE)
end end
end end
end end
......
...@@ -28,27 +28,7 @@ describe Gitlab::CycleAnalytics::UsageData do ...@@ -28,27 +28,7 @@ describe Gitlab::CycleAnalytics::UsageData do
end end
end end
shared_examples 'a valid usage data result' do context 'a valid usage data result' do
it 'returns the aggregated usage data of every selected project' do
result = subject.to_json
expect(result).to have_key(:avg_cycle_analytics)
CycleAnalytics::LevelBase::STAGES.each do |stage|
expect(result[:avg_cycle_analytics]).to have_key(stage)
stage_values = result[:avg_cycle_analytics][stage]
expected_values = expect_values_per_stage[stage]
expected_values.each_pair do |op, value|
expect(stage_values).to have_key(op)
expect(stage_values[op]).to eq(value)
end
end
end
end
context 'when using postgresql', :postgresql do
let(:expect_values_per_stage) do let(:expect_values_per_stage) do
{ {
issue: { issue: {
...@@ -89,51 +69,23 @@ describe Gitlab::CycleAnalytics::UsageData do ...@@ -89,51 +69,23 @@ describe Gitlab::CycleAnalytics::UsageData do
} }
end end
it_behaves_like 'a valid usage data result' it 'returns the aggregated usage data of every selected project' do
end result = subject.to_json
context 'when using mysql', :mysql do expect(result).to have_key(:avg_cycle_analytics)
let(:expect_values_per_stage) do
{ CycleAnalytics::LevelBase::STAGES.each do |stage|
issue: { expect(result[:avg_cycle_analytics]).to have_key(stage)
average: nil,
sd: 0,
missing: 2
},
plan: {
average: nil,
sd: 0,
missing: 2
},
code: {
average: nil,
sd: 0,
missing: 2
},
test: {
average: nil,
sd: 0,
missing: 2
},
review: {
average: nil,
sd: 0,
missing: 2
},
staging: {
average: nil,
sd: 0,
missing: 2
},
production: {
average: nil,
sd: 0,
missing: 2
}
}
end
it_behaves_like 'a valid usage data result' stage_values = result[:avg_cycle_analytics][stage]
expected_values = expect_values_per_stage[stage]
expected_values.each_pair do |op, value|
expect(stage_values).to have_key(op)
expect(stage_values[op]).to eq(value)
end
end
end
end end
end end
end end
require 'spec_helper'
describe Gitlab::Database::Median do
let(:dummy_class) do
Class.new do
include Gitlab::Database::Median
end
end
subject(:median) { dummy_class.new }
describe '#median_datetimes' do
it 'raises NotSupportedError', :mysql do
expect { median.median_datetimes(nil, nil, nil, :project_id) }.to raise_error(dummy_class::NotSupportedError, "partition_column is not supported for MySQL")
end
end
end
...@@ -24,21 +24,13 @@ describe Gitlab::Database do ...@@ -24,21 +24,13 @@ describe Gitlab::Database do
expect(described_class.human_adapter_name).to eq('PostgreSQL') expect(described_class.human_adapter_name).to eq('PostgreSQL')
end end
it 'returns MySQL when using MySQL' do it 'returns Unknown when using anything else' do
allow(described_class).to receive(:postgresql?).and_return(false) allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.human_adapter_name).to eq('MySQL') expect(described_class.human_adapter_name).to eq('Unknown')
end end
end end
# These are just simple smoke tests to check if the methods work (regardless
# of what they may return).
describe '.mysql?' do
subject { described_class.mysql? }
it { is_expected.to satisfy { |val| val == true || val == false } }
end
describe '.postgresql?' do describe '.postgresql?' do
subject { described_class.postgresql? } subject { described_class.postgresql? }
...@@ -52,15 +44,6 @@ describe Gitlab::Database do ...@@ -52,15 +44,6 @@ describe Gitlab::Database do
described_class.instance_variable_set(:@version, nil) described_class.instance_variable_set(:@version, nil)
end end
context "on mysql" do
it "extracts the version number" do
allow(described_class).to receive(:database_version)
.and_return("5.7.12-standard")
expect(described_class.version).to eq '5.7.12-standard'
end
end
context "on postgresql" do context "on postgresql" do
it "extracts the version number" do it "extracts the version number" do
allow(described_class).to receive(:database_version) allow(described_class).to receive(:database_version)
...@@ -80,7 +63,7 @@ describe Gitlab::Database do ...@@ -80,7 +63,7 @@ describe Gitlab::Database do
end end
describe '.postgresql_9_or_less?' do describe '.postgresql_9_or_less?' do
it 'returns false when using MySQL' do it 'returns false when not using postgresql' do
allow(described_class).to receive(:postgresql?).and_return(false) allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.postgresql_9_or_less?).to eq(false) expect(described_class.postgresql_9_or_less?).to eq(false)
...@@ -134,7 +117,7 @@ describe Gitlab::Database do ...@@ -134,7 +117,7 @@ describe Gitlab::Database do
end end
describe '.join_lateral_supported?' do describe '.join_lateral_supported?' do
it 'returns false when using MySQL' do it 'returns false when not using postgresql' do
allow(described_class).to receive(:postgresql?).and_return(false) allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.join_lateral_supported?).to eq(false) expect(described_class.join_lateral_supported?).to eq(false)
...@@ -156,7 +139,7 @@ describe Gitlab::Database do ...@@ -156,7 +139,7 @@ describe Gitlab::Database do
end end
describe '.replication_slots_supported?' do describe '.replication_slots_supported?' do
it 'returns false when using MySQL' do it 'returns false when not using postgresql' do
allow(described_class).to receive(:postgresql?).and_return(false) allow(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.replication_slots_supported?).to eq(false) expect(described_class.replication_slots_supported?).to eq(false)
...@@ -248,43 +231,13 @@ describe Gitlab::Database do ...@@ -248,43 +231,13 @@ describe Gitlab::Database do
end end
describe '.nulls_last_order' do describe '.nulls_last_order' do
context 'when using PostgreSQL' do it { expect(described_class.nulls_last_order('column', 'ASC')).to eq 'column ASC NULLS LAST'}
before do it { expect(described_class.nulls_last_order('column', 'DESC')).to eq 'column DESC NULLS LAST'}
expect(described_class).to receive(:postgresql?).and_return(true)
end
it { expect(described_class.nulls_last_order('column', 'ASC')).to eq 'column ASC NULLS LAST'}
it { expect(described_class.nulls_last_order('column', 'DESC')).to eq 'column DESC NULLS LAST'}
end
context 'when using MySQL' do
before do
expect(described_class).to receive(:postgresql?).and_return(false)
end
it { expect(described_class.nulls_last_order('column', 'ASC')).to eq 'column IS NULL, column ASC'}
it { expect(described_class.nulls_last_order('column', 'DESC')).to eq 'column DESC'}
end
end end
describe '.nulls_first_order' do describe '.nulls_first_order' do
context 'when using PostgreSQL' do it { expect(described_class.nulls_first_order('column', 'ASC')).to eq 'column ASC NULLS FIRST'}
before do it { expect(described_class.nulls_first_order('column', 'DESC')).to eq 'column DESC NULLS FIRST'}
expect(described_class).to receive(:postgresql?).and_return(true)
end
it { expect(described_class.nulls_first_order('column', 'ASC')).to eq 'column ASC NULLS FIRST'}
it { expect(described_class.nulls_first_order('column', 'DESC')).to eq 'column DESC NULLS FIRST'}
end
context 'when using MySQL' do
before do
expect(described_class).to receive(:postgresql?).and_return(false)
end
it { expect(described_class.nulls_first_order('column', 'ASC')).to eq 'column ASC'}
it { expect(described_class.nulls_first_order('column', 'DESC')).to eq 'column IS NULL, column DESC'}
end
end end
describe '.with_connection_pool' do describe '.with_connection_pool' do
...@@ -394,10 +347,6 @@ describe Gitlab::Database do ...@@ -394,10 +347,6 @@ describe Gitlab::Database do
end end
context 'when using PostgreSQL' do context 'when using PostgreSQL' do
before do
allow(described_class).to receive(:mysql?).and_return(false)
end
it 'allows the returning of the IDs of the inserted rows' do it 'allows the returning of the IDs of the inserted rows' do
result = double(:result, values: [['10']]) result = double(:result, values: [['10']])
...@@ -463,31 +412,15 @@ describe Gitlab::Database do ...@@ -463,31 +412,15 @@ describe Gitlab::Database do
end end
describe '#true_value' do describe '#true_value' do
it 'returns correct value for PostgreSQL' do it 'returns correct value' do
expect(described_class).to receive(:postgresql?).and_return(true)
expect(described_class.true_value).to eq "'t'" expect(described_class.true_value).to eq "'t'"
end end
it 'returns correct value for MySQL' do
expect(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.true_value).to eq 1
end
end end
describe '#false_value' do describe '#false_value' do
it 'returns correct value for PostgreSQL' do it 'returns correct value' do
expect(described_class).to receive(:postgresql?).and_return(true)
expect(described_class.false_value).to eq "'f'" expect(described_class.false_value).to eq "'f'"
end end
it 'returns correct value for MySQL' do
expect(described_class).to receive(:postgresql?).and_return(false)
expect(described_class.false_value).to eq 0
end
end end
describe '.read_only?' do describe '.read_only?' do
...@@ -497,43 +430,33 @@ describe Gitlab::Database do ...@@ -497,43 +430,33 @@ describe Gitlab::Database do
end end
describe '.db_read_only?' do describe '.db_read_only?' do
context 'when using PostgreSQL' do before do
before do allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original allow(described_class).to receive(:postgresql?).and_return(true)
allow(described_class).to receive(:postgresql?).and_return(true) end
end
it 'detects a read only database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "t" }])
expect(described_class.db_read_only?).to be_truthy
end
it 'detects a read only database' do it 'detects a read only database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => true }]) allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "t" }])
expect(described_class.db_read_only?).to be_truthy expect(described_class.db_read_only?).to be_truthy
end end
it 'detects a read write database' do it 'detects a read only database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "f" }]) allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => true }])
expect(described_class.db_read_only?).to be_falsey expect(described_class.db_read_only?).to be_truthy
end end
it 'detects a read write database' do it 'detects a read write database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => false }]) allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "f" }])
expect(described_class.db_read_only?).to be_falsey expect(described_class.db_read_only?).to be_falsey
end
end end
context 'when using MySQL' do it 'detects a read write database' do
before do allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => false }])
expect(described_class).to receive(:postgresql?).and_return(false)
end
it { expect(described_class.db_read_only?).to be_falsey } expect(described_class.db_read_only?).to be_falsey
end end
end end
......
...@@ -15,32 +15,15 @@ describe Gitlab::Import::DatabaseHelpers do ...@@ -15,32 +15,15 @@ describe Gitlab::Import::DatabaseHelpers do
let(:attributes) { { iid: 1, title: 'foo' } } let(:attributes) { { iid: 1, title: 'foo' } }
let(:project) { create(:project) } let(:project) { create(:project) }
context 'on PostgreSQL' do it 'returns the ID returned by the query' do
it 'returns the ID returned by the query' do expect(Gitlab::Database)
expect(Gitlab::Database) .to receive(:bulk_insert)
.to receive(:bulk_insert) .with(Issue.table_name, [attributes], return_ids: true)
.with(Issue.table_name, [attributes], return_ids: true) .and_return([10])
.and_return([10])
id = subject.insert_and_return_id(attributes, project.issues) id = subject.insert_and_return_id(attributes, project.issues)
expect(id).to eq(10) expect(id).to eq(10)
end
end
context 'on MySQL' do
it 'uses a separate query to retrieve the ID' do
issue = create(:issue, project: project, iid: attributes[:iid])
expect(Gitlab::Database)
.to receive(:bulk_insert)
.with(Issue.table_name, [attributes], return_ids: true)
.and_return([])
id = subject.insert_and_return_id(attributes, project.issues)
expect(id).to eq(issue.id)
end
end end
end end
end end
...@@ -6,24 +6,8 @@ describe Serializers::JSON do ...@@ -6,24 +6,8 @@ describe Serializers::JSON do
subject { described_class.dump(obj) } subject { described_class.dump(obj) }
context 'when MySQL is used' do it 'returns a hash' do
before do is_expected.to eq(obj)
allow(Gitlab::Database).to receive(:adapter_name) { 'mysql2' }
end
it 'encodes as string' do
is_expected.to eq('{"key":"value"}')
end
end
context 'when PostgreSQL is used' do
before do
allow(Gitlab::Database).to receive(:adapter_name) { 'postgresql' }
end
it 'returns a hash' do
is_expected.to eq(obj)
end
end end
end end
...@@ -31,7 +15,13 @@ describe Serializers::JSON do ...@@ -31,7 +15,13 @@ describe Serializers::JSON do
let(:data_string) { '{"key":"value","variables":[{"key":"VAR1","value":"VALUE1"}]}' } let(:data_string) { '{"key":"value","variables":[{"key":"VAR1","value":"VALUE1"}]}' }
let(:data_hash) { JSON.parse(data_string) } let(:data_hash) { JSON.parse(data_string) }
shared_examples 'having consistent accessor' do context 'when loading a hash' do
subject { described_class.load(data_hash) }
it 'decodes a string' do
is_expected.to be_a(Hash)
end
it 'allows to access with symbols' do it 'allows to access with symbols' do
expect(subject[:key]).to eq('value') expect(subject[:key]).to eq('value')
expect(subject[:variables].first[:key]).to eq('VAR1') expect(subject[:variables].first[:key]).to eq('VAR1')
...@@ -43,59 +33,11 @@ describe Serializers::JSON do ...@@ -43,59 +33,11 @@ describe Serializers::JSON do
end end
end end
context 'when MySQL is used' do context 'when loading a nil' do
before do subject { described_class.load(nil) }
allow(Gitlab::Database).to receive(:adapter_name) { 'mysql2' }
end
context 'when loading a string' do
subject { described_class.load(data_string) }
it 'decodes a string' do
is_expected.to be_a(Hash)
end
it_behaves_like 'having consistent accessor'
end
context 'when loading a different type' do
subject { described_class.load({ key: 'hash' }) }
it 'raises an exception' do
expect { subject }.to raise_error(TypeError)
end
end
context 'when loading a nil' do
subject { described_class.load(nil) }
it 'returns nil' do
is_expected.to be_nil
end
end
end
context 'when PostgreSQL is used' do
before do
allow(Gitlab::Database).to receive(:adapter_name) { 'postgresql' }
end
context 'when loading a hash' do
subject { described_class.load(data_hash) }
it 'decodes a string' do
is_expected.to be_a(Hash)
end
it_behaves_like 'having consistent accessor'
end
context 'when loading a nil' do
subject { described_class.load(nil) }
it 'returns nil' do it 'returns nil' do
is_expected.to be_nil is_expected.to be_nil
end
end end
end end
end end
......
...@@ -28,28 +28,13 @@ describe CaseSensitivity do ...@@ -28,28 +28,13 @@ describe CaseSensitivity do
.to contain_exactly(model_1) .to contain_exactly(model_1)
end end
# Using `mysql` & `postgresql` metadata-tags here because both adapters build it 'builds a query using LOWER' do
# the query slightly differently query = model.iwhere(path: %w(MODEL-1 model-2), name: 'model 1').to_sql
context 'for MySQL', :mysql do expected_query = <<~QRY.strip
it 'builds a simple query' do SELECT \"namespaces\".* FROM \"namespaces\" WHERE (LOWER(\"namespaces\".\"path\") IN (LOWER('MODEL-1'), LOWER('model-2'))) AND (LOWER(\"namespaces\".\"name\") = LOWER('model 1'))
query = model.iwhere(path: %w(MODEL-1 model-2), name: 'model 1').to_sql QRY
expected_query = <<~QRY.strip
SELECT `namespaces`.* FROM `namespaces` WHERE (`namespaces`.`path` IN ('MODEL-1', 'model-2')) AND (`namespaces`.`name` = 'model 1')
QRY
expect(query).to eq(expected_query)
end
end
context 'for PostgreSQL', :postgresql do expect(query).to eq(expected_query)
it 'builds a query using LOWER' do
query = model.iwhere(path: %w(MODEL-1 model-2), name: 'model 1').to_sql
expected_query = <<~QRY.strip
SELECT \"namespaces\".* FROM \"namespaces\" WHERE (LOWER(\"namespaces\".\"path\") IN (LOWER('MODEL-1'), LOWER('model-2'))) AND (LOWER(\"namespaces\".\"name\") = LOWER('model 1'))
QRY
expect(query).to eq(expected_query)
end
end end
end end
end end
...@@ -266,10 +266,6 @@ RSpec.configure do |config| ...@@ -266,10 +266,6 @@ RSpec.configure do |config|
example.run if Gitlab::Database.postgresql? example.run if Gitlab::Database.postgresql?
end end
config.around(:each, :mysql) do |example|
example.run if Gitlab::Database.mysql?
end
# This makes sure the `ApplicationController#can?` method is stubbed with the # This makes sure the `ApplicationController#can?` method is stubbed with the
# original implementation for all view specs. # original implementation for all view specs.
config.before(:each, type: :view) do config.before(:each, type: :view) do
......
...@@ -55,21 +55,13 @@ describe BackgroundMigrationWorker, :sidekiq, :clean_gitlab_redis_shared_state d ...@@ -55,21 +55,13 @@ describe BackgroundMigrationWorker, :sidekiq, :clean_gitlab_redis_shared_state d
end end
describe '#healthy_database?' do describe '#healthy_database?' do
context 'using MySQL', :mysql do context 'when replication lag is too great' do
it 'returns true' do it 'returns false' do
expect(worker.healthy_database?).to eq(true) allow(Postgresql::ReplicationSlot)
end .to receive(:lag_too_great?)
end .and_return(true)
context 'using PostgreSQL', :postgresql do
context 'when replication lag is too great' do
it 'returns false' do
allow(Postgresql::ReplicationSlot)
.to receive(:lag_too_great?)
.and_return(true)
expect(worker.healthy_database?).to eq(false) expect(worker.healthy_database?).to eq(false)
end
end end
context 'when replication lag is small enough' do context 'when replication lag is small enough' do
......