Commit 968fd96e authored by Stan Hu

Merge branch '216527-time-zone-usage-services' into 'master'

Update Time.now => Time.current in Services

See merge request gitlab-org/gitlab!31683
parents 079c18c1 cb514e80
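
For reviewers unfamiliar with the distinction, here is a minimal illustrative sketch (not part of the diff) of the behaviour this change targets: `Time.current` and `Date.current` respect the Rails-configured zone (`Time.zone`), while `Time.now` and `Date.today` use the server's system zone.

```ruby
# Illustrative only; assumes ActiveSupport is loaded and Time.zone is configured.
require "active_support/time"
Time.zone = "UTC"

Time.now       # Time in the server's system zone; ignores Time.zone
Time.current   # == Time.zone.now when Time.zone is set, else falls back to Time.now

Date.today     # today's date in the system zone
Date.current   # == Time.zone.today; can differ from Date.today around midnight
```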
......@@ -461,3 +461,13 @@ Performance/ChainArrayAllocation:
- 'lib/gitlab/import_export/**/*'
- 'ee/lib/gitlab/import_export/**/*'
- 'ee/lib/ee/gitlab/import_export/**/*'
Rails/TimeZone:
Enabled: true
EnforcedStyle: 'flexible'
Include:
- 'app/services/**/*'
- 'spec/services/**/*'
- 'ee/app/services/**/*'
- 'ee/spec/services/**/*'
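
For context (based on rubocop-rails' documented behaviour for this cop, not on anything specific to this MR), the `flexible` style accepts any zone-aware form and flags bare system-zone calls in the included service and spec paths:

```ruby
# Illustrative sketch of Rails/TimeZone with EnforcedStyle: 'flexible'.
require "active_support/time"
require "time"
Time.zone = "UTC"
timestamp = 1_588_000_000

Time.now                          # offense: prefer Time.zone.now or Time.current
Time.parse("2020-05-01 10:00")    # offense: prefer Time.zone.parse
Time.at(timestamp)                # offense: prefer Time.zone.at

Time.current                      # accepted
DateTime.current                  # accepted
Time.zone.now                     # accepted
Time.at(timestamp).in_time_zone   # accepted under 'flexible' (not under 'strict')
```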
......@@ -88,12 +88,12 @@ class Iteration < ApplicationRecord
# ensure dates are in the future
def future_date
if start_date_changed?
errors.add(:start_date, s_("Iteration|cannot be in the past")) if start_date < Date.today
errors.add(:start_date, s_("Iteration|cannot be in the past")) if start_date < Date.current
errors.add(:start_date, s_("Iteration|cannot be more than 500 years in the future")) if start_date > 500.years.from_now
end
if due_date_changed?
errors.add(:due_date, s_("Iteration|cannot be in the past")) if due_date < Date.today
errors.add(:due_date, s_("Iteration|cannot be in the past")) if due_date < Date.current
errors.add(:due_date, s_("Iteration|cannot be more than 500 years in the future")) if due_date > 500.years.from_now
end
end
......
......@@ -52,7 +52,7 @@ module Auth
end
def self.token_expire_at
Time.now + Gitlab::CurrentSettings.container_registry_token_expire_delay.minutes
Time.current + Gitlab::CurrentSettings.container_registry_token_expire_delay.minutes
end
private
......
......@@ -201,7 +201,7 @@ module Ci
labels[:shard] = shard.gsub(METRICS_SHARD_TAG_PREFIX, '') if shard
end
job_queue_duration_seconds.observe(labels, Time.now - job.queued_at) unless job.queued_at.nil?
job_queue_duration_seconds.observe(labels, Time.current - job.queued_at) unless job.queued_at.nil?
attempt_counter.increment
end
......
......@@ -33,7 +33,7 @@ module Clusters
end
def timed_out?
Time.now.utc - app.updated_at.utc > ClusterWaitForAppInstallationWorker::TIMEOUT
Time.current.utc - app.updated_at.utc > ClusterWaitForAppInstallationWorker::TIMEOUT
end
def remove_installation_pod
......
......@@ -31,7 +31,7 @@ module Clusters
end
def timed_out?
Time.now.utc - app.updated_at.utc > WaitForUninstallAppWorker::TIMEOUT
Time.current.utc - app.updated_at.utc > WaitForUninstallAppWorker::TIMEOUT
end
def remove_uninstallation_pod
......
......@@ -46,7 +46,7 @@ module Clusters
end
def timed_out?
Time.now.utc - app.updated_at.to_time.utc > ::ClusterWaitForAppUpdateWorker::TIMEOUT
Time.current.utc - app.updated_at.to_time.utc > ::ClusterWaitForAppUpdateWorker::TIMEOUT
end
def remove_pod
......
......@@ -16,9 +16,9 @@ module Clusters
return unless application
if recently_scheduled?
worker_class.perform_in(BACKOFF_DELAY, application.name, application.id, project.id, Time.now)
worker_class.perform_in(BACKOFF_DELAY, application.name, application.id, project.id, Time.current)
else
worker_class.perform_async(application.name, application.id, project.id, Time.now)
worker_class.perform_async(application.name, application.id, project.id, Time.current)
end
end
......@@ -31,7 +31,7 @@ module Clusters
def recently_scheduled?
return false unless application.last_update_started_at
application.last_update_started_at.utc >= Time.now.utc - BACKOFF_DELAY
application.last_update_started_at.utc >= Time.current.utc - BACKOFF_DELAY
end
end
end
......
......@@ -35,7 +35,7 @@ module Clusters
end
def elapsed_time_from_creation(operation)
Time.now.utc - operation.start_time.to_time.utc
Time.current.utc - operation.start_time.to_time.utc
end
def finalize_creation
......
......@@ -54,8 +54,8 @@ module Clusters
cert = OpenSSL::X509::Certificate.new
cert.version = 2
cert.serial = 0
cert.not_before = Time.now
cert.not_after = Time.now + 1000.years
cert.not_before = Time.current
cert.not_after = Time.current + 1000.years
cert.public_key = key.public_key
cert.subject = name
......
......@@ -221,7 +221,7 @@ class IssuableBaseService < BaseService
issuable.assign_attributes(params)
if has_title_or_description_changed?(issuable)
issuable.assign_attributes(last_edited_at: Time.now, last_edited_by: current_user)
issuable.assign_attributes(last_edited_at: Time.current, last_edited_by: current_user)
end
before_update(issuable)
......@@ -270,7 +270,7 @@ class IssuableBaseService < BaseService
if issuable.changed? || params.present?
issuable.assign_attributes(params.merge(updated_by: current_user,
last_edited_at: Time.now,
last_edited_at: Time.current,
last_edited_by: current_user))
before_update(issuable, skip_spam_check: true)
......
......@@ -8,7 +8,7 @@ module Members
source.members.create(
access_level: Gitlab::Access::DEVELOPER,
user: current_user,
requested_at: Time.now.utc)
requested_at: Time.current.utc)
end
private
......
......@@ -33,7 +33,7 @@ module Projects
end
def order_by_date(tags)
now = DateTime.now
now = DateTime.current
tags.sort_by { |tag| tag.created_at || now }.reverse
end
......
......@@ -70,7 +70,7 @@ module Projects
#
# @param [String] new_path
def discard_path!(new_path)
discarded_path = "#{new_path}-#{Time.now.utc.to_i}"
discarded_path = "#{new_path}-#{Time.current.utc.to_i}"
logger.info("Moving existing empty attachments folder from '#{new_path}' to '#{discarded_path}', (PROJECT_ID=#{project.id})")
FileUtils.mv(new_path, discarded_path)
......
......@@ -121,7 +121,7 @@ module Projects
end
def moved_path(path)
"#{path}+#{project.id}+moved+#{Time.now.to_i}"
"#{path}+#{project.id}+moved+#{Time.current.to_i}"
end
# The underlying FetchInternalRemote call uses a `git fetch` to move data
......
......@@ -26,7 +26,7 @@ module ResourceEvents
def since_fetch_at(events)
return events unless params[:last_fetched_at].present?
last_fetched_at = Time.at(params.fetch(:last_fetched_at).to_i)
last_fetched_at = Time.zone.at(params.fetch(:last_fetched_at).to_i)
events.created_after(last_fetched_at - NotesFinder::FETCH_OVERLAP)
end
......
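
The `Time.at` → `Time.zone.at` conversion above changes only the zone of the returned object, not the instant it represents; a quick sketch, assuming `Time.zone` is configured:

```ruby
# Both calls represent the same epoch instant; only the returned object's zone differs.
require "active_support/time"
Time.zone = "UTC"

epoch = 1_588_000_000
Time.at(epoch)        # Time in the server's system zone
Time.zone.at(epoch)   # ActiveSupport::TimeWithZone in Time.zone
Time.at(epoch).to_i == Time.zone.at(epoch).to_i  # => true
```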
......@@ -4,7 +4,7 @@ module ResourceEvents
class ChangeMilestoneService
attr_reader :resource, :user, :event_created_at, :milestone, :old_milestone
def initialize(resource, user, created_at: Time.now, old_milestone:)
def initialize(resource, user, created_at: Time.current, old_milestone:)
@resource = resource
@user = user
@event_created_at = created_at
......
......@@ -17,7 +17,7 @@ module Spam
params = {
type: 'comment',
text: text,
created_at: DateTime.now,
created_at: DateTime.current,
author: owner_name,
author_email: owner_email,
referrer: options[:referrer]
......
......@@ -42,7 +42,7 @@ module Terraform
state.lock_xid = params[:lock_id]
state.locked_by_user = current_user
state.locked_at = Time.now
state.locked_at = Time.current
state.save!
end
......
......@@ -37,7 +37,7 @@ class VerifyPagesDomainService < BaseService
# Prevent any pre-existing grace period from being truncated
reverify = [domain.enabled_until, VERIFICATION_PERIOD.from_now].compact.max
domain.assign_attributes(verified_at: Time.now, enabled_until: reverify, remove_at: nil)
domain.assign_attributes(verified_at: Time.current, enabled_until: reverify, remove_at: nil)
domain.save!(validate: false)
if was_disabled
......@@ -73,7 +73,7 @@ class VerifyPagesDomainService < BaseService
# A domain is only expired until `disable!` has been called
def expired?
domain.enabled_until && domain.enabled_until < Time.now
domain.enabled_until && domain.enabled_until < Time.current
end
def dns_record_present?
......
......@@ -20,7 +20,7 @@ module Ci
def notify_on_total_usage
return unless namespace.shared_runners_minutes_used? && namespace.last_ci_minutes_notification_at.nil?
namespace.update_columns(last_ci_minutes_notification_at: Time.now)
namespace.update_columns(last_ci_minutes_notification_at: Time.current)
CiMinutesUsageMailer.notify(namespace.name, recipients).deliver_later
end
......
......@@ -9,7 +9,7 @@ class ClearNamespaceSharedRunnersMinutesService < BaseService
def execute
NamespaceStatistics.where(namespace: @namespace).update_all(
shared_runners_seconds: 0,
shared_runners_seconds_last_reset: Time.now
shared_runners_seconds_last_reset: Time.current
)
end
# rubocop: enable CodeReuse/ActiveRecord
......
......@@ -15,8 +15,8 @@ module EE
def attributes
base_payload.merge(type: SecurityEvent.to_s,
created_at: DateTime.now,
updated_at: DateTime.now,
created_at: DateTime.current,
updated_at: DateTime.current,
details: @details.to_yaml)
end
......
......@@ -38,7 +38,7 @@ module EE
if weight_changes_tracking_enabled?
# Only create a resource event here if is_update is true to exclude the move issue operation.
# ResourceEvents for moved issues are written within AttributesRewriter.
EE::ResourceEvents::ChangeWeightService.new([issuable], current_user, Time.now).execute if is_update
EE::ResourceEvents::ChangeWeightService.new([issuable], current_user, Time.current).execute if is_update
else
::SystemNoteService.change_weight_note(issuable, issuable.project, current_user)
end
......
......@@ -54,7 +54,7 @@ module Elastic
private
def execute_with_redis(redis)
start_time = Time.now
start_time = Time.current
specs = redis.zrangebyscore(REDIS_SET_KEY, '-inf', '+inf', limit: [0, LIMIT], with_scores: true)
return if specs.empty?
......@@ -85,7 +85,7 @@ module Elastic
failures_count: failures.count,
first_score: first_score,
last_score: last_score,
bulk_execution_duration_s: Time.now - start_time
bulk_execution_duration_s: Time.current - start_time
)
end
......
......@@ -25,7 +25,7 @@ module Geo
# @return [Boolean] true if synced, false if not
def execute
try_obtain_lease do
start_time = Time.now
start_time = Time.current
registry.start!
......@@ -60,7 +60,7 @@ module Geo
download_success: download_result.success,
bytes_downloaded: download_result.bytes_downloaded,
primary_missing_file: download_result.primary_missing_file,
download_time_s: (Time.now - start_time).to_f.round(3),
download_time_s: (Time.current - start_time).to_f.round(3),
reason: download_result.reason
}
metadata.merge(download_result.extra_details) if download_result.extra_details
......
......@@ -65,7 +65,7 @@ module Geo
verification_checksum: checksum,
verification_checksum_mismatched: mismatch,
checksum_mismatch: mismatch.present?,
verified_at: Time.now,
verified_at: Time.current,
verification_failure: failure,
verification_retry_count: verification_retry_count,
retry_at: resync_retry_at,
......
......@@ -75,7 +75,7 @@ module Geo
# rubocop: enable CodeReuse/ActiveRecord
def download_time_in_seconds
(Time.now.to_f - registry.last_synced_at.to_f).round(3)
(Time.current.to_f - registry.last_synced_at.to_f).round(3)
end
def redownload?
......
......@@ -15,7 +15,7 @@ module Geo
def execute
try_obtain_lease do
start_time = Time.now
start_time = Time.current
download_result = downloader.execute
......@@ -49,7 +49,7 @@ module Geo
bytes_downloaded: download_result.bytes_downloaded,
failed_before_transfer: download_result.failed_before_transfer,
primary_missing_file: download_result.primary_missing_file,
download_time_s: (Time.now - start_time).to_f.round(3)
download_time_s: (Time.current - start_time).to_f.round(3)
}
log_info("File download", metadata)
......
......@@ -33,7 +33,7 @@ module Geo
repository_state.update!(
"#{type}_verification_checksum" => checksum,
"last_#{type}_verification_ran_at" => Time.now,
"last_#{type}_verification_ran_at" => Time.current,
"last_#{type}_verification_failure" => failure,
"#{type}_retry_at" => retry_at,
"#{type}_retry_count" => retry_count
......
......@@ -74,7 +74,7 @@ module Geo
"#{type}_verification_checksum_sha" => checksum,
"#{type}_verification_checksum_mismatched" => mismatch,
"#{type}_checksum_mismatch" => mismatch.present?,
"last_#{type}_verification_ran_at" => Time.now,
"last_#{type}_verification_ran_at" => Time.current,
"last_#{type}_verification_failure" => failure,
"#{type}_verification_retry_count" => verification_retry_count,
"resync_#{type}" => reverify,
......
......@@ -27,7 +27,7 @@ module Groups
end
def deletion_schedule_params
{ marked_for_deletion_on: Time.now.utc, deleting_user: current_user }
{ marked_for_deletion_on: Time.current.utc, deleting_user: current_user }
end
def log_audit_event
......
......@@ -12,7 +12,7 @@ module MergeRequests
title_slug = Gitlab::Utils.slugify(vulnerability.title)
source_branch = "remediate/%s-%s" % [
title_slug[0..74],
Time.now.strftime("D%Y%m%dT%H%M%S")
Time.current.strftime("D%Y%m%dT%H%M%S")
]
target_branch = vulnerability.target_branch || @project.default_branch
......
......@@ -10,7 +10,7 @@ module Projects
project,
current_user,
{ archived: true,
marked_for_deletion_at: Time.now.utc,
marked_for_deletion_at: Time.current.utc,
deleting_user: current_user }
).execute
log_event if result[:status] == :success
......
......@@ -67,7 +67,7 @@ module VulnerabilityExports
[
exportable.full_path.parameterize,
'_vulnerabilities_',
Time.now.utc.strftime('%FT%H%M'),
Time.current.utc.strftime('%FT%H%M'),
'.',
format
].join
......
......@@ -11,7 +11,7 @@ describe Ci::Minutes::BatchResetService do
id: id,
shared_runners_minutes_limit: 100,
extra_shared_runners_minutes_limit: 50,
last_ci_minutes_notification_at: Time.now,
last_ci_minutes_notification_at: Time.current,
last_ci_minutes_usage_notification_level: 30)
create(:namespace_statistics, namespace: namespace, shared_runners_seconds: seconds_used)
......
......@@ -125,7 +125,7 @@ describe Ci::Minutes::EmailNotificationService do
context 'when last_ci_minutes_notification_at has a value' do
before do
namespace.update_attribute(:last_ci_minutes_notification_at, Time.now)
namespace.update_attribute(:last_ci_minutes_notification_at, Time.current)
end
it 'does not notify owners' do
......
......@@ -18,7 +18,7 @@ describe ClearNamespaceSharedRunnersMinutesService do
it 'resets timer' do
subject
expect(namespace.namespace_statistics.reload.shared_runners_seconds_last_reset).to be_like_time(Time.now)
expect(namespace.namespace_statistics.reload.shared_runners_seconds_last_reset).to be_like_time(Time.current)
end
it 'successfully clears minutes' do
......
......@@ -14,7 +14,7 @@ describe EE::AuditEvents::BulkInsertService do
EE::AuditEvents::RepositoryPushAuditEventService.new(user, entity, target_ref, from, to)
end
end
let(:timestamp) { Time.new(2019, 10, 10) }
let(:timestamp) { Time.zone.local(2019, 10, 10) }
let(:attrs) do
{
author_id: user.id,
......
......@@ -12,7 +12,7 @@ describe EE::AuditEvents::RepositoryPushAuditEventService do
let(:service) { described_class.new(user, entity, target_ref, from, to) }
describe '#attributes' do
let(:timestamp) { Time.new(2019, 10, 10) }
let(:timestamp) { Time.zone.local(2019, 10, 10) }
let(:attrs) do
{
author_id: user.id,
......
......@@ -32,7 +32,7 @@ describe Boards::Issues::ListService, services: true do
let!(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, weight: 9, title: 'Issue 1', labels: [bug]) }
let!(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, weight: 1, title: 'Issue 2', labels: [p2]) }
let!(:opened_issue3) { create(:labeled_issue, project: project, milestone: m2, title: 'Assigned Issue', labels: [p3]) }
let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Issue 3', closed_at: Time.now ) }
let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Issue 3', closed_at: Time.current ) }
let(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, development]) }
let(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
......
......@@ -52,7 +52,7 @@ describe Issuable::CommonSystemNotesService do
end
context 'when issuable is an epic' do
let(:timestamp) { Time.now }
let(:timestamp) { Time.current }
let(:issuable) { create(:epic, end_date: timestamp) }
subject { described_class.new(nil, user).execute(issuable, old_labels: []) }
......
......@@ -14,7 +14,7 @@ describe EE::MergeRequestMetricsService do
describe '#merge' do
let(:merge_request) { create(:merge_request, :merged) }
let(:expected_commit_count) { 21 }
let(:event) { instance_double('Event', author_id: merge_request.author.id, created_at: Time.now) }
let(:event) { instance_double('Event', author_id: merge_request.author.id, created_at: Time.current) }
it 'saves metrics with productivity_data' do
allow(merge_request).to receive(:commits_count).and_return(expected_commit_count)
......
......@@ -26,7 +26,7 @@ describe ResourceEvents::MergeIntoNotesService do
let_it_be(:scoped_label_group2_1) { create(:label, project: project, title: 'key2::value') }
let_it_be(:scoped_label_group2_2) { create(:label, project: project, title: 'key2::value2') }
let_it_be(:scoped_label_group3_1) { create(:label, project: project, title: 'key3::value') }
let(:time) { Time.now }
let(:time) { Time.current }
describe '#execute' do
it 'squashes events with same time and author into single note but scoped labels are separated' do
......
......@@ -73,7 +73,7 @@ describe EE::SystemNotes::EpicsService do
end
describe '#change_epic_date_note' do
let(:timestamp) { Time.now }
let(:timestamp) { Time.current }
context 'when start date was changed' do
let(:noteable) { create(:epic) }
......
......@@ -67,7 +67,7 @@ describe Geo::BlobVerificationSecondaryService, :geo do
expect(registry.reload).to have_attributes(
verification_checksum: '62fc1ec4ce60',
checksum_mismatch: false,
verified_at: be_within(1.minute).of(Time.now),
verified_at: be_within(1.minute).of(Time.current),
verification_failure: nil,
verification_retry_count: nil,
retry_at: nil,
......@@ -87,7 +87,7 @@ describe Geo::BlobVerificationSecondaryService, :geo do
verification_checksum: nil,
verification_checksum_mismatched: '99fc1ec4ce60',
checksum_mismatch: true,
verified_at: be_within(1.minute).of(Time.now),
verified_at: be_within(1.minute).of(Time.current),
verification_failure: 'checksum mismatch',
verification_retry_count: 1,
retry_at: be_present,
......@@ -119,7 +119,7 @@ describe Geo::BlobVerificationSecondaryService, :geo do
verification_checksum: nil,
verification_checksum_mismatched: nil,
checksum_mismatch: false,
verified_at: be_within(1.minute).of(Time.now),
verified_at: be_within(1.minute).of(Time.current),
verification_failure: 'Error calculating checksum',
verification_retry_count: 1,
retry_at: be_present,
......
......@@ -194,7 +194,7 @@ describe Geo::FileDownloadService do
execute!
expect(registry.last.reload.retry_count).to eq(1)
expect(registry.last.retry_at > Time.now).to be_truthy
expect(registry.last.retry_at > Time.current).to be_truthy
end
end
end
......@@ -223,7 +223,7 @@ describe Geo::FileDownloadService do
execute!
expect(registry.last.reload.retry_count).to eq(1)
expect(registry.last.retry_at > Time.now).to be_truthy
expect(registry.last.retry_at > Time.current).to be_truthy
end
end
end
......@@ -315,7 +315,7 @@ describe Geo::FileDownloadService do
execute!
expect(registry_entry.reload.retry_count).to eq(4)
expect(registry_entry.retry_at > Time.now).to be_truthy
expect(registry_entry.retry_at > Time.current).to be_truthy
end
end
......@@ -354,7 +354,7 @@ describe Geo::FileDownloadService do
execute!
expect(registry_entry.reload.retry_count).to eq(4)
expect(registry_entry.retry_at > Time.now).to be_truthy
expect(registry_entry.retry_at > Time.current).to be_truthy
end
end
......
......@@ -11,7 +11,7 @@ describe Geo::MetricsUpdateService, :geo, :prometheus do
subject { described_class.new }
let(:event_date) { Time.now.utc }
let(:event_date) { Time.current.utc }
let(:data) do
{
......
......@@ -365,7 +365,7 @@ describe Geo::RepositorySyncService do
end
it 'successfully redownloads the repository even if the retry time exceeds max value' do
timestamp = Time.now.utc
timestamp = Time.current.utc
registry = create(
:geo_project_registry,
project: project,
......
......@@ -76,8 +76,8 @@ describe Geo::RepositoryVerificationPrimaryService do
subject.execute
expect(project.repository_state).to have_attributes(
last_repository_verification_ran_at: be_within(100.seconds).of(Time.now),
last_wiki_verification_ran_at: be_within(100.seconds).of(Time.now)
last_repository_verification_ran_at: be_within(100.seconds).of(Time.current),
last_wiki_verification_ran_at: be_within(100.seconds).of(Time.current)
)
end
......
......@@ -51,7 +51,7 @@ describe Geo::RepositoryVerificationSecondaryService, :geo do
expect(registry).to have_attributes(
"#{type}_verification_checksum_sha" => '62fc1ec4ce60',
"#{type}_checksum_mismatch" => false,
"last_#{type}_verification_ran_at" => be_within(1.minute).of(Time.now),
"last_#{type}_verification_ran_at" => be_within(1.minute).of(Time.current),
"last_#{type}_verification_failure" => nil,
"#{type}_verification_retry_count" => nil,
"resync_#{type}" => false,
......@@ -70,7 +70,7 @@ describe Geo::RepositoryVerificationSecondaryService, :geo do
expect(registry).to have_attributes(
"#{type}_verification_checksum_sha" => '0000000000000000000000000000000000000000',
"#{type}_checksum_mismatch" => false,
"last_#{type}_verification_ran_at" => be_within(1.minute).of(Time.now),
"last_#{type}_verification_ran_at" => be_within(1.minute).of(Time.current),
"last_#{type}_verification_failure" => nil,
"#{type}_verification_retry_count" => nil,
"resync_#{type}" => false,
......@@ -91,7 +91,7 @@ describe Geo::RepositoryVerificationSecondaryService, :geo do
"#{type}_verification_checksum_sha" => nil,
"#{type}_verification_checksum_mismatched" => '99fc1ec4ce60',
"#{type}_checksum_mismatch" => true,
"last_#{type}_verification_ran_at" => be_within(1.minute).of(Time.now),
"last_#{type}_verification_ran_at" => be_within(1.minute).of(Time.current),
"last_#{type}_verification_failure" => "#{type.to_s.capitalize} checksum mismatch",
"#{type}_verification_retry_count" => 1,
"resync_#{type}" => true,
......@@ -125,7 +125,7 @@ describe Geo::RepositoryVerificationSecondaryService, :geo do
"#{type}_verification_checksum_sha" => nil,
"#{type}_verification_checksum_mismatched" => nil,
"#{type}_checksum_mismatch" => false,
"last_#{type}_verification_ran_at" => be_within(1.minute).of(Time.now),
"last_#{type}_verification_ran_at" => be_within(1.minute).of(Time.current),
"last_#{type}_verification_failure" => "Error calculating #{type} checksum",
"#{type}_verification_retry_count" => 1,
"resync_#{type}" => true,
......
......@@ -20,7 +20,7 @@ describe Iterations::CreateService do
{
title: 'v2.1.9',
description: 'Patch release to fix security issue',
start_date: Time.now.to_s,
start_date: Time.current.to_s,
due_date: 1.day.from_now.to_s
}
end
......
......@@ -6,8 +6,8 @@ describe LdapGroupResetService do
# TODO: refactor to multi-ldap setup
let(:group) { create(:group) }
let(:user) { create(:user) }
let(:ldap_user) { create(:omniauth_user, extern_uid: 'john', provider: 'ldap', last_credential_check_at: Time.now) }
let(:ldap_user_2) { create(:omniauth_user, extern_uid: 'mike', provider: 'ldap', last_credential_check_at: Time.now) }
let(:ldap_user) { create(:omniauth_user, extern_uid: 'john', provider: 'ldap', last_credential_check_at: Time.current) }
let(:ldap_user_2) { create(:omniauth_user, extern_uid: 'mike', provider: 'ldap', last_credential_check_at: Time.current) }
before do
group.add_owner(user)
......
......@@ -226,7 +226,7 @@ describe Security::WafAnomalySummaryService do
context 'with time window' do
it 'passes time frame to ElasticSearch' do
from = 1.day.ago
to = Time.now
to = Time.current
subject = described_class.new(
environment: environment,
......
......@@ -103,7 +103,7 @@ describe VulnerabilityExports::ExportService do
let(:mock_relation) { double(:relation, with_findings_and_scanner: vulnerabilities) }
let(:mock_vulnerability_finder_service_object) { instance_double(Security::VulnerabilitiesFinder, execute: mock_relation) }
let(:exportable_full_path) { 'foo' }
let(:time_suffix) { Time.now.utc.strftime('%FT%H%M') }
let(:time_suffix) { Time.current.utc.strftime('%FT%H%M') }
let(:expected_file_name) { "#{exportable_full_path}_vulnerabilities_#{time_suffix}.csv" }
before do
......
......@@ -35,11 +35,11 @@ describe Auth::ContainerRegistryAuthenticationService do
it { expect(payload).to include('access') }
context 'a expirable' do
let(:expires_at) { Time.at(payload['exp']) }
let(:expires_at) { Time.zone.at(payload['exp']) }
let(:expire_delay) { 10 }
context 'for default configuration' do
it { expect(expires_at).not_to be_within(2.seconds).of(Time.now + expire_delay.minutes) }
it { expect(expires_at).not_to be_within(2.seconds).of(Time.current + expire_delay.minutes) }
end
context 'for changed configuration' do
......@@ -47,7 +47,7 @@ describe Auth::ContainerRegistryAuthenticationService do
stub_application_setting(container_registry_token_expire_delay: expire_delay)
end
it { expect(expires_at).to be_within(2.seconds).of(Time.now + expire_delay.minutes) }
it { expect(expires_at).to be_within(2.seconds).of(Time.current + expire_delay.minutes) }
end
end
end
......
......@@ -87,7 +87,7 @@ describe Boards::Issues::ListService do
let!(:opened_issue1) { create(:labeled_issue, project: project, milestone: m1, title: 'Issue 1', labels: [bug]) }
let!(:opened_issue2) { create(:labeled_issue, project: project, milestone: m2, title: 'Issue 2', labels: [p2, p2_project]) }
let!(:opened_issue3) { create(:labeled_issue, project: project_archived, milestone: m1, title: 'Issue 3', labels: [bug]) }
let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Reopened Issue 1', closed_at: Time.now ) }
let!(:reopened_issue1) { create(:issue, state: 'opened', project: project, title: 'Reopened Issue 1', closed_at: Time.current ) }
let!(:list1_issue1) { create(:labeled_issue, project: project, milestone: m1, labels: [p2, p2_project, development]) }
let!(:list1_issue2) { create(:labeled_issue, project: project, milestone: m2, labels: [development]) }
......
......@@ -571,7 +571,7 @@ module Ci
end
describe '#register_success' do
let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) }
let!(:current_time) { Time.zone.local(2018, 4, 5, 14, 0, 0) }
let!(:attempt_counter) { double('Gitlab::Metrics::NullMetric') }
let!(:job_queue_duration_seconds) { double('Gitlab::Metrics::NullMetric') }
......
......@@ -13,10 +13,10 @@ describe Clusters::Applications::ScheduleUpdateService do
context 'when application is able to be updated' do
context 'when the application was recently scheduled' do
it 'schedules worker with a backoff delay' do
application = create(:clusters_applications_prometheus, :installed, last_update_started_at: Time.now + 5.minutes)
application = create(:clusters_applications_prometheus, :installed, last_update_started_at: Time.current + 5.minutes)
service = described_class.new(application, project)
expect(::ClusterUpdateAppWorker).to receive(:perform_in).with(described_class::BACKOFF_DELAY, application.name, application.id, project.id, Time.now).once
expect(::ClusterUpdateAppWorker).to receive(:perform_in).with(described_class::BACKOFF_DELAY, application.name, application.id, project.id, Time.current).once
service.execute
end
......@@ -27,7 +27,7 @@ describe Clusters::Applications::ScheduleUpdateService do
application = create(:clusters_applications_prometheus, :installed)
service = described_class.new(application, project)
expect(::ClusterUpdateAppWorker).to receive(:perform_async).with(application.name, application.id, project.id, Time.now).once
expect(::ClusterUpdateAppWorker).to receive(:perform_async).with(application.name, application.id, project.id, Time.current).once
service.execute
end
......
......@@ -120,8 +120,8 @@ describe Clusters::Kubernetes::ConfigureIstioIngressService, '#execute' do
expect(certificate.subject.to_s).to include(serverless_domain_cluster.knative.hostname)
expect(certificate.not_before).to be_within(1.minute).of(Time.now)
expect(certificate.not_after).to be_within(1.minute).of(Time.now + 1000.years)
expect(certificate.not_before).to be_within(1.minute).of(Time.current)
expect(certificate.not_after).to be_within(1.minute).of(Time.current + 1000.years)
expect(WebMock).to have_requested(:put, api_url + '/api/v1/namespaces/istio-system/secrets/istio-ingressgateway-ca-certs').with(
body: hash_including(
......
......@@ -13,7 +13,7 @@ describe CohortsService do
6.times do |months_ago|
months_ago_time = (months_ago * 2).months.ago
create(:user, created_at: months_ago_time, last_activity_on: Time.now)
create(:user, created_at: months_ago_time, last_activity_on: Time.current)
create(:user, created_at: months_ago_time, last_activity_on: months_ago_time)
end
......
......@@ -315,7 +315,7 @@ describe Git::BranchPushService, services: true do
let(:issue) { create :issue, project: project }
let(:commit_author) { create :user }
let(:commit) { project.commit }
let(:commit_time) { Time.now }
let(:commit_time) { Time.current }
before do
project.add_developer(commit_author)
......
......@@ -6,7 +6,7 @@ describe IncidentManagement::CreateIssueService do
let(:project) { create(:project, :repository, :private) }
let_it_be(:user) { User.alert_bot }
let(:service) { described_class.new(project, alert_payload) }
let(:alert_starts_at) { Time.now }
let(:alert_starts_at) { Time.current }
let(:alert_title) { 'TITLE' }
let(:alert_annotations) { { title: alert_title } }
......@@ -302,7 +302,7 @@ describe IncidentManagement::CreateIssueService do
private
def build_alert_payload(annotations: {}, starts_at: Time.now)
def build_alert_payload(annotations: {}, starts_at: Time.current)
{
'annotations' => annotations.stringify_keys
}.tap do |payload|
......
......@@ -146,7 +146,7 @@ describe Issues::CloseService do
context 'when `metrics.first_mentioned_in_commit_at` is already set' do
before do
issue.metrics.update!(first_mentioned_in_commit_at: Time.now)
issue.metrics.update!(first_mentioned_in_commit_at: Time.current)
end
it 'does not update the metrics' do
......
......@@ -510,7 +510,7 @@ describe Issues::UpdateService, :mailer do
end
it 'updates updated_at' do
expect(issue.reload.updated_at).to be > Time.now
expect(issue.reload.updated_at).to be > Time.current
end
end
end
......
......@@ -118,7 +118,7 @@ describe MergeRequests::MergeService do
it 'closes GitLab issue tracker issues' do
issue = create :issue, project: project
commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.now, authored_date: Time.now)
commit = instance_double('commit', safe_message: "Fixes #{issue.to_reference}", date: Time.current, authored_date: Time.current)
allow(merge_request).to receive(:commits).and_return([commit])
merge_request.cache_merge_request_closes_issues!
......
......@@ -635,7 +635,7 @@ describe MergeRequests::RefreshService do
references: [issue],
author_name: commit_author.name,
author_email: commit_author.email,
committed_date: Time.now
committed_date: Time.current
)
allow_any_instance_of(MergeRequest).to receive(:commits).and_return(CommitCollection.new(@project, [commit], 'feature'))
......
......@@ -453,7 +453,7 @@ describe MergeRequests::UpdateService, :mailer do
end
it 'updates updated_at' do
expect(merge_request.reload.updated_at).to be > Time.now
expect(merge_request.reload.updated_at).to be > Time.current
end
end
......
......@@ -25,18 +25,18 @@ describe NoteSummary do
it 'returns note hash' do
Timecop.freeze do
expect(create_note_summary.note).to eq(noteable: noteable, project: project, author: user, note: 'note',
created_at: Time.now)
created_at: Time.current)
end
end
context 'when noteable is a commit' do
let(:noteable) { build(:commit, system_note_timestamp: Time.at(43)) }
let(:noteable) { build(:commit, system_note_timestamp: Time.zone.at(43)) }
it 'returns note hash specific to commit' do
expect(create_note_summary.note).to eq(
noteable: nil, project: project, author: user, note: 'note',
noteable_type: 'Commit', commit_id: noteable.id,
created_at: Time.at(43)
created_at: Time.zone.at(43)
)
end
end
......
......@@ -425,7 +425,7 @@ describe Notes::CreateService do
expect do
existing_note
Timecop.freeze(Time.now + 1.minute) { subject }
Timecop.freeze(Time.current + 1.minute) { subject }
existing_note.reload
end.to change { existing_note.type }.from(nil).to('DiscussionNote')
......
......@@ -119,7 +119,7 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do
cert = OpenSSL::X509::Certificate.new
cert.subject = cert.issuer = OpenSSL::X509::Name.parse(subject)
cert.not_before = Time.now
cert.not_before = Time.current
cert.not_after = 1.year.from_now
cert.public_key = key.public_key
cert.serial = 0x0
......
......@@ -72,7 +72,7 @@ describe Projects::Alerting::NotifyService do
describe '#execute' do
let(:token) { 'invalid-token' }
let(:starts_at) { Time.now.change(usec: 0) }
let(:starts_at) { Time.current.change(usec: 0) }
let(:service) { described_class.new(project, nil, payload) }
let(:payload_raw) do
{
......@@ -121,7 +121,7 @@ describe Projects::Alerting::NotifyService do
expect(last_alert_attributes).to match(
project_id: project.id,
title: payload_raw.fetch(:title),
started_at: Time.parse(payload_raw.fetch(:start_time)),
started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
severity: payload_raw.fetch(:severity),
status: AlertManagement::Alert::STATUSES[:triggered],
events: 1,
......@@ -154,7 +154,7 @@ describe Projects::Alerting::NotifyService do
expect(last_alert_attributes).to match(
project_id: project.id,
title: payload_raw.fetch(:title),
started_at: Time.parse(payload_raw.fetch(:start_time)),
started_at: Time.zone.parse(payload_raw.fetch(:start_time)),
severity: 'critical',
status: AlertManagement::Alert::STATUSES[:triggered],
events: 1,
......
......@@ -31,7 +31,7 @@ describe Projects::HashedStorage::BaseAttachmentService do
expect(Dir.exist?(target_path)).to be_truthy
Timecop.freeze do
suffix = Time.now.utc.to_i
suffix = Time.current.utc.to_i
subject.send(:discard_path!, target_path)
expected_renamed_path = "#{target_path}-#{suffix}"
......
......@@ -50,7 +50,7 @@ describe Projects::Prometheus::Alerts::CreateEventsService do
let(:events) { service.execute }
context 'with a firing payload' do
let(:started_at) { truncate_to_second(Time.now) }
let(:started_at) { truncate_to_second(Time.current) }
let(:firing_event) { alert_payload(status: 'firing', started_at: started_at) }
let(:alerts_payload) { { 'alerts' => [firing_event] } }
......@@ -87,7 +87,7 @@ describe Projects::Prometheus::Alerts::CreateEventsService do
end
context 'with a resolved payload' do
let(:started_at) { truncate_to_second(Time.now) }
let(:started_at) { truncate_to_second(Time.current) }
let(:ended_at) { started_at + 1 }
let(:payload_key) { PrometheusAlertEvent.payload_key_for(alert.prometheus_metric_id, utc_rfc3339(started_at)) }
let(:resolved_event) { alert_payload(status: 'resolved', started_at: started_at, ended_at: ended_at) }
......@@ -285,7 +285,7 @@ describe Projects::Prometheus::Alerts::CreateEventsService do
private
def alert_payload(status: 'firing', started_at: Time.now, ended_at: Time.now, gitlab_alert_id: alert.prometheus_metric_id, title: nil, environment: nil)
def alert_payload(status: 'firing', started_at: Time.current, ended_at: Time.current, gitlab_alert_id: alert.prometheus_metric_id, title: nil, environment: nil)
payload = {}
payload['status'] = status if status
......
......@@ -8,7 +8,7 @@ describe Projects::UpdateRepositoryStorageService do
subject { described_class.new(repository_storage_move) }
describe "#execute" do
let(:time) { Time.now }
let(:time) { Time.current }
before do
allow(Time).to receive(:now).and_return(time)
......
......@@ -361,7 +361,7 @@ describe QuickActions::InterpretService do
expect(updates).to eq(spend_time: {
duration: 3600,
user_id: developer.id,
spent_at: DateTime.now.to_date
spent_at: DateTime.current.to_date
})
end
......@@ -379,7 +379,7 @@ describe QuickActions::InterpretService do
expect(updates).to eq(spend_time: {
duration: -1800,
user_id: developer.id,
spent_at: DateTime.now.to_date
spent_at: DateTime.current.to_date
})
end
end
......
......@@ -110,6 +110,8 @@ describe RepositoryArchiveCleanUpService do
def create_temporary_files(dir, extensions, mtime)
FileUtils.mkdir_p(dir)
# rubocop: disable Rails/TimeZone
FileUtils.touch(extensions.map { |ext| File.join(dir, "sample.#{ext}") }, mtime: Time.now - mtime)
# rubocop: enable Rails/TimeZone
end
end
......@@ -21,7 +21,7 @@ describe ResourceEvents::MergeIntoNotesService do
let_it_be(:resource) { create(:issue, project: project) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:label2) { create(:label, project: project) }
let(:time) { Time.now }
let(:time) { Time.current }
describe '#execute' do
it 'merges label events into notes in order of created_at' do
......
......@@ -9,7 +9,7 @@ describe MergeRequestMetricsService do
it 'updates metrics' do
user = create(:user)
service = described_class.new(metrics)
event = double(Event, author_id: user.id, created_at: Time.now)
event = double(Event, author_id: user.id, created_at: Time.current)
service.merge(event)
......@@ -22,7 +22,7 @@ describe MergeRequestMetricsService do
it 'updates metrics' do
user = create(:user)
service = described_class.new(metrics)
event = double(Event, author_id: user.id, created_at: Time.now)
event = double(Event, author_id: user.id, created_at: Time.current)
service.close(event)
......
......@@ -348,7 +348,7 @@ describe VerifyPagesDomainService do
end
it 'does not shorten any grace period' do
grace = Time.now + 1.year
grace = Time.current + 1.year
domain.update!(enabled_until: grace)
disallow_resolver!
......