Commit c946fa7c authored by Robert May

Autocorrect Gitlab/Json cop in /lib

Runs the Gitlab/Json cop autocorrector over the lib directory.
parent 4f46a580
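
For context, the Gitlab/Json cop flags direct calls to the stdlib JSON module so that parsing and generation go through the Gitlab::Json wrapper instead, and RuboCop's autocorrector applies the substitution mechanically. A minimal sketch of the pattern, assuming the cop is enabled in the project's RuboCop configuration (the invocation and sample values below are illustrative, not taken from this commit):

# Illustrative command that produces a change set like the one below (assumed, not recorded in the commit):
#   bundle exec rubocop --only Gitlab/Json --auto-correct lib/
#
# Typical one-line rewrite inside the GitLab codebase, where Gitlab::Json is already loaded:
raw = '{"offset": 42}'
state = JSON.parse(raw, symbolize_names: true)          # before
state = Gitlab::Json.parse(raw, symbolize_names: true)  # after
#
# Generation helpers follow the same pattern:
JSON.pretty_generate(state)                             # before
Gitlab::Json.pretty_generate(state)                     # after

Keyword options such as symbolize_names: true pass through unchanged, so each correction in the hunks below is a drop-in substitution of the receiver.
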
......@@ -51,7 +51,7 @@ module API
def parse_env
return {} if params[:env].blank?
JSON.parse(params[:env])
Gitlab::Json.parse(params[:env])
rescue JSON::ParserError
{}
end
......
......@@ -83,7 +83,7 @@ module ContainerRegistry
image = {
config: {}
}
image, image_digest = upload_raw_blob(path, JSON.pretty_generate(image))
image, image_digest = upload_raw_blob(path, Gitlab::Json.pretty_generate(image))
return unless image
{
......@@ -109,7 +109,7 @@ module ContainerRegistry
def put_tag(name, reference, manifest)
response = faraday.put("/v2/#{name}/manifests/#{reference}") do |req|
req.headers['Content-Type'] = DOCKER_DISTRIBUTION_MANIFEST_V2_TYPE
req.body = JSON.pretty_generate(manifest)
req.body = Gitlab::Json.pretty_generate(manifest)
end
response.headers['docker-content-digest'] if response.success?
......
......@@ -6,7 +6,7 @@ module ContainerRegistry
def initialize(tag, blob)
@tag, @blob = tag, blob
@data = JSON.parse(blob.data)
@data = Gitlab::Json.parse(blob.data)
end
def [](key)
......
......@@ -79,7 +79,7 @@ module Gitlab
data = { 'jira_tracker_data' => [], 'issue_tracker_data' => [] }
select_all(query).each do |service|
begin
properties = JSON.parse(service['properties'])
properties = Gitlab::Json.parse(service['properties'])
rescue JSON::ParserError
logger.warn(
message: 'Properties data not parsed - invalid json',
......
......@@ -16,7 +16,7 @@ module Gitlab
def get
Gitlab::Redis::SharedState.with do |redis|
data = redis.get(redis_shared_state_key)
JSON.parse(data, symbolize_names: true) if data
Gitlab::Json.parse(data, symbolize_names: true) if data
end
end
......
......@@ -353,7 +353,7 @@ module Gitlab
def restore_state(new_state, stream)
state = Base64.urlsafe_decode64(new_state)
state = JSON.parse(state, symbolize_names: true)
state = Gitlab::Json.parse(state, symbolize_names: true)
return if state[:offset].to_i > stream.size
STATE_PARAMS.each do |param|
......
......@@ -90,7 +90,7 @@ module Gitlab
decoded_state = Base64.urlsafe_decode64(state)
return unless decoded_state.present?
JSON.parse(decoded_state)
Gitlab::Json.parse(decoded_state)
end
end
end
......
......@@ -32,7 +32,7 @@ module Gitlab
raise ParserError, 'Errors field not found!' unless errors
begin
JSON.parse(errors)
Gitlab::Json.parse(errors)
rescue JSON::ParserError
raise ParserError, 'Invalid errors field!'
end
......@@ -71,7 +71,7 @@ module Gitlab
next unless path =~ match_pattern
next if path =~ INVALID_PATH_PATTERN
entries[path] = JSON.parse(meta, symbolize_names: true)
entries[path] = Gitlab::Json.parse(meta, symbolize_names: true)
rescue JSON::ParserError, Encoding::CompatibilityError
next
end
......
......@@ -8,7 +8,7 @@ module Gitlab
TfplanParserError = Class.new(Gitlab::Ci::Parsers::ParserError)
def parse!(json_data, terraform_reports, artifact:)
tfplan = JSON.parse(json_data).tap do |parsed_data|
tfplan = Gitlab::Json.parse(json_data).tap do |parsed_data|
parsed_data['job_path'] = Gitlab::Routing.url_helpers.project_job_path(
artifact.job.project, artifact.job
)
......
......@@ -25,8 +25,8 @@ module Gitlab
)}x.freeze
def initialize
names = JSON.parse(File.read(DIGESTS)).keys +
JSON.parse(File.read(ALIASES)).keys
names = Gitlab::Json.parse(File.read(DIGESTS)).keys +
Gitlab::Json.parse(File.read(ALIASES)).keys
@emoji = names.map { |name| ":#{name}:" }
end
......
......@@ -16,7 +16,7 @@ module Gitlab
raise HTTPError, "Failed to read #{url}: #{rsp.code} #{rsp.message}"
end
JSON.parse(rsp.body)
Gitlab::Json.parse(rsp.body)
end
end
end
......
......@@ -157,7 +157,7 @@ module Gitlab
failed_reverts = []
while rename_info = redis.lpop(key)
path_before_rename, path_after_rename = JSON.parse(rename_info)
path_before_rename, path_after_rename = Gitlab::Json.parse(rename_info)
say "renaming #{type} from #{path_after_rename} back to #{path_before_rename}"
begin
yield(path_before_rename, path_after_rename)
......
......@@ -39,7 +39,7 @@ module Gitlab
end
def json
@json ||= JSON.parse(plain_text) rescue nil
@json ||= Gitlab::Json.parse(plain_text) rescue nil
end
end
end
......
......@@ -156,7 +156,7 @@ module Gitlab
end
results.map! do |result|
JSON.parse(extract_data(result), symbolize_names: true) unless result.nil?
Gitlab::Json.parse(extract_data(result), symbolize_names: true) unless result.nil?
end
file_paths.zip(results).to_h
......
......@@ -68,7 +68,7 @@ module Gitlab
end
def to_json(opts = nil)
JSON.generate(formatter.to_h, opts)
Gitlab::Json.generate(formatter.to_h, opts)
end
def as_json(opts = nil)
......
......@@ -42,7 +42,7 @@ module Gitlab
content.map! do |lines|
next unless lines
JSON.parse(lines).map! do |line|
Gitlab::Json.parse(lines).map! do |line|
Gitlab::Diff::Line.safe_init_from_hash(line)
end
end
......
......@@ -21,7 +21,7 @@ module Gitlab
end
def emojis_aliases
@emoji_aliases ||= JSON.parse(File.read(Rails.root.join('fixtures', 'emojis', 'aliases.json')))
@emoji_aliases ||= Gitlab::Json.parse(File.read(Rails.root.join('fixtures', 'emojis', 'aliases.json')))
end
def emoji_filename(name)
......@@ -63,7 +63,7 @@ module Gitlab
def emoji_unicode_versions_by_name
@emoji_unicode_versions_by_name ||=
JSON.parse(File.read(Rails.root.join('fixtures', 'emojis', 'emoji-unicode-version-map.json')))
Gitlab::Json.parse(File.read(Rails.root.join('fixtures', 'emojis', 'emoji-unicode-version-map.json')))
end
end
end
......@@ -28,7 +28,7 @@ module Gitlab
end
def parse_response!
JSON.parse(@excon_response.body)
Gitlab::Json.parse(@excon_response.body)
rescue JSON::JSONError
# The JSON response is optional, so don't fail when it's missing
nil
......
......@@ -130,7 +130,7 @@ module Gitlab
end
def self.address_metadata(storage)
Base64.strict_encode64(JSON.dump(storage => connection_data(storage)))
Base64.strict_encode64(Gitlab::Json.dump(storage => connection_data(storage)))
end
def self.connection_data(storage)
......@@ -458,7 +458,7 @@ module Gitlab
def self.filesystem_id_from_disk(storage)
metadata_file = File.read(storage_metadata_file_path(storage))
metadata_hash = JSON.parse(metadata_file)
metadata_hash = Gitlab::Json.parse(metadata_file)
metadata_hash['gitaly_filesystem_id']
rescue Errno::ENOENT, Errno::EACCES, JSON::ParserError
nil
......
......@@ -128,7 +128,7 @@ module Gitlab
end
def ordering_from_encoded_json(cursor)
JSON.parse(decode(cursor))
Gitlab::Json.parse(decode(cursor))
rescue JSON::ParserError
raise Gitlab::Graphql::Errors::ArgumentError, "Please provide a valid cursor"
end
......
......@@ -20,7 +20,7 @@ module Gitlab
case ambiguous_param
when String
if ambiguous_param.present?
ensure_hash(JSON.parse(ambiguous_param))
ensure_hash(Gitlab::Json.parse(ambiguous_param))
else
{}
end
......
......@@ -21,7 +21,7 @@ module Gitlab
return unless Gitlab::Runtime.puma?
stats = Puma.stats
stats = JSON.parse(stats)
stats = Gitlab::Json.parse(stats)
# If `workers` is missing this means that
# Puma server is running in single mode
......
......@@ -26,7 +26,7 @@ module Gitlab
json_stats = puma_stats
return unless json_stats
stats = JSON.parse(json_stats)
stats = Gitlab::Json.parse(json_stats)
if cluster?(stats)
sample_cluster(stats)
......
......@@ -9,7 +9,7 @@ module Gitlab
"Phabricator responded with #{http_response.status}"
end
response = new(JSON.parse(http_response.body))
response = new(Gitlab::Json.parse(http_response.body))
unless response.success?
raise ResponseError,
......
......@@ -163,7 +163,7 @@ module Gitlab
end
def parse_json(response_body)
JSON.parse(response_body)
Gitlab::Json.parse(response_body)
rescue JSON::ParserError
raise PrometheusClient::Error, 'Parsing response failed'
end
......
......@@ -152,7 +152,7 @@ module Gitlab
raise "failed to get exif tags: #{output}" if status != 0
JSON.parse(output).first
Gitlab::Json.parse(output).first
end
end
end
......
......@@ -134,7 +134,7 @@ module Gitlab
end
def safe_parse(message)
JSON.parse(message)
Gitlab::Json.parse(message)
rescue JSON::ParserError
end
......
......@@ -4,7 +4,7 @@ module Gitlab
module SidekiqMiddleware
class ArgumentsLogger
def call(worker, job, queue)
Sidekiq.logger.info "arguments: #{JSON.dump(job['args'])}"
Sidekiq.logger.info "arguments: #{Gitlab::Json.dump(job['args'])}"
yield
end
end
......
......@@ -205,7 +205,7 @@ module Gitlab
# This is the outermost encoding of a senddata: header. It is safe for
# inclusion in HTTP response headers
def encode(hash)
Base64.urlsafe_encode64(JSON.dump(hash))
Base64.urlsafe_encode64(Gitlab::Json.dump(hash))
end
# This is for encoding individual fields inside the senddata JSON that
......
......@@ -49,7 +49,7 @@ module Mattermost
end
def json_response(response)
json_response = JSON.parse(response.body)
json_response = Gitlab::Json.parse(response.body)
unless response.success?
raise Mattermost::ClientError.new(json_response['message'] || 'Undefined error')
......
......@@ -65,7 +65,7 @@ module Quality
%(--output json),
*args
]
releases = JSON.parse(run_command(command))
releases = Gitlab::Json.parse(run_command(command))
releases.map do |release|
Release.new(*release.values_at(*RELEASE_JSON_ATTRIBUTES))
......
......@@ -40,7 +40,7 @@ module RspecFlaky
new_flaky_examples = flaky_examples - suite_flaky_examples
if new_flaky_examples.any?
Rails.logger.warn "\nNew flaky examples detected:\n"
Rails.logger.warn JSON.pretty_generate(new_flaky_examples.to_h)
Rails.logger.warn Gitlab::Json.pretty_generate(new_flaky_examples.to_h)
RspecFlaky::Report.new(new_flaky_examples).write(RspecFlaky::Config.new_flaky_examples_report_path)
# write_report_file(new_flaky_examples, RspecFlaky::Config.new_flaky_examples_report_path)
......
......@@ -19,7 +19,7 @@ module RspecFlaky
end
def self.load_json(json)
new(RspecFlaky::FlakyExamplesCollection.new(JSON.parse(json)))
new(RspecFlaky::FlakyExamplesCollection.new(Gitlab::Json.parse(json)))
end
def initialize(flaky_examples)
......@@ -40,7 +40,7 @@ module RspecFlaky
report_path_dir = File.dirname(file_path)
FileUtils.mkdir_p(report_path_dir) unless Dir.exist?(report_path_dir)
File.write(file_path, JSON.pretty_generate(flaky_examples.to_h))
File.write(file_path, Gitlab::Json.pretty_generate(flaky_examples.to_h))
end
def prune_outdated(days: OUTDATED_DAYS_THRESHOLD)
......
......@@ -7,7 +7,7 @@ namespace :gemojione do
aliases = {}
index_file = File.join(Rails.root, 'fixtures', 'emojis', 'index.json')
index = JSON.parse(File.read(index_file))
index = Gitlab::Json.parse(File.read(index_file))
index.each_pair do |key, data|
data['aliases'].each do |a|
......@@ -19,7 +19,7 @@ namespace :gemojione do
out = File.join(Rails.root, 'fixtures', 'emojis', 'aliases.json')
File.open(out, 'w') do |handle|
handle.write(JSON.pretty_generate(aliases, indent: ' ', space: '', space_before: ''))
handle.write(Gitlab::Json.pretty_generate(aliases, indent: ' ', space: '', space_before: ''))
end
end
......@@ -58,7 +58,7 @@ namespace :gemojione do
out = File.join(Rails.root, 'fixtures', 'emojis', 'digests.json')
File.open(out, 'w') do |handle|
handle.write(JSON.pretty_generate(resultant_emoji_map))
handle.write(Gitlab::Json.pretty_generate(resultant_emoji_map))
end
end
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe BitbucketServer::Representation::Activity do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:activities) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:inline_comment) { activities.first }
let(:comment) { activities[3] }
let(:merge_event) { activities[4] }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe BitbucketServer::Representation::Comment do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:activities) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:comment) { activities.first }
subject { described_class.new(comment) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe BitbucketServer::Representation::PullRequestComment do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:activities) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:comment) { activities.second }
subject { described_class.new(comment) }
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe BitbucketServer::Representation::PullRequest do
let(:sample_data) { JSON.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
let(:sample_data) { Gitlab::Json.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
subject { described_class.new(sample_data) }
......
......@@ -50,7 +50,7 @@ describe BitbucketServer::Representation::Repo do
DATA
end
subject { described_class.new(JSON.parse(sample_data)) }
subject { described_class.new(Gitlab::Json.parse(sample_data)) }
describe '#project_key' do
it { expect(subject.project_key).to eq('TEST') }
......
......@@ -9,10 +9,10 @@ describe Gitlab::AppJsonLogger do
let(:string_message) { 'Information' }
it 'logs a hash as a JSON' do
expect(JSON.parse(subject.format_message('INFO', Time.now, nil, hash_message))).to include(hash_message)
expect(Gitlab::Json.parse(subject.format_message('INFO', Time.now, nil, hash_message))).to include(hash_message)
end
it 'logs a string as a JSON' do
expect(JSON.parse(subject.format_message('INFO', Time.now, nil, string_message))).to include('message' => string_message)
expect(Gitlab::Json.parse(subject.format_message('INFO', Time.now, nil, string_message))).to include('message' => string_message)
end
end
......@@ -242,7 +242,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete
old_path, new_path = [nil, nil]
Gitlab::Redis::SharedState.with do |redis|
rename_info = redis.lpop(key)
old_path, new_path = JSON.parse(rename_info)
old_path, new_path = Gitlab::Json.parse(rename_info)
end
expect(old_path).to eq('path/to/namespace')
......@@ -278,7 +278,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :delete
end
expect(rename_count).to eq(1)
expect(JSON.parse(stored_renames.first)).to eq(%w(old_path new_path))
expect(Gitlab::Json.parse(stored_renames.first)).to eq(%w(old_path new_path))
end
end
end
......@@ -639,11 +639,11 @@ describe Gitlab::Diff::Position do
let(:diff_position) { described_class.new(args) }
it "returns the position as JSON" do
expect(JSON.parse(diff_position.to_json)).to eq(args.stringify_keys)
expect(Gitlab::Json.parse(diff_position.to_json)).to eq(args.stringify_keys)
end
it "works when nested under another hash" do
expect(JSON.parse(JSON.generate(pos: diff_position))).to eq('pos' => args.stringify_keys)
expect(Gitlab::Json.parse(Gitlab::Json.generate(pos: diff_position))).to eq('pos' => args.stringify_keys)
end
end
......
......@@ -10,7 +10,7 @@ describe Gitlab::Elasticsearch::Logs::Lines do
let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", pod: "production-6866bc8974-m4sk4", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } }
let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", pod: "production-6866bc8974-m4sk4", message: "- -\u003e /" } }
let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/logs_response.json')) }
let(:es_response) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/logs_response.json')) }
subject { described_class.new(client) }
......@@ -22,14 +22,14 @@ describe Gitlab::Elasticsearch::Logs::Lines do
let(:end_time) { "2019-12-13T14:35:34.034Z" }
let(:cursor) { "9999934,1572449784442" }
let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) }
let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
let(:body_with_search) { JSON.parse(fixture_file('lib/elasticsearch/query_with_search.json')) }
let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
let(:body_with_cursor) { JSON.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) }
let(:body_with_filebeat_6) { JSON.parse(fixture_file('lib/elasticsearch/query_with_filebeat_6.json')) }
let(:body) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query.json')) }
let(:body_with_container) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
let(:body_with_search) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_search.json')) }
let(:body_with_times) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
let(:body_with_start_time) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
let(:body_with_end_time) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
let(:body_with_cursor) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_cursor.json')) }
let(:body_with_filebeat_6) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/query_with_filebeat_6.json')) }
RSpec::Matchers.define :a_hash_equal_to_json do |expected|
match do |actual|
......
......@@ -5,8 +5,8 @@ require 'spec_helper'
describe Gitlab::Elasticsearch::Logs::Pods do
let(:client) { Elasticsearch::Transport::Client }
let(:es_query) { JSON.parse(fixture_file('lib/elasticsearch/pods_query.json'), symbolize_names: true) }
let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/pods_response.json')) }
let(:es_query) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/pods_query.json'), symbolize_names: true) }
let(:es_response) { Gitlab::Json.parse(fixture_file('lib/elasticsearch/pods_response.json')) }
let(:namespace) { "autodevops-deploy-9-production" }
subject { described_class.new(client) }
......
......@@ -3,7 +3,7 @@
require "spec_helper"
describe Gitlab::GoogleCodeImport::Client do
let(:raw_data) { JSON.parse(fixture_file("GoogleCodeProjectHosting.json")) }
let(:raw_data) { Gitlab::Json.parse(fixture_file("GoogleCodeProjectHosting.json")) }
subject { described_class.new(raw_data) }
......
......@@ -4,7 +4,7 @@ require "spec_helper"
describe Gitlab::GoogleCodeImport::Importer do
let(:mapped_user) { create(:user, username: "thilo123") }
let(:raw_data) { JSON.parse(fixture_file("GoogleCodeProjectHosting.json")) }
let(:raw_data) { Gitlab::Json.parse(fixture_file("GoogleCodeProjectHosting.json")) }
let(:client) { Gitlab::GoogleCodeImport::Client.new(raw_data) }
let(:import_data) do
{
......
......@@ -30,7 +30,7 @@ describe Gitlab::GrapeLogging::Formatters::LogrageWithTimestamp do
}
end
let(:time) { Time.now }
let(:result) { JSON.parse(subject) }
let(:result) { Gitlab::Json.parse(subject) }
subject { described_class.new.call(:info, time, nil, log_entry) }
......
......@@ -18,7 +18,7 @@ describe Gitlab::Graphql::Pagination::Keyset::Connection do
end
def decoded_cursor(cursor)
JSON.parse(Base64Bp.urlsafe_decode64(cursor))
Gitlab::Json.parse(Base64Bp.urlsafe_decode64(cursor))
end
describe '#cursor_for' do
......
......@@ -28,7 +28,7 @@ describe Gitlab::GraphqlLogger do
output = subject.format_message('INFO', now, 'test', analyzer_memo)
data = JSON.parse(output)
data = Gitlab::Json.parse(output)
expect(data['severity']).to eq('INFO')
expect(data['time']).to eq(now.utc.iso8601(3))
expect(data['complexity']).to eq(181)
......
......@@ -8,7 +8,7 @@ describe Gitlab::ImportExport::FastHashSerializer do
# Wrapping the result into JSON generating/parsing is for making
# the testing more convenient. Doing this, we can check that
# all items are properly serialized while traversing the simple hash.
subject { JSON.parse(JSON.generate(described_class.new(project, tree).execute)) }
subject { Gitlab::Json.parse(Gitlab::Json.generate(described_class.new(project, tree).execute)) }
let!(:project) { setup_project }
let(:user) { create(:user) }
......
......@@ -46,8 +46,8 @@ describe Gitlab::ImportExport do
export_path: test_tmp_path)
).to be true
imported_json = JSON.parse(File.read("#{test_fixture_path}/project.json"))
exported_json = JSON.parse(File.read("#{test_tmp_path}/project.json"))
imported_json = Gitlab::Json.parse(File.read("#{test_fixture_path}/project.json"))
exported_json = Gitlab::Json.parse(File.read("#{test_tmp_path}/project.json"))
assert_relations_match(imported_json, exported_json)
end
......
......@@ -7,7 +7,7 @@ describe Gitlab::ImportExport::JSON::LegacyReader::File do
it_behaves_like 'import/export json legacy reader' do
let(:valid_path) { 'spec/fixtures/lib/gitlab/import_export/light/project.json' }
let(:data) { valid_path }
let(:json_data) { JSON.parse(File.read(valid_path)) }
let(:json_data) { Gitlab::Json.parse(File.read(valid_path)) }
end
describe '#exist?' do
......
......@@ -9,8 +9,8 @@ describe Gitlab::ImportExport::JSON::LegacyReader::Hash do
# the hash is modified by the `LegacyReader`
# we need to deep-dup it
let(:json_data) { JSON.parse(File.read(path)) }
let(:data) { JSON.parse(File.read(path)) }
let(:json_data) { Gitlab::Json.parse(File.read(path)) }
let(:data) { Gitlab::Json.parse(File.read(path)) }
end
describe '#exist?' do
......
......@@ -6,7 +6,7 @@ describe Gitlab::ImportExport::JSON::NdjsonReader do
include ImportExport::CommonUtil
let(:fixture) { 'spec/fixtures/lib/gitlab/import_export/light/tree' }
let(:root_tree) { JSON.parse(File.read(File.join(fixture, 'project.json'))) }
let(:root_tree) { Gitlab::Json.parse(File.read(File.join(fixture, 'project.json'))) }
let(:ndjson_reader) { described_class.new(dir_path) }
let(:importable_path) { 'project' }
......@@ -93,8 +93,8 @@ describe Gitlab::ImportExport::JSON::NdjsonReader do
context 'relation file contains multiple lines' do
let(:key) { 'custom_attributes' }
let(:attr_1) { JSON.parse('{"id":201,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"color","value":"red"}') }
let(:attr_2) { JSON.parse('{"id":202,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"size","value":"small"}') }
let(:attr_1) { Gitlab::Json.parse('{"id":201,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"color","value":"red"}') }
let(:attr_2) { Gitlab::Json.parse('{"id":202,"project_id":5,"created_at":"2016-06-14T15:01:51.315Z","updated_at":"2016-06-14T15:01:51.315Z","key":"size","value":"small"}') }
it 'yields every relation value to the Enumerator' do
expect(subject.to_a).to eq([[attr_1, 0], [attr_2, 1]])
......
......@@ -26,7 +26,7 @@ describe Gitlab::ImportExport::LfsSaver do
let(:lfs_json_file) { File.join(shared.export_path, Gitlab::ImportExport.lfs_objects_filename) }
def lfs_json
JSON.parse(IO.read(lfs_json_file))
Gitlab::Json.parse(IO.read(lfs_json_file))
end
before do
......
......@@ -14,7 +14,7 @@ describe Gitlab::JsonLogger do
it 'formats strings' do
output = subject.format_message('INFO', now, 'test', 'Hello world')
data = JSON.parse(output)
data = Gitlab::Json.parse(output)
expect(data['severity']).to eq('INFO')
expect(data['time']).to eq(now.utc.iso8601(3))
......@@ -24,7 +24,7 @@ describe Gitlab::JsonLogger do
it 'formats hashes' do
output = subject.format_message('INFO', now, 'test', { hello: 1 })
data = JSON.parse(output)
data = Gitlab::Json.parse(output)
expect(data['severity']).to eq('INFO')
expect(data['time']).to eq(now.utc.iso8601(3))
......
......@@ -9,9 +9,9 @@ describe Gitlab::Metrics::Dashboard::Stages::GrafanaFormatter do
let_it_be(:project) { create(:project, namespace: namespace, name: 'bar') }
describe '#transform!' do
let(:grafana_dashboard) { JSON.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) }
let(:datasource) { JSON.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) }
let(:expected_dashboard) { JSON.parse(fixture_file('grafana/expected_grafana_embed.json'), symbolize_names: true) }
let(:grafana_dashboard) { Gitlab::Json.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) }
let(:datasource) { Gitlab::Json.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) }
let(:expected_dashboard) { Gitlab::Json.parse(fixture_file('grafana/expected_grafana_embed.json'), symbolize_names: true) }
subject(:dashboard) { described_class.new(project, {}, params).transform! }
......
......@@ -2,8 +2,8 @@
require 'spec_helper'
describe Gitlab::PhabricatorImport::Conduit::Response do
let(:response) { described_class.new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json')))}
let(:error_response) { described_class.new(JSON.parse(fixture_file('phabricator_responses/auth_failed.json'))) }
let(:response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json')))}
let(:error_response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/auth_failed.json'))) }
describe '.parse!' do
it 'raises a ResponseError if the http response was not successfull' do
......
......@@ -4,7 +4,7 @@ require 'spec_helper'
describe Gitlab::PhabricatorImport::Conduit::TasksResponse do
let(:conduit_response) do
Gitlab::PhabricatorImport::Conduit::Response
.new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json')))
.new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json')))
end
subject(:response) { described_class.new(conduit_response) }
......
......@@ -4,7 +4,7 @@ require 'spec_helper'
describe Gitlab::PhabricatorImport::Conduit::UsersResponse do
let(:conduit_response) do
Gitlab::PhabricatorImport::Conduit::Response
.new(JSON.parse(fixture_file('phabricator_responses/user.search.json')))
.new(Gitlab::Json.parse(fixture_file('phabricator_responses/user.search.json')))
end
subject(:response) { described_class.new(conduit_response) }
......
......@@ -7,7 +7,7 @@ describe Gitlab::PhabricatorImport::Issues::Importer do
let(:response) do
Gitlab::PhabricatorImport::Conduit::TasksResponse.new(
Gitlab::PhabricatorImport::Conduit::Response
.new(JSON.parse(fixture_file('phabricator_responses/maniphest.search.json')))
.new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json')))
)
end
......
......@@ -313,7 +313,7 @@ describe Gitlab::PrometheusClient do
req_stub = stub_prometheus_request(query_url, body: prometheus_value_body('vector'))
response = subject.proxy('query', { query: prometheus_query })
json_response = JSON.parse(response.body)
json_response = Gitlab::Json.parse(response.body)
expect(response.code).to eq(200)
expect(json_response).to eq({
......@@ -332,7 +332,7 @@ describe Gitlab::PrometheusClient do
req_stub = stub_prometheus_request(query_url, status: 400, body: { error: 'error' })
response = subject.proxy('query', { query: prometheus_query })
json_response = JSON.parse(response.body)
json_response = Gitlab::Json.parse(response.body)
expect(req_stub).to have_been_requested
expect(response.code).to eq(400)
......
......@@ -9,7 +9,7 @@ describe Gitlab::SidekiqLogging::JSONFormatter do
let(:timestamp_iso8601) { now.iso8601(3) }
describe 'with a Hash' do
subject { JSON.parse(described_class.new.call('INFO', now, 'my program', hash_input)) }
subject { Gitlab::Json.parse(described_class.new.call('INFO', now, 'my program', hash_input)) }
let(:hash_input) do
{
......@@ -63,7 +63,7 @@ describe Gitlab::SidekiqLogging::JSONFormatter do
it 'accepts strings with no changes' do
result = subject.call('DEBUG', now, 'my string', message)
data = JSON.parse(result)
data = Gitlab::Json.parse(result)
expected_output = {
severity: 'DEBUG',
time: timestamp_iso8601,
......
......@@ -9,7 +9,7 @@ describe Gitlab::Workhorse do
def decode_workhorse_header(array)
key, value = array
command, encoded_params = value.split(":")
params = JSON.parse(Base64.urlsafe_decode64(encoded_params))
params = Gitlab::Json.parse(Base64.urlsafe_decode64(encoded_params))
[key, command, params]
end
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe Grafana::Validator do
let(:grafana_dashboard) { JSON.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) }
let(:datasource) { JSON.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) }
let(:grafana_dashboard) { Gitlab::Json.parse(fixture_file('grafana/simplified_dashboard_response.json'), symbolize_names: true) }
let(:datasource) { Gitlab::Json.parse(fixture_file('grafana/datasource_response.json'), symbolize_names: true) }
let(:panel) { grafana_dashboard[:dashboard][:panels].first }
let(:query_params) do
......
......@@ -31,7 +31,7 @@ describe RspecFlaky::Report, :aggregate_failures do
describe '.load' do
let!(:report_file) do
Tempfile.new(%w[rspec_flaky_report .json]).tap do |f|
f.write(JSON.pretty_generate(suite_flaky_example_report))
f.write(Gitlab::Json.pretty_generate(suite_flaky_example_report))
f.rewind
end
end
......@@ -48,7 +48,7 @@ describe RspecFlaky::Report, :aggregate_failures do
describe '.load_json' do
let(:report_json) do
JSON.pretty_generate(suite_flaky_example_report)
Gitlab::Json.pretty_generate(suite_flaky_example_report)
end
it 'loads the report file' do
......@@ -103,7 +103,7 @@ describe RspecFlaky::Report, :aggregate_failures do
expect(File.exist?(report_file_path)).to be(true)
expect(File.read(report_file_path))
.to eq(JSON.pretty_generate(report.flaky_examples.to_h))
.to eq(Gitlab::Json.pretty_generate(report.flaky_examples.to_h))
end
end
end
......
......@@ -18,7 +18,7 @@ describe Sentry::Client do
describe '#issue_latest_event' do
let(:sample_response) do
Gitlab::Utils.deep_indifferent_access(
JSON.parse(fixture_file('sentry/issue_latest_event_sample_response.json'))
Gitlab::Json.parse(fixture_file('sentry/issue_latest_event_sample_response.json'))
)
end
let(:issue_id) { '1234' }
......
......@@ -16,7 +16,7 @@ describe Sentry::Client::IssueLink do
let(:sentry_issue_link_url) { "https://sentrytest.gitlab.com/api/0/groups/#{sentry_issue_id}/integrations/#{integration_id}/" }
let(:integration_id) { 44444 }
let(:issue_link_sample_response) { JSON.parse(fixture_file('sentry/global_integration_link_sample_response.json')) }
let(:issue_link_sample_response) { Gitlab::Json.parse(fixture_file('sentry/global_integration_link_sample_response.json')) }
let(:sentry_api_response) { issue_link_sample_response }
let!(:sentry_api_request) { stub_sentry_request(sentry_issue_link_url, :put, body: sentry_api_response, status: 201) }
......@@ -42,7 +42,7 @@ describe Sentry::Client::IssueLink do
let(:sentry_issue_link_url) { "https://sentrytest.gitlab.com/api/0/issues/#{sentry_issue_id}/plugins/gitlab/link/" }
let(:integration_id) { nil }
let(:issue_link_sample_response) { JSON.parse(fixture_file('sentry/plugin_link_sample_response.json')) }
let(:issue_link_sample_response) { Gitlab::Json.parse(fixture_file('sentry/plugin_link_sample_response.json')) }
let!(:sentry_api_request) { stub_sentry_request(sentry_issue_link_url, :post, body: sentry_api_response) }
it_behaves_like 'calls sentry api'
......
......@@ -23,7 +23,7 @@ describe Sentry::Client::Issue do
let(:issues_sample_response) do
Gitlab::Utils.deep_indifferent_access(
JSON.parse(fixture_file('sentry/issues_sample_response.json'))
Gitlab::Json.parse(fixture_file('sentry/issues_sample_response.json'))
)
end
......@@ -201,7 +201,7 @@ describe Sentry::Client::Issue do
describe '#issue_details' do
let(:issue_sample_response) do
Gitlab::Utils.deep_indifferent_access(
JSON.parse(fixture_file('sentry/issue_sample_response.json'))
Gitlab::Json.parse(fixture_file('sentry/issue_sample_response.json'))
)
end
......
......@@ -10,7 +10,7 @@ describe Sentry::Client::Projects do
let(:client) { Sentry::Client.new(sentry_url, token) }
let(:projects_sample_response) do
Gitlab::Utils.deep_indifferent_access(
JSON.parse(fixture_file('sentry/list_projects_sample_response.json'))
Gitlab::Json.parse(fixture_file('sentry/list_projects_sample_response.json'))
)
end
......
......@@ -8,7 +8,7 @@ describe Sentry::Client::Repo do
let(:sentry_url) { 'https://sentrytest.gitlab.com/api/0/projects/sentry-org/sentry-project' }
let(:token) { 'test-token' }
let(:client) { Sentry::Client.new(sentry_url, token) }
let(:repos_sample_response) { JSON.parse(fixture_file('sentry/repos_sample_response.json')) }
let(:repos_sample_response) { Gitlab::Json.parse(fixture_file('sentry/repos_sample_response.json')) }
describe '#repos' do
let(:organization_slug) { 'gitlab' }
......
......@@ -15,7 +15,7 @@ describe Serializers::JSON do
describe '.load' do
let(:data_string) { '{"key":"value","variables":[{"key":"VAR1","value":"VALUE1"}]}' }
let(:data_hash) { JSON.parse(data_string) }
let(:data_hash) { Gitlab::Json.parse(data_string) }
context 'when loading a hash' do
subject { described_class.load(data_hash) }
......