Commit d17e480b authored by Robert Speicher

Merge branch 'ce-to-ee-2018-08-15' into 'master'

CE upstream - 2018-08-15 00:22 UTC

See merge request gitlab-org/gitlab-ee!6898
parents 46215bdc 5bf9465c
......@@ -62,6 +62,8 @@ module IconsHelper
names = "key"
when "two-factor"
names = "key"
when "google_oauth2"
names = "google"
end
options.include?(:base) ? fa_stacked_icon(names, options) : fa_icon(names, options)
......
......@@ -36,7 +36,7 @@ module Ci
where(file_type: types)
end
delegate :exists?, :open, to: :file
delegate :filename, :exists?, :open, to: :file
enum file_type: {
archive: 1,
......
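Side note on the delegate change above: Rails' delegate macro just defines forwarding methods on the model, which is what lets the API entity further down read filename straight off a job artifact record. A minimal illustrative sketch of that mechanism, with placeholder class names (not the GitLab source):

require 'active_support/core_ext/module/delegation'

class ArtifactSketch
  attr_reader :file  # e.g. an uploader object that knows its own filename

  def initialize(file)
    @file = file
  end

  # Roughly equivalent to hand-writing `def filename; file.filename; end`, etc.
  delegate :filename, :exists?, :open, to: :file
end

uploader = Struct.new(:filename).new('artifacts.zip')  # stub; exists?/open are not exercised here
ArtifactSketch.new(uploader).filename                  # => "artifacts.zip"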
---
title: Show google icon in audit log
merge_request: 21207
author: Jan Beckmann
type: fixed
---
title: Expose all artifacts sizes in jobs api
merge_request: 20821
author: Peter Marko
type: added
......@@ -33,7 +33,6 @@ Example of response
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.727Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:24.921Z",
"artifacts_expire_at": "2016-01-23T17:54:24.921Z",
"id": 6,
......@@ -45,6 +44,7 @@ Example of response
"status": "pending"
},
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:24.729Z",
......@@ -82,6 +82,12 @@ Example of response
"filename": "artifacts.zip",
"size": 1000
},
"artifacts": [
{"file_type": "archive", "size": 1000, "filename": "artifacts.zip", "file_format": "zip"},
{"file_type": "metadata", "size": 186, "filename": "metadata.gz", "file_format": "gzip"},
{"file_type": "trace", "size": 1500, "filename": "job.log", "file_format": "raw"},
{"file_type": "junit", "size": 750, "filename": "junit.xml.gz", "file_format": "gzip"}
],
"finished_at": "2015-12-24T17:54:27.895Z",
"artifacts_expire_at": "2016-01-23T17:54:27.895Z",
"id": 7,
......@@ -93,6 +99,7 @@ Example of response
"status": "pending"
},
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:27.722Z",
......@@ -151,7 +158,6 @@ Example of response
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.727Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:24.921Z",
"artifacts_expire_at": "2016-01-23T17:54:24.921Z",
"id": 6,
......@@ -163,6 +169,7 @@ Example of response
"status": "pending"
},
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:24.729Z",
......@@ -200,6 +207,12 @@ Example of response
"filename": "artifacts.zip",
"size": 1000
},
"artifacts": [
{"file_type": "archive", "size": 1000, "filename": "artifacts.zip", "file_format": "zip"},
{"file_type": "metadata", "size": 186, "filename": "metadata.gz", "file_format": "gzip"},
{"file_type": "trace", "size": 1500, "filename": "job.log", "file_format": "raw"},
{"file_type": "junit", "size": 750, "filename": "junit.xml.gz", "file_format": "gzip"}
],
"finished_at": "2015-12-24T17:54:27.895Z",
"artifacts_expire_at": "2016-01-23T17:54:27.895Z",
"id": 7,
......@@ -211,6 +224,7 @@ Example of response
"status": "pending"
},
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:27.722Z",
......@@ -267,7 +281,6 @@ Example of response
},
"coverage": null,
"created_at": "2015-12-24T15:51:21.880Z",
"artifacts_file": null,
"finished_at": "2015-12-24T17:54:31.198Z",
"artifacts_expire_at": "2016-01-23T17:54:31.198Z",
"id": 8,
......@@ -279,6 +292,7 @@ Example of response
"status": "pending"
},
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"started_at": "2015-12-24T17:54:30.733Z",
......@@ -492,11 +506,11 @@ Example of response
},
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": "2016-01-11T10:14:09.526Z",
"id": 42,
"name": "rubocop",
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"started_at": null,
......@@ -539,11 +553,11 @@ Example of response
},
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": null,
"id": 42,
"name": "rubocop",
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"started_at": null,
......@@ -593,6 +607,7 @@ Example of response
"id": 42,
"name": "rubocop",
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"created_at": "2016-01-11T10:13:33.506Z",
......@@ -644,6 +659,7 @@ Example response:
"id": 42,
"name": "rubocop",
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"created_at": "2016-01-11T10:13:33.506Z",
......@@ -688,11 +704,11 @@ Example of response
},
"coverage": null,
"created_at": "2016-01-11T10:13:33.506Z",
"artifacts_file": null,
"finished_at": null,
"id": 42,
"name": "rubocop",
"ref": "master",
"artifacts": [],
"runner": null,
"stage": "test",
"started_at": null,
......
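Taken together, the documentation changes above drop the nulled-out artifacts_file examples and document the new artifacts array, which lists each artifact's file_type, size, filename, and file_format. As a hedged illustration of what the "Expose all artifacts sizes in jobs api" changelog entry enables for API consumers, a small client sketch (host, project ID, and token are placeholders, not part of this merge request):

require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects/42/jobs')  # placeholder host/project
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')             # placeholder token

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }

JSON.parse(response.body).each do |job|
  artifacts  = job.fetch('artifacts', [])                        # new field documented above
  total_size = artifacts.sum { |artifact| artifact['size'].to_i }
  puts "#{job['name']} (##{job['id']}): #{artifacts.size} artifacts, #{total_size} bytes"
end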
......@@ -1108,6 +1108,10 @@ module API
expose :filename, :size
end
class JobArtifact < Grape::Entity
expose :file_type, :size, :filename, :file_format
end
class JobBasic < Grape::Entity
expose :id, :status, :stage, :name, :ref, :tag, :coverage
expose :created_at, :started_at, :finished_at
......@@ -1122,7 +1126,9 @@ module API
end
class Job < JobBasic
# artifacts_file is included in job_artifacts, but kept for backward compatibility (remove in api/v5)
expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? }
expose :job_artifacts, as: :artifacts, using: JobArtifact
expose :runner, with: Runner
expose :artifacts_expire_at
end
......
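For readers less familiar with Grape entities, the `expose :job_artifacts, as: :artifacts, using: JobArtifact` line above is what produces the new artifacts array in the documented responses, while the legacy artifacts_file exposure stays until api/v5 as the comment notes. A standalone sketch of the same expose pattern with illustrative names (not the GitLab classes):

require 'grape_entity'
require 'ostruct'

class ArtifactEntity < Grape::Entity
  expose :file_type, :size, :filename, :file_format
end

class JobEntity < Grape::Entity
  # Rename the attribute in the output and serialize each element with ArtifactEntity.
  expose :job_artifacts, as: :artifacts, using: ArtifactEntity
end

job = OpenStruct.new(
  job_artifacts: [
    OpenStruct.new(file_type: 'archive', size: 1000,
                   filename: 'artifacts.zip', file_format: 'zip')
  ]
)

puts JobEntity.represent(job).to_json
# Roughly: {"artifacts":[{"file_type":"archive","size":1000,"filename":"artifacts.zip","file_format":"zip"}]}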
......@@ -38,7 +38,7 @@ module API
builds = user_project.builds.order('id DESC')
builds = filter_builds(builds, params[:scope])
builds = builds.preload(:user, :job_artifacts_archive, :runner, pipeline: :project)
builds = builds.preload(:user, :job_artifacts_archive, :job_artifacts, :runner, pipeline: :project)
present paginate(builds), with: Entities::Job
end
......@@ -54,7 +54,7 @@ module API
pipeline = user_project.pipelines.find(params[:pipeline_id])
builds = pipeline.builds
builds = filter_builds(builds, params[:scope])
builds = builds.preload(:job_artifacts_archive, project: [:namespace])
builds = builds.preload(:job_artifacts_archive, :job_artifacts, project: [:namespace])
present paginate(builds), with: Entities::Job
end
......
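The two preload changes above exist because the Job entity now serializes every job's job_artifacts; without preloading, each job in the collection would issue its own artifacts query, which is the N+1 the updated spec below guards against with query counts. A rough ActiveRecord sketch of the difference (the relation here is illustrative, not the API code itself):

# Without preloading: 1 query for the builds, then 1 artifacts query per build (N+1).
builds = project.builds
builds.each { |build| build.job_artifacts.to_a }

# With the added preload: 1 builds query plus a single extra query for all artifacts.
builds = project.builds.preload(:job_artifacts)
builds.each { |build| build.job_artifacts.to_a }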
......@@ -80,6 +80,26 @@ describe IconsHelper do
end
end
describe 'audit icon' do
it 'returns right icon name for standard auth' do
icon_name = 'standard'
expect(audit_icon(icon_name).to_s)
.to eq '<i class="fa fa-key"></i>'
end
it 'returns right icon name for two-factor auth' do
icon_name = 'two-factor'
expect(audit_icon(icon_name).to_s)
.to eq '<i class="fa fa-key"></i>'
end
it 'returns right icon name for google_oauth2 auth' do
icon_name = 'google_oauth2'
expect(audit_icon(icon_name).to_s)
.to eq '<i class="fa fa-google"></i>'
end
end
describe 'file_type_icon_class' do
it 'returns folder class' do
expect(file_type_icon_class('folder', 0, 'folder_name')).to eq 'folder'
......
......@@ -3,6 +3,32 @@ require 'spec_helper'
describe API::Jobs do
include HttpIOHelpers
shared_examples 'a job with artifacts and trace' do |result_is_array: true|
context 'with artifacts and trace' do
let!(:second_job) { create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline) }
it 'returns artifacts and trace data', :skip_before_request do
get api(api_endpoint, api_user)
json_job = result_is_array ? json_response.select { |job| job['id'] == second_job.id }.first : json_response
expect(json_job['artifacts_file']).not_to be_nil
expect(json_job['artifacts_file']).not_to be_empty
expect(json_job['artifacts_file']['filename']).to eq(second_job.artifacts_file.filename)
expect(json_job['artifacts_file']['size']).to eq(second_job.artifacts_file.size)
expect(json_job['artifacts']).not_to be_nil
expect(json_job['artifacts']).to be_an Array
expect(json_job['artifacts'].size).to eq(second_job.job_artifacts.length)
json_job['artifacts'].each do |artifact|
expect(artifact).not_to be_nil
file_type = Ci::JobArtifact.file_types[artifact['file_type']]
expect(artifact['size']).to eq(second_job.job_artifacts.where(file_type: file_type).first.size)
expect(artifact['filename']).to eq(second_job.job_artifacts.where(file_type: file_type).first.filename)
expect(artifact['file_format']).to eq(second_job.job_artifacts.where(file_type: file_type).first.file_format)
end
end
end
end
set(:project) do
create(:project, :repository, public_builds: false)
end
......@@ -50,6 +76,20 @@ describe API::Jobs do
expect(Time.parse(json_response.first['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
end
context 'without artifacts and trace' do
it 'returns no artifacts nor trace data' do
json_job = json_response.first
expect(json_job['artifacts_file']).to be_nil
expect(json_job['artifacts']).to be_an Array
expect(json_job['artifacts']).to be_empty
end
end
it_behaves_like 'a job with artifacts and trace' do
let(:api_endpoint) { "/projects/#{project.id}/jobs" }
end
it 'returns pipeline data' do
json_job = json_response.first
......@@ -61,7 +101,7 @@ describe API::Jobs do
end
it 'avoids N+1 queries', :skip_before_request do
first_build = create(:ci_build, :artifacts, pipeline: pipeline)
first_build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
first_build.runner = create(:ci_runner)
first_build.user = create(:user)
first_build.save
......@@ -69,7 +109,7 @@ describe API::Jobs do
control_count = ActiveRecord::QueryRecorder.new { go }.count
second_pipeline = create(:ci_empty_pipeline, project: project, sha: project.commit.id, ref: project.default_branch)
second_build = create(:ci_build, :artifacts, pipeline: second_pipeline)
second_build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: second_pipeline)
second_build.runner = create(:ci_runner)
second_build.user = create(:user)
second_build.save
......@@ -118,10 +158,12 @@ describe API::Jobs do
describe 'GET /projects/:id/pipelines/:pipeline_id/jobs' do
let(:query) { Hash.new }
before do
before do |example|
unless example.metadata[:skip_before_request]
job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end
end
context 'authorized user' do
it 'returns pipeline jobs' do
......@@ -134,6 +176,13 @@ describe API::Jobs do
expect(json_response).not_to be_empty
expect(json_response.first['commit']['id']).to eq project.commit.id
expect(Time.parse(json_response.first['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
expect(json_response.first['artifacts_file']).to be_nil
expect(json_response.first['artifacts']).to be_an Array
expect(json_response.first['artifacts']).to be_empty
end
it_behaves_like 'a job with artifacts and trace' do
let(:api_endpoint) { "/projects/#{project.id}/pipelines/#{pipeline.id}/jobs" }
end
it 'returns pipeline data' do
......@@ -184,7 +233,7 @@ describe API::Jobs do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end.count
3.times { create(:ci_build, :artifacts, pipeline: pipeline) }
3.times { create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline) }
expect do
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
......@@ -202,9 +251,11 @@ describe API::Jobs do
end
describe 'GET /projects/:id/jobs/:job_id' do
before do
before do |example|
unless example.metadata[:skip_before_request]
get api("/projects/#{project.id}/jobs/#{job.id}", api_user)
end
end
context 'authorized user' do
it 'returns specific job data' do
......@@ -220,10 +271,17 @@ describe API::Jobs do
expect(Time.parse(json_response['started_at'])).to be_like_time(job.started_at)
expect(Time.parse(json_response['finished_at'])).to be_like_time(job.finished_at)
expect(Time.parse(json_response['artifacts_expire_at'])).to be_like_time(job.artifacts_expire_at)
expect(json_response['artifacts_file']).to be_nil
expect(json_response['artifacts']).to be_an Array
expect(json_response['artifacts']).to be_empty
expect(json_response['duration']).to eq(job.duration)
expect(json_response['web_url']).to be_present
end
it_behaves_like 'a job with artifacts and trace', result_is_array: false do
let(:api_endpoint) { "/projects/#{project.id}/jobs/#{second_job.id}" }
end
it 'returns pipeline data' do
json_job = json_response
......