Commit 61864a5a authored by Zeger-Jan van de Weg's avatar Zeger-Jan van de Weg Committed by Kamil Trzcinski

Rename Artifact to JobArtifact, split metadata out

Two things at once, as there was no clean way to separate the commits and
still get feedback from the tests.

But the model Artifact is now JobArtifact, the table no longer has a
type column, and the metadata is now its own model:
Ci::JobArtifactMetadata.
parent 25df6661
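As a rough usage sketch (not part of the commit), the associations added below on Ci::Build let callers reach the archive and its metadata as separate Ci::JobArtifact rows, discriminated by file_type; the build id and file access here are placeholders.

build = Ci::Build.find(build_id)   # build_id is a placeholder
archive  = build.job_archive       # Ci::JobArtifact with file_type: :archive
metadata = build.job_metadata      # Ci::JobArtifact with file_type: :metadata
archive&.file                      # CarrierWave file mounted via JobArtifactUploader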
module Ci
class Artifact < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :build, class_name: "Ci::Build", foreign_key: :ci_build_id
before_save :set_size, if: :file_changed?
mount_uploader :file, ArtifactUploader
enum type: { archive: 0, metadata: 1 }
# Allow us to use `type` as column name, without Rails thinking we're using
# STI: https://stackoverflow.com/a/29663933
def self.inheritance_column
nil
end
def set_size
self.size = file.exists? ? file.size : 0
end
end
end
......@@ -11,10 +11,15 @@ module Ci
belongs_to :erased_by, class_name: 'User'
has_many :deployments, as: :deployable
has_many :artifacts, class_name: 'Ci::Artifact', foreign_key: :ci_build_id
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :ci_job_id
has_one :job_archive, -> () { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', foreign_key: :ci_job_id
has_one :job_metadata, -> () { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', foreign_key: :ci_job_id
# The "environment" field for builds is a String, and is the unexpanded name
def persisted_environment
@persisted_environment ||= Environment.find_by(
......@@ -33,7 +38,9 @@ module Ci
scope :unstarted, ->() { where(runner_id: nil) }
scope :ignore_failures, ->() { where(allow_failure: false) }
scope :with_artifacts, ->() { where.not(artifacts_file: [nil, '']) }
scope :with_artifacts, ->() do
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)', '', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.ci_job_id'))
end
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
......@@ -423,7 +430,7 @@ module Ci
Gitlab::Ci::Build::Image.from_services(self)
end
def artifacts_options
def artifacts
[options[:artifacts]]
end
......@@ -464,12 +471,6 @@ module Ci
super(options).merge(when: read_attribute(:when))
end
def update_project_statistics
return unless project
ProjectCacheWorker.perform_async(project_id, [], [:build_artifacts_size])
end
private
def update_artifacts_size
......@@ -560,6 +561,11 @@ module Ci
pipeline.config_processor.build_attributes(name)
end
def update_project_statistics
return unless project
ProjectCacheWorker.perform_async(project_id, [], [:build_artifacts_size])
end
def update_project_statistics_after_save
if previous_changes.include?('artifacts_size')
......
module Ci
class JobArtifact < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :project
belongs_to :job, class_name: "Ci::Build", foreign_key: :ci_job_id
before_save :set_size, if: :file_changed?
after_commit :remove_file!, on: :destroy
mount_uploader :file, JobArtifactUploader
enum file_type: {
archive: 1,
metadata: 2
}
def self.artifacts_size_for(project)
self.where(project: project).sum(:size)
end
def set_size
self.size = file.size
end
end
end
......@@ -3,15 +3,14 @@
# Meant to be prepended so the interface can stay the same
module ArtifactMigratable
def artifacts_file
artifacts.archive.first&.file || super
job_archive&.file || super
end
def artifacts_metadata
artifacts.metadata.first&.file || super
job_metadata&.file || super
end
def artifacts?
!artifacts_expired? && artifacts_file.exists?
end
......@@ -19,19 +18,28 @@ module ArtifactMigratable
artifacts? && artifacts_metadata.exists?
end
def artifacts_file_changed?
job_archive&.file_changed? || super
end
def remove_artifacts_file!
artifacts_file.remove!
if job_archive
job_archive.destroy
else
super
end
end
def remove_artifacts_metadata!
artifacts_metadata.remove!
if job_metadata
job_metadata.destroy
else
super
end
def artifacts_file=(file)
artifacts.create(project: project, type: :archive, file: file)
end
def artifacts_metadata=(file)
artifacts.create(project: project, type: :metadata, file: file)
def artifacts_size
read_attribute(:artifacts_size).to_i +
job_archive&.size.to_i + job_metadata&.size.to_i
end
end
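A minimal sketch, assuming ArtifactMigratable is prepended into Ci::Build (the include site is not shown in this diff): prepending puts these overrides ahead of the column-backed accessors, so they try the new Ci::JobArtifact records first and fall back to the legacy columns via super.

Ci::Build.prepend(ArtifactMigratable)  # assumed wiring, not shown in this diff

build.artifacts_file  # job_archive file when a Ci::JobArtifact exists, else the legacy column
build.artifacts_size  # legacy artifacts_size plus the sizes of the new artifact rows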
......@@ -35,7 +35,10 @@ class ProjectStatistics < ActiveRecord::Base
end
def update_build_artifacts_size
self.build_artifacts_size = project.builds.sum(:artifacts_size)
self.build_artifacts_size =
project.builds.sum(:artifacts_size) +
Ci::JobArtifact.artifacts_size_for(project) +
Ci::JobArtifactMetadata.artifacts_size_for(project)
end
def update_storage_size
......
......@@ -12,10 +12,6 @@ class ArtifactUploader < GitlabUploader
end
def initialize(job, field)
# Temporary conditional, needed to move artifacts to their own table,
# but keeping compat with Ci::Build for the time being
job = job.build if job.respond_to?(:build)
@job, @field = job, field
end
......@@ -38,6 +34,8 @@ class ArtifactUploader < GitlabUploader
end
def default_path
File.join(job.created_at.utc.strftime('%Y_%m'), job.project_id.to_s, job.id.to_s)
File.join(job.project_id.to_s,
job.created_at.utc.strftime('%Y_%m'),
job.id.to_s)
end
end
class JobArtifactUploader < ArtifactUploader
def initialize(artifact, _field)
@artifact = artifact
end
# If this record exists, the associated artifact is there. Every artifact
# persisted will have an associated file
def exists?
true
end
def size
return super unless @artifact.size
@artifact.size
end
private
def disk_hash
@disk_hash ||= Digest::SHA2.hexdigest(job.project_id.to_s)
end
def default_path
creation_date = job.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
creation_date, job.id.to_s, @artifact.id.to_s)
end
def job
@artifact.job
end
end
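For illustration, a standalone sketch (not part of the commit) of the hashed path shape JobArtifactUploader#default_path produces; the project id, job id, artifact id and creation date are assumed values.

require 'digest'

project_id = 1                                                # assumed
disk_hash  = Digest::SHA2.hexdigest(project_id.to_s)
creation_date = Time.utc(2017, 11, 22).strftime('%Y_%m_%d')   # assumed job creation time

File.join(disk_hash[0..1], disk_hash[2..3], disk_hash, creation_date, '42', '7')
# => "6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b/2017_11_22/42/7"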
class CreateArtifacts < ActiveRecord::Migration
def up
create_table :ci_artifacts do |t|
t.belongs_to :project, null: false, foreign_key: { on_delete: :cascade }
t.belongs_to :ci_build, null: false, foreign_key: { on_delete: :cascade }
t.integer :type, default: 0, null: false
t.integer :size, limit: 8, default: 0
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :updated_at, null: false
t.text :file
end
add_index(:ci_artifacts, [:project_id, :ci_build_id], unique: true)
end
def down
drop_table(:ci_artifacts)
end
end
class CreateJobArtifacts < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :ci_job_artifacts do |t|
t.belongs_to :project, null: false, index: true, foreign_key: { on_delete: :cascade }
t.integer :ci_job_id, null: false, index: true
t.integer :size, limit: 8
t.integer :file_type, null: false, index: true
t.datetime_with_timezone :created_at, null: false
t.datetime_with_timezone :updated_at, null: false
t.string :file
end
end
end
......@@ -226,18 +226,6 @@ ActiveRecord::Schema.define(version: 20171124150326) do
add_index "chat_teams", ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree
create_table "ci_artifacts", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "ci_build_id", null: false
t.integer "type", default: 0, null: false
t.integer "size", limit: 8, default: 0
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.text "file"
end
add_index "ci_artifacts", ["project_id", "ci_build_id"], name: "index_ci_artifacts_on_project_id_and_ci_build_id", unique: true, using: :btree
create_table "ci_build_trace_section_names", force: :cascade do |t|
t.integer "project_id", null: false
t.string "name", null: false
......@@ -331,6 +319,20 @@ ActiveRecord::Schema.define(version: 20171124150326) do
add_index "ci_group_variables", ["group_id", "key"], name: "index_ci_group_variables_on_group_id_and_key", unique: true, using: :btree
create_table "ci_job_artifacts", force: :cascade do |t|
t.integer "project_id", null: false
t.integer "ci_job_id", null: false
t.integer "size", limit: 8
t.integer "file_type", null: false
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.string "file"
end
add_index "ci_job_artifacts", ["ci_job_id"], name: "index_ci_job_artifacts_on_ci_job_id", using: :btree
add_index "ci_job_artifacts", ["file_type"], name: "index_ci_job_artifacts_on_file_type", using: :btree
add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
......@@ -391,9 +393,9 @@ ActiveRecord::Schema.define(version: 20171124150326) do
t.integer "auto_canceled_by_id"
t.integer "pipeline_schedule_id"
t.integer "source"
t.integer "config_source"
t.boolean "protected"
t.integer "failure_reason"
t.integer "config_source"
end
add_index "ci_pipelines", ["auto_canceled_by_id"], name: "index_ci_pipelines_on_auto_canceled_by_id", using: :btree
......@@ -1913,8 +1915,6 @@ ActiveRecord::Schema.define(version: 20171124150326) do
add_foreign_key "boards", "projects", name: "fk_f15266b5f9", on_delete: :cascade
add_foreign_key "chat_teams", "namespaces", on_delete: :cascade
add_foreign_key "ci_artifacts", "ci_builds", on_delete: :cascade
add_foreign_key "ci_artifacts", "projects", on_delete: :cascade
add_foreign_key "ci_build_trace_section_names", "projects", on_delete: :cascade
add_foreign_key "ci_build_trace_sections", "ci_build_trace_section_names", column: "section_name_id", name: "fk_264e112c66", on_delete: :cascade
add_foreign_key "ci_build_trace_sections", "ci_builds", column: "build_id", name: "fk_4ebe41f502", on_delete: :cascade
......@@ -1923,6 +1923,7 @@ ActiveRecord::Schema.define(version: 20171124150326) do
add_foreign_key "ci_builds", "ci_stages", column: "stage_id", name: "fk_3a9eaa254d", on_delete: :cascade
add_foreign_key "ci_builds", "projects", name: "fk_befce0568a", on_delete: :cascade
add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
......
......@@ -1006,13 +1006,9 @@ module API
expose :type, :url, :username, :password
end
class ArtifactFile < Grape::Entity
expose :filename, :size
end
class Dependency < Grape::Entity
expose :id, :name, :token
expose :artifacts_file, using: ArtifactFile, if: ->(job, _) { job.artifacts? }
expose :artifacts_file, using: JobArtifactFile, if: ->(job, _) { job.artifacts? }
end
class Response < Grape::Entity
......@@ -1036,7 +1032,7 @@ module API
expose :steps, using: Step
expose :image, using: Image
expose :services, using: Service
expose :artifacts_options, as: :artifacts, using: Artifacts
expose :artifacts, using: Artifacts
expose :cache, using: Cache
expose :credentials, using: Credentials
expose :dependencies, using: Dependency
......
......@@ -215,15 +215,16 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
artifacts_upload_path = ArtifactUploader.artifacts_upload_path
artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
job.artifacts_file = artifacts
job.artifacts_metadata = metadata
job.job_artifacts.build(project: job.project, file_type: :archive, file: artifacts)
job.job_artifacts.build(project: job.project, file_type: :metadata, file: metadata)
job.artifacts_expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
......
......@@ -155,14 +155,12 @@ FactoryGirl.define do
end
trait :artifacts do
after(:create) do |build, _|
create(:artifact, build: build)
create(:artifact_metadata, build: build)
end
job_archive factory: :ci_job_artifact
job_metadata factory: :ci_job_metadata
end
trait :expired do
artifacts_expire_at = 1.minute.ago
artifacts_expire_at 1.minute.ago
end
trait :with_commit do
......
include ActionDispatch::TestProcess
FactoryGirl.define do
factory :artifact, class: Ci::Artifact do
factory :ci_job_artifact, class: Ci::JobArtifact do
project
build factory: :ci_build
job factory: :ci_build
file_type :archive
after :create do |artifact|
artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
if artifact.archive?
artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'),
'application/zip')
artifact.save
end
end
end
factory :artifact_metadata do
type :metadata
factory :ci_job_metadata, parent: :ci_job_artifact do
file_type :metadata
after :create do |artifact|
artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'), 'application/x-gzip')
artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts_metadata.gz'),
'application/x-gzip')
artifact.save
end
end
end
end
......@@ -146,7 +146,7 @@ describe Ci::Build do
it { is_expected.to be_truthy }
context 'is expired' do
let(:build) { create(:ci_build, :artifacts, :expired) }
let!(:build) { create(:ci_build, :artifacts, :expired) }
it { is_expected.to be_falsy }
end
......
require 'spec_helper'
describe Ci::Artifact do
describe Ci::JobArtifact do
set(:artifact) { create(:ci_job_artifact) }
describe "Associations" do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:build) }
it { is_expected.to belong_to(:job) }
end
it { is_expected.to respond_to(:file) }
it { is_expected.to respond_to(:created_at) }
it { is_expected.to respond_to(:updated_at) }
it { is_expected.to respond_to(:type) }
let(:artifact) { create(:artifact) }
describe '#type' do
it 'defaults to archive' do
expect(artifact.type).to eq("archive")
end
end
describe '#set_size' do
it 'sets the size' do
......@@ -32,28 +27,4 @@ describe Ci::Artifact do
it { is_expected.to respond_to(:work_dir) }
end
end
describe '#remove_file' do
it 'removes the file from the database' do
artifact.remove_file!
expect(artifact.file.exists?).to be_falsy
end
end
describe '#exists?' do
context 'when the artifact file is present' do
it 'returns true' do
expect(artifact.exists?).to be(true)
end
end
context 'when the file has been removed' do
it 'does not exist' do
artifact.remove_file!
expect(artifact.exists?).to be_falsy
end
end
end
end
......@@ -133,15 +133,17 @@ describe ProjectStatistics do
describe '#update_build_artifacts_size' do
let!(:pipeline) { create(:ci_pipeline, project: project) }
let!(:build1) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) }
let!(:build2) { create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes) }
let!(:ci_build) { create(:ci_build, pipeline: pipeline, artifacts_size: 45.megabytes) }
before do
create(:ci_build, pipeline: pipeline, artifacts_size: 56.megabytes)
create(:ci_job_artifact, project: pipeline.project, job: ci_build)
statistics.update_build_artifacts_size
end
it "stores the size of related build artifacts" do
expect(statistics.build_artifacts_size).to eq 101.megabytes
expect(statistics.build_artifacts_size).to eq(106012541)
end
end
......
......@@ -945,7 +945,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# configuring this path allows passing a temp file from any location
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
end
context 'when job has been erased' do
......@@ -1106,7 +1106,7 @@ describe API::Runner do
expect(response).to have_gitlab_http_status(201)
expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
expect(stored_artifacts_size).to eq(71759)
expect(stored_artifacts_size).to eq(72821)
end
end
......
require 'spec_helper'
describe PipelineSerializer do
let(:user) { create(:user) }
set(:user) { create(:user) }
let(:serializer) do
described_class.new(current_user: user)
......
require 'rails_helper'
describe ArtifactUploader do
let(:job) { create(:ci_build) }
set(:job) { create(:ci_build) }
let(:uploader) { described_class.new(job, :artifacts_file) }
let(:path) { Gitlab.config.artifacts.path }
......@@ -26,7 +26,7 @@ describe ArtifactUploader do
subject { uploader.store_dir }
it { is_expected.to start_with(path) }
it { is_expected.to end_with("#{job.project_id}/#{job.id}") }
it { is_expected.to end_with("#{job.project_id}/#{job.created_at.utc.strftime('%Y_%m')}/#{job.id}") }
end
describe '#cache_dir' do
......
require 'spec_helper'
describe JobArtifactUploader do
set(:job_artifact) { create(:ci_job_artifact) }
let(:job) { job_artifact.job }
let(:uploader) { described_class.new(job_artifact, :file) }
describe '#store_dir' do
subject { uploader.store_dir }
it { is_expected.to start_with(Gitlab.config.artifacts.path) }
it { is_expected.not_to end_with("#{job.project_id}/#{job.created_at.utc.strftime('%Y_%m')}/#{job.id}") }
it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
end
end
......@@ -11,12 +11,8 @@ describe ExpireBuildInstanceArtifactsWorker do
end
context 'with expired artifacts' do
let(:artifacts_expiry) { { artifacts_expire_at: Time.now - 7.days } }
context 'when associated project is valid' do
let(:build) do
create(:ci_build, :artifacts, artifacts_expiry)
end
let(:build) { create(:ci_build, :artifacts, :expired) }
it 'does expire' do
expect(build.reload.artifacts_expired?).to be_truthy
......@@ -26,14 +22,14 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_falsey
end
it 'does nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).to be_nil
it 'does remove the job artifact record' do
expect(build.reload.job_archive).to be_nil
end
end
end
context 'with not yet expired artifacts' do
let(:build) do
set(:build) do
create(:ci_build, :artifacts, artifacts_expire_at: Time.now + 7.days)
end
......@@ -45,8 +41,8 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
it 'does not nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).not_to be_nil
it 'does not remove the job artifact record' do
expect(build.reload.job_archive).not_to be_nil
end
end
......@@ -61,13 +57,13 @@ describe ExpireBuildInstanceArtifactsWorker do
expect(build.reload.artifacts_file.exists?).to be_truthy
end
it 'does not nullify artifacts_file column' do
expect(build.reload.artifacts_file_identifier).not_to be_nil
it 'does not remove the job artifact record' do
expect(build.reload.job_archive).not_to be_nil
end
end
context 'for expired artifacts' do
let(:build) { create(:ci_build, artifacts_expire_at: Time.now - 7.days) }
let(:build) { create(:ci_build, :expired) }
it 'is still expired' do
expect(build.reload.artifacts_expired?).to be_truthy
......