Commit 01e0afa2 authored by Vitali Tatarintev

Merge branch '276498_introduce_new_vulnerability_ingestion_service_classes' into 'master'

Implement new ingestion flow for the security reports

See merge request gitlab-org/gitlab!66735
parents da1009a2 9624adf6
---
name: security_report_ingestion_framework
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66735
rollout_issue_url:
milestone: '14.4'
type: development
group: group::threat insights
default_enabled: false
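The new flow is gated behind this flag and is off by default. A minimal sketch of toggling it for a single project from a Rails console (standard GitLab feature-flag API; the `project` variable is an assumption):

# Enable the new ingestion flow for one project and verify the gate that
# StoreSecurityReportsWorker checks further below.
Feature.enable(:security_report_ingestion_framework, project)
Feature.enabled?(:security_report_ingestion_framework, project) # => true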
...@@ -40,18 +40,47 @@ module Security
.merge(Vulnerabilities::Feedback.for_dismissal)
end
- scope :latest_successful_by_build, -> { joins(:build).where(ci_builds: { status: 'success', retried: [nil, false] }) }
+ scope :latest, -> { where(latest: true) }
+ scope :latest_successful_by_build, -> { joins(:build).where(ci_builds: { retried: [nil, false], status: 'success' }) }
+ scope :without_errors, -> { where("jsonb_array_length(COALESCE(info->'errors', '[]'::jsonb)) = 0") }
delegate :name, to: :build
before_save :ensure_project_id_pipeline_id
def has_errors?
- info&.fetch('errors', []).present?
+ processing_errors.present?
end
def processing_errors
info&.fetch('errors', [])
end
def processing_errors=(errors)
info['errors'] = errors
end
def add_processing_error!(error)
info['errors'] = processing_errors.push(error.stringify_keys)
save!
end
# Returns the findings from the source report
def report_findings
@report_findings ||= security_report&.findings.to_a
end
private
def security_report
job_artifact&.security_report
end
def job_artifact
build.job_artifacts.find_by_file_type(scan_type)
end
def ensure_project_id_pipeline_id
self.project_id ||= build.project_id
self.pipeline_id ||= build.commit_id
...
...@@ -30,6 +30,7 @@ module Vulnerabilities
validates :pipeline, same_project_association: true, if: :pipeline_id?
scope :with_associations, -> { includes(:pipeline, :issue, :merge_request, :author, :comment_author) }
scope :by_finding_uuid, -> (uuids) { where(finding_uuid: uuids) }
scope :all_preloaded, -> do
preload(:author, :comment_author, :project, :issue, :merge_request, :pipeline)
...
...@@ -11,6 +11,10 @@ module Vulnerabilities
# https://gitlab.com/gitlab-org/gitlab/-/issues/214563#note_370782508 is why the table names are not renamed
self.table_name = "vulnerability_occurrences"
# This is necessary to prevent updating the
# created_at attribute with upsert queries.
attr_readonly(:created_at)
FINDINGS_PER_PAGE = 20
MAX_NUMBER_OF_IDENTIFIERS = 20
REPORT_TYPES_WITH_LOCATION_IMAGE = %w[container_scanning cluster_image_scanning].freeze
...
...@@ -4,6 +4,10 @@ module Vulnerabilities
class FindingIdentifier < ApplicationRecord
self.table_name = "vulnerability_occurrence_identifiers"
# This is necessary to prevent updating the
# created_at attribute with upsert queries.
attr_readonly(:created_at)
alias_attribute :finding_id, :occurrence_id
belongs_to :finding, class_name: 'Vulnerabilities::Finding', inverse_of: :finding_identifiers, foreign_key: 'occurrence_id'
...
...@@ -9,5 +9,7 @@ module Vulnerabilities
validates :finding, presence: true
validates :url, presence: true, length: { maximum: 255 }
validates :name, length: { maximum: 2048 }
scope :by_finding_id, -> (finding_ids) { where(vulnerability_occurrence_id: finding_ids) }
end
end
...@@ -5,7 +5,13 @@ module Vulnerabilities
class FindingRemediation < ApplicationRecord
self.table_name = 'vulnerability_findings_remediations'
# This is necessary to prevent updating the
# created_at attribute with upsert queries.
attr_readonly(:created_at)
belongs_to :finding, class_name: 'Vulnerabilities::Finding', inverse_of: :finding_remediations, foreign_key: 'vulnerability_occurrence_id', optional: false
belongs_to :remediation, class_name: 'Vulnerabilities::Remediation', inverse_of: :finding_remediations, foreign_key: 'vulnerability_remediation_id', optional: false
scope :by_finding_id, -> (finding_ids) { where(vulnerability_occurrence_id: finding_ids) }
end
end
...@@ -2,11 +2,15 @@
module Vulnerabilities
class FindingSignature < ApplicationRecord
self.table_name = 'vulnerability_finding_signatures'
include BulkInsertSafe
include VulnerabilityFindingSignatureHelpers
self.table_name = 'vulnerability_finding_signatures'
# This is necessary to prevent updating the
# created_at attribute with upsert queries.
attr_readonly(:created_at)
belongs_to :finding, foreign_key: 'finding_id', inverse_of: :signatures, class_name: 'Vulnerabilities::Finding'
enum algorithm_type: VulnerabilityFindingSignatureHelpers::ALGORITHM_TYPES, _prefix: :algorithm
validates :finding, presence: true
...
...@@ -8,7 +8,7 @@ module Security
delegator_override :errors
def errors
- info['errors'].to_a.map { |error| format(ERROR_MESSAGE_FORMAT, error.symbolize_keys) }
+ processing_errors.to_a.map { |error| format(ERROR_MESSAGE_FORMAT, error.symbolize_keys) }
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
class AbstractTask
def self.execute(pipeline, finding_maps)
new(pipeline, finding_maps).execute
end
def initialize(pipeline, finding_maps)
@pipeline = pipeline
@finding_maps = finding_maps
end
def execute
raise "Implement the `execute` template method!"
end
private
attr_reader :pipeline, :finding_maps
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
#
# Provides a DSL to define bulk insertable ingestion tasks.
#
# Tasks including this module should set some configuration value(s)
# and implement the template method(s).
#
# Configuration values:
#
# `model`: The ActiveRecord model which the task is ingesting the data for.
# `unique_by`: Optional value to set the unique constraint which will be used by
# PostgreSQL to update records on conflict. The task raises an exception
# in case of a conflict if this is not set.
# `uses`: Optional value to set the return columns of the insert query.
# The method named `after_ingest` will be called if this value is set.
#
# Template methods:
#
# `attributes`: Returns an array of Hash objects that contain the names of the attributes and their values
# as key & value pairs.
# `after_ingest`: If the task uses the return value(s) of the insert query, this method will
# be called. The return data of the insert query can be accessed via the `return_data` method.
#
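# Example (an illustrative sketch only, not part of this merge request; `MyModel`
# and the attribute values below are hypothetical. See the concrete classes under
# Security::Ingestion::Tasks for real usages):
#
#   class IngestMyRecords < AbstractTask
#     include BulkInsertableTask
#
#     self.model = MyModel
#     self.unique_by = %i[project_id fingerprint]
#     self.uses = %i[id]
#
#     private
#
#     def attributes
#       finding_maps.map do |finding_map|
#         { project_id: 1, fingerprint: finding_map.uuid }
#       end
#     end
#
#     def after_ingest
#       # `return_data` holds the returned `id` value for each upserted row.
#       return_data
#     end
#   end
#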
module BulkInsertableTask
include Gitlab::Utils::StrongMemoize
def self.included(base)
base.singleton_class.attr_accessor :model, :unique_by, :uses
end
def execute
result_set
after_ingest if uses
end
private
delegate :unique_by, :model, :uses, :cast_values, to: :'self.class', private: true
def return_data
@return_data ||= result_set&.cast_values(model.attribute_types).to_a
end
def result_set
strong_memoize(:result_set) do
if insert_attributes.present?
ActiveRecord::InsertAll.new(model, insert_attributes, on_duplicate: on_duplicate, returning: uses, unique_by: unique_by).execute
end
end
end
def after_ingest
raise "Implement the `after_ingest` template method!"
end
def attributes
raise "Implement the `attributes` template method!"
end
def insert_attributes
@insert_attributes ||= attributes.map { |values| values.merge(timestamps) }
end
def timestamps
@timestamps ||= Time.zone.now.then { |time| { created_at: time, updated_at: time } }
end
def on_duplicate
unique_by.present? ? :update : :skip
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
#
# Provides a DSL to define bulk updatable ingestion tasks.
#
# Tasks including this module should set a configuration value
# and implement the template method.
#
# Configuration value:
#
# `model`: The ActiveRecord model which the task is ingesting the data for.
#
# Template method:
#
# `attributes`: Returns an array of Hash objects that contain the names of the attributes and their values
# as key & value pairs.
#
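# Example (an illustrative sketch only; the class name and values are hypothetical.
# See Tasks::IngestVulnerabilities::Update further below for the concrete task
# added in this merge request):
#
#   class BulkTouchVulnerabilities < AbstractTask
#     include BulkUpdatableTask
#
#     self.model = Vulnerability
#
#     private
#
#     def attributes
#       finding_maps.map do |finding_map|
#         { id: finding_map.vulnerability_id, updated_at: Time.zone.now }
#       end
#     end
#   end
#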
module BulkUpdatableTask
include Gitlab::Utils::StrongMemoize
SQL_TEMPLATE = <<~SQL
UPDATE
%<table_name>s
SET
%<set_values>s
FROM
(%<values>s) AS map(%<map_schema>s)
WHERE
%<table_name>s.%<primary_key>s = map.%<primary_key>s
SQL
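# For a hypothetical model backed by a `widgets` table with an `id` (bigint)
# primary key and a `title` (text) column, the template above renders roughly as:
#
#   UPDATE
#     widgets
#   SET
#     id = map.id::bigint, title = map.title::text
#   FROM
#     (VALUES (1, 'New title')) AS map(id, title)
#   WHERE
#     widgets.id = map.id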
def self.included(base)
base.singleton_class.attr_accessor :model
end
def execute
return unless attribute_names.present?
connection.execute(update_sql)
end
private
delegate :model, to: :'self.class', private: true
delegate :table_name, :primary_key, :column_for_attribute, :type_for_attribute, :connection, to: :model, private: true
def update_sql
format(SQL_TEMPLATE, table_name: table_name, set_values: set_values, values: values, primary_key: primary_key, map_schema: map_schema)
end
def set_values
attribute_names.map do |attribute|
"#{attribute} = map.#{attribute}::#{sql_type_for(attribute)}"
end.join(', ')
end
def sql_type_for(attribute)
column_for_attribute(attribute).sql_type
end
def values
attributes.map { |attribute_map| build_values_for(attribute_map) }
.then { |serialized_attributes| Arel::Nodes::ValuesList.new(serialized_attributes) }
.to_sql
end
def build_values_for(attribute_map)
attribute_map.map { |attribute, value| type_for_attribute(attribute).serialize(value) }
end
def map_schema
attribute_names.join(', ')
end
def attribute_names
strong_memoize(:attribute_names) do
attributes.first&.keys
end
end
def attributes
raise "Implement the `attributes` template method!"
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
# This entity is used in ingestion services to
# map security_finding - report_finding - vulnerability_id - finding_id
#
# You can think of this as the Message object in the pipeline design pattern
# which is passed between tasks.
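# Rough lifecycle of a single map as it flows through the tasks below
# (the values are illustrative):
#
#   finding_map = FindingMap.new(security_finding, report_finding)
#   finding_map.set_identifier_ids_by(fingerprint => identifier_id) # by Tasks::IngestIdentifiers
#   finding_map.finding_id = 42                                     # by Tasks::IngestFindings
#   finding_map.vulnerability_id = 7                                # by Tasks::IngestVulnerabilities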
class FindingMap
FINDING_ATTRIBUTES = %i[confidence metadata_version name raw_metadata report_type severity details].freeze
RAW_METADATA_ATTRIBUTES = %w[description message solution cve location].freeze
RAW_METADATA_PLACEHOLDER = { description: nil, message: nil, solution: nil, cve: nil, location: nil }.freeze
attr_reader :security_finding, :report_finding
attr_accessor :finding_id, :vulnerability_id, :new_record, :identifier_ids
delegate :uuid, :scanner_id, to: :security_finding
def initialize(security_finding, report_finding)
@security_finding = security_finding
@report_finding = report_finding
@identifier_ids = []
end
def identifiers
@identifiers ||= report_finding.identifiers.first(Vulnerabilities::Finding::MAX_NUMBER_OF_IDENTIFIERS)
end
def set_identifier_ids_by(fingerprint_id_map)
@identifier_ids = identifiers.map { |identifier| fingerprint_id_map[identifier.fingerprint] }
end
def to_hash
# This was already an existing problem so we've used it here as well.
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/342043
parsed_from_raw_metadata = Gitlab::Json.parse(report_finding.raw_metadata).slice(*RAW_METADATA_ATTRIBUTES).symbolize_keys
report_finding.to_hash
.slice(*FINDING_ATTRIBUTES)
.merge(RAW_METADATA_PLACEHOLDER)
.merge(parsed_from_raw_metadata)
.merge(primary_identifier_id: identifier_ids.first, location_fingerprint: report_finding.location.fingerprint, project_fingerprint: report_finding.project_fingerprint)
.merge(uuid: uuid, scanner_id: scanner_id)
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
class FindingMapCollection
include Enumerable
def initialize(security_scan)
@security_scan = security_scan
end
def each
return to_enum(:each) unless block_given?
deduplicated_findings.each do |security_finding|
yield create_finding_map_for(security_finding)
end
end
private
attr_reader :security_scan
delegate :findings, :report_findings, to: :security_scan, private: true
def create_finding_map_for(security_finding)
# For SAST findings, we override the finding UUID with an existing finding UUID
# if we have a matching one.
report_uuid = security_finding.overridden_uuid || security_finding.uuid
FindingMap.new(security_finding, report_findings_map[report_uuid])
end
def report_findings_map
@report_findings_map ||= report_findings.index_by(&:uuid)
end
def deduplicated_findings
@deduplicated_findings ||= findings.deduplicated
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
# This class splits the ingestion of the vulnerabilities
# of a security scan into multiple batches.
#
# Returns the ingested vulnerability IDs for each batch.
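# Expected usage, assuming `security_scan` is a Security::Scan record
# (this is how IngestReportsService below invokes it):
#
#   Security::Ingestion::IngestReportService.execute(security_scan)
#   # => flattened array of ingested vulnerability IDs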
class IngestReportService
BATCH_SIZE = 50
SCAN_INGESTION_ERROR = {
type: 'IngestionError',
message: 'Ingestion failed for some vulnerabilities'
}.freeze
def self.execute(security_scan)
new(security_scan).execute
end
def initialize(security_scan)
@security_scan = security_scan
@errored = false
end
def execute
finding_map_collection.each_slice(BATCH_SIZE).flat_map { |slice| ingest_slice(slice) }
end
private
attr_reader :security_scan
attr_accessor :errored
delegate :pipeline, to: :security_scan, private: true
def finding_map_collection
@finding_map_collection ||= FindingMapCollection.new(security_scan)
end
def ingest_slice(slice)
IngestReportSliceService.execute(pipeline, slice)
rescue StandardError => error
process_error(error)
end
def process_error(error)
Gitlab::ErrorTracking.track_exception(error)
set_ingestion_error!
# we are explicitly returning an empty array for the caller service.
# Otherwise, the return value will be the result of the `set_ingestion_error!` method.
[]
end
def set_ingestion_error!
return if errored
self.errored = true
security_scan.add_processing_error!(SCAN_INGESTION_ERROR)
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
# Base class to organize the chain of responsibilities
# for the report slice.
#
# Returns the ingested vulnerability IDs.
class IngestReportSliceService
TASKS = %i[
IngestIdentifiers
IngestFindings
IngestVulnerabilities
AttachFindingsToVulnerabilities
IngestFindingPipelines
IngestFindingIdentifiers
IngestFindingLinks
IngestFindingSignatures
IngestRemediations
].freeze
def self.execute(pipeline, finding_maps)
new(pipeline, finding_maps).execute
end
def initialize(pipeline, finding_maps)
@pipeline = pipeline
@finding_maps = finding_maps
end
def execute
ApplicationRecord.transaction do
TASKS.each { |task| execute_task(task) }
end
@finding_maps.map(&:vulnerability_id)
end
private
def execute_task(task)
Tasks.const_get(task, false).execute(@pipeline, @finding_maps)
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
# Service for starting the ingestion of the security reports
# into the database.
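# Entry point called from StoreSecurityReportsWorker when the
# `security_report_ingestion_framework` feature flag is enabled:
#
#   Security::Ingestion::IngestReportsService.execute(pipeline)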
class IngestReportsService
def self.execute(pipeline)
new(pipeline).execute
end
def initialize(pipeline)
@pipeline = pipeline
end
def execute
store_reports
mark_project_as_vulnerable!
set_latest_pipeline!
end
private
attr_reader :pipeline
delegate :project, to: :pipeline, private: true
def store_reports
latest_security_scans.flat_map(&method(:ingest))
.then(&method(:mark_resolved_vulnerabilities))
end
def latest_security_scans
pipeline.security_scans.without_errors.latest
end
def ingest(security_scan)
IngestReportService.execute(security_scan)
end
# This can cause issues if we have lots of existing ids
# or if we try to update lots of records at once.
# Maybe we can extract this into a different service class
# and update the records iteratively.
def mark_resolved_vulnerabilities(existing_ids)
project.vulnerabilities
.id_not_in(existing_ids)
.update_all(resolved_on_default_branch: true)
end
def mark_project_as_vulnerable!
project.project_setting.update!(has_vulnerabilities: true)
end
def set_latest_pipeline!
Vulnerabilities::Statistic.set_latest_pipeline_with(pipeline)
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Updates the `vulnerability_id` attribute of finding records.
class AttachFindingsToVulnerabilities < AbstractTask
include BulkUpdatableTask
self.model = Vulnerabilities::Finding
private
def attributes
new_finding_maps.map { |finding_map| attributes_for(finding_map) }
end
def new_finding_maps
@new_finding_maps ||= finding_maps.select(&:new_record)
end
def attributes_for(finding_map)
{
id: finding_map.finding_id,
vulnerability_id: finding_map.vulnerability_id
}
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Links findings with identifiers by creating the
# `Vulnerabilities::FindingIdentifier` records.
class IngestFindingIdentifiers < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingIdentifier
self.unique_by = %i[occurrence_id identifier_id].freeze
private
def attributes
finding_maps.flat_map do |finding_map|
finding_map.identifier_ids.map do |identifier_id|
{
occurrence_id: finding_map.finding_id,
identifier_id: identifier_id
}
end
end
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Creates new `Vulnerabilities::FindingLink` records.
class IngestFindingLinks < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingLink
private
def attributes
finding_maps.flat_map do |finding_map|
new_links_for(finding_map).map do |link|
{
vulnerability_occurrence_id: finding_map.finding_id,
name: link.name,
url: link.url
}
end
end
end
def new_links_for(finding_map)
existing_links = existing_finding_links[finding_map.finding_id].to_a
existing_urls = existing_links.map(&:url)
finding_map.report_finding.links.reject { |link| existing_urls.include?(link.url) }
end
def existing_finding_links
@existing_finding_links ||= Vulnerabilities::FindingLink.by_finding_id(finding_ids)
.group_by(&:vulnerability_occurrence_id)
end
def finding_ids
finding_maps.map(&:finding_id)
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Links findings with pipelines by creating the
# `Vulnerabilities::FindingPipeline` records.
class IngestFindingPipelines < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingPipeline
private
def attributes
finding_maps.map do |finding_map|
{ pipeline_id: pipeline.id, occurrence_id: finding_map.finding_id }
end
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestFindingSignatures < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingSignature
self.unique_by = %i[finding_id algorithm_type signature_sha].freeze
private
def attributes
finding_maps.flat_map { |finding_map| attributes_for(finding_map) }
end
def attributes_for(finding_map)
finding_map.report_finding.signatures.map do |signature|
{
finding_id: finding_map.finding_id,
algorithm_type: signature.algorithm_type,
signature_sha: signature.signature_sha
}
end
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestFindings < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::Finding
self.unique_by = :uuid
self.uses = %i[id vulnerability_id].freeze
private
delegate :project, to: :pipeline, private: true
def after_ingest
return_data.each_with_index do |(finding_id, vulnerability_id), index|
finding_map = finding_maps[index]
finding_map.finding_id = finding_id
finding_map.vulnerability_id = vulnerability_id
end
end
def attributes
finding_maps.map { |finding_map| finding_map.to_hash.merge(project_id: project.id) }
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# UPSERTs the identifiers for the given findings and
# sets the identifier IDs for each `finding_map`.
class IngestIdentifiers < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::Identifier
self.unique_by = %i[project_id fingerprint]
self.uses = %i[fingerprint id]
private
delegate :project, to: :pipeline, private: true
def after_ingest
return_data.to_h.then do |fingerprint_to_id_map|
finding_maps.each { |finding_map| finding_map.set_identifier_ids_by(fingerprint_to_id_map) }
end
end
def attributes
report_identifiers.map do |identifier|
identifier.to_hash.merge!(project_id: project.id)
end
end
def report_identifiers
@report_identifiers ||= finding_maps.flat_map(&:identifiers).uniq(&:fingerprint)
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Creates Vulnerabilities::Remediation records for the new remediations
# and updates the Vulnerabilities::FindingRemediation records.
class IngestRemediations < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingRemediation
self.unique_by = %i[vulnerability_occurrence_id vulnerability_remediation_id]
self.uses = :id
private
delegate :project, to: :pipeline
def after_ingest
Vulnerabilities::FindingRemediation.by_finding_id(finding_maps.map(&:finding_id))
.id_not_in(return_data.flatten)
.delete_all
end
def attributes
finding_maps.flat_map do |finding_map|
remediations_for(finding_map.report_finding).map do |remediation|
{
vulnerability_occurrence_id: finding_map.finding_id,
vulnerability_remediation_id: remediation.id
}
end
end
end
def remediations_for(report_finding)
checksums = report_finding.remediations.map(&:checksum)
return [] unless checksums.present?
all_remediations.select { |remediation| checksums.include?(remediation.checksum) }
end
def all_remediations
@all_remediations ||= new_remediations + existing_remediations
end
def new_remediations
new_report_remediations.map do |remediation|
project.vulnerability_remediations.create(summary: remediation.summary, file: remediation.diff_file, checksum: remediation.checksum)
end
end
def new_report_remediations
existing_remediation_checksums = existing_remediations.map(&:checksum)
report_remediations.select { |remediation| !existing_remediation_checksums.include?(remediation.checksum) }
end
def existing_remediations
@existing_remediations ||= project.vulnerability_remediations.by_checksum(report_remediations.map(&:checksum)).to_a
end
def report_remediations
@report_remediations ||= finding_maps.map(&:report_finding).flat_map(&:remediations).uniq(&:checksum)
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestVulnerabilities < AbstractTask
def execute
create_new_vulnerabilities
update_existing_vulnerabilities
finding_maps
end
private
def create_new_vulnerabilities
IngestVulnerabilities::Create.new(pipeline, partitioned_maps.first).execute
end
def update_existing_vulnerabilities
IngestVulnerabilities::Update.new(pipeline, partitioned_maps.second).execute
end
def partitioned_maps
@partitioned_maps ||= finding_maps.partition { |finding_map| finding_map.vulnerability_id.nil? }
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestVulnerabilities
# Creates new vulnerability records in the database for the given
# finding maps by using a single database query.
class Create < AbstractTask
include BulkInsertableTask
self.model = Vulnerability
self.uses = :id
private
def after_ingest
return_data.each_with_index do |vulnerability_id, index|
finding_map = finding_maps[index]
finding_map.vulnerability_id = vulnerability_id
finding_map.new_record = true
end
end
def attributes
finding_maps.map { |finding_map| attributes_for(finding_map.report_finding, dismissal_feedback[finding_map.uuid]) }
end
def attributes_for(report_finding, feedback)
{
author_id: pipeline.user_id,
project_id: pipeline.project_id,
title: report_finding.name.to_s.truncate(::Issuable::TITLE_LENGTH_MAX),
state: :detected,
severity: report_finding.severity,
confidence: report_finding.confidence,
report_type: report_finding.report_type,
dismissed_at: feedback&.created_at,
dismissed_by_id: feedback&.author_id
}
end
def dismissal_feedback
@dismissal_feedback ||= Vulnerabilities::Feedback.by_finding_uuid(finding_uuids).index_by(&:finding_uuid)
end
def finding_uuids
finding_maps.map(&:uuid)
end
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestVulnerabilities
# Updates the existing vulnerability records
# by using a single database query.
class Update < AbstractTask
include BulkUpdatableTask
self.model = Vulnerability
private
def attributes
finding_maps.map { |finding_map| attributes_for(finding_map.vulnerability_id, finding_map.report_finding) }
end
def attributes_for(vulnerability_id, report_finding)
{
id: vulnerability_id,
title: report_finding.name.truncate(::Issuable::TITLE_LENGTH_MAX),
severity: report_finding.severity,
confidence: report_finding.confidence,
updated_at: Time.zone.now
}
end
end
end
end
end
end
...@@ -43,7 +43,7 @@ module Security
def security_scan
@security_scan ||= Security::Scan.safe_find_or_create_by!(build: artifact.job, scan_type: artifact.file_type) do |scan|
- scan.info['errors'] = security_report.errors.map(&:stringify_keys) if security_report.errored?
+ scan.processing_errors = security_report.errors.map(&:stringify_keys) if security_report.errored?
end
end
...
...@@ -18,7 +18,11 @@ class StoreSecurityReportsWorker # rubocop:disable Scalability/IdempotentWorker
Ci::Pipeline.find(pipeline_id).try do |pipeline|
break unless pipeline.project.can_store_security_reports?
if Feature.enabled?(:security_report_ingestion_framework, pipeline.project)
::Security::Ingestion::IngestReportsService.execute(pipeline)
else
::Security::StoreReportsService.new(pipeline).execute
end
if revoke_secret_detection_token?(pipeline)
logger.info "StoreSecurityReportsWorker: token revocation started for pipeline: #{pipeline.id}"
...
# frozen_string_literal: true
FactoryBot.define do
factory :ci_reports_security_finding_signature, class: '::Gitlab::Ci::Reports::Security::FindingSignature' do
algorithm_type { :hash }
signature_value { SecureRandom.hex(50) }
skip_create
initialize_with do
::Gitlab::Ci::Reports::Security::FindingSignature.new(**attributes)
end
end
end
# frozen_string_literal: true
FactoryBot.define do
factory :finding_map, class: '::Security::Ingestion::FindingMap' do
security_finding
report_finding factory: :ci_reports_security_finding
trait :with_finding do
finding factory: :vulnerabilities_finding
end
trait :new_record do
with_finding
new_record { true }
vulnerability factory: :vulnerability
end
initialize_with do
::Security::Ingestion::FindingMap.new(*attributes.values_at(:security_finding, :report_finding)).tap do |object|
object.finding_id = attributes[:finding]&.id
object.vulnerability_id = attributes[:vulnerability]&.id
object.new_record = attributes[:new_record]
object.identifier_ids = attributes[:identifier_ids].to_a
end
end
skip_create
end
end
...@@ -6,5 +6,9 @@ FactoryBot.define do
build factory: [:ci_build, :success]
pipeline { build.pipeline }
project { build.project }
trait :with_error do
info { { errors: [{ type: 'ParsingError', message: 'Unknown error happened' }] } }
end
end
end
...@@ -106,6 +106,75 @@ RSpec.describe Security::Scan do
it { is_expected.to match_array(expected_scans) }
end
describe '.without_errors' do
let(:scan_1) { create(:security_scan, :with_error) }
let(:scan_2) { create(:security_scan) }
subject { described_class.without_errors }
it { is_expected.to contain_exactly(scan_2) }
end
describe '.latest' do
let!(:latest_scan) { create(:security_scan, latest: true) }
let!(:retried_scan) { create(:security_scan, latest: false) }
subject { described_class.latest }
it { is_expected.to match_array([latest_scan]) }
end
describe '#report_findings' do
let(:artifact) { create(:ee_ci_job_artifact, :dast) }
let(:scan) { create(:security_scan, build: artifact.job) }
let(:artifact_finding_uuids) { artifact.security_report.findings.map(&:uuid) }
subject { scan.report_findings.map(&:uuid) }
it { is_expected.to match_array(artifact_finding_uuids) }
end
describe '#processing_errors' do
let(:scan) { build(:security_scan, :with_error) }
subject { scan.processing_errors }
it { is_expected.to eq([{ 'type' => 'ParsingError', 'message' => 'Unknown error happened' }]) }
end
describe '#processing_errors=' do
let(:scan) { create(:security_scan) }
subject(:set_processing_errors) { scan.processing_errors = [:foo] }
it 'sets the processing errors' do
expect { set_processing_errors }.to change { scan.info['errors'] }.from(nil).to([:foo])
end
end
describe '#add_processing_error!' do
let(:error) { { type: 'foo', message: 'bar' } }
subject(:add_processing_error) { scan.add_processing_error!(error) }
context 'when the scan does not have any errors' do
let(:scan) { create(:security_scan) }
it 'persists the error' do
expect { add_processing_error }.to change { scan.reload.info['errors'] }.from(nil).to([{ 'type' => 'foo', 'message' => 'bar' }])
end
end
context 'when the scan already has some errors' do
let(:scan) { create(:security_scan, :with_error) }
it 'persists the new error with the existing ones' do
expect { add_processing_error }.to change { scan.reload.info['errors'] }.from([{ 'type' => 'ParsingError', 'message' => 'Unknown error happened' }])
.to([{ 'type' => 'ParsingError', 'message' => 'Unknown error happened' }, { 'type' => 'foo', 'message' => 'bar' }])
end
end
end
it_behaves_like 'having unique enum values'
it 'sets `project_id` and `pipeline_id` before save' do
...
...@@ -159,6 +159,15 @@ RSpec.describe Vulnerabilities::Feedback do
end
end
describe '.by_finding_uuid' do
let(:feedback_1) { create(:vulnerability_feedback) }
let(:feedback_2) { create(:vulnerability_feedback) }
subject { described_class.by_finding_uuid([feedback_2.finding_uuid]) }
it { is_expected.to eq([feedback_2]) }
end
describe '.with_category' do
it 'filters by category' do
described_class.categories.each do |category, _|
...
...@@ -5,4 +5,13 @@ require 'spec_helper'
RSpec.describe Vulnerabilities::FindingRemediation do
it { is_expected.to belong_to(:finding).class_name('Vulnerabilities::Finding').required }
it { is_expected.to belong_to(:remediation).class_name('Vulnerabilities::Remediation').required }
describe '.by_finding_id' do
let(:finding_1) { create(:vulnerabilities_finding) }
let!(:remediation) { create(:vulnerabilities_remediation, findings: [finding_1]) }
subject { described_class.by_finding_id(finding_1.id) }
it { is_expected.to eq(remediation.finding_remediations) }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::FindingMapCollection do
describe '#each_slice' do
let(:security_scan) { create(:security_scan) }
let(:security_findings) { create_list(:security_finding, 3, scan: security_scan, deduplicated: true) }
let(:report_findings) { [] }
let(:finding_map_collection) { described_class.new(security_scan) }
let(:finding_maps) { [] }
let(:finding_pairs) { finding_maps.map { |finding_map| [finding_map.security_finding, finding_map.report_finding] } }
let(:test_block) { proc { |slice| finding_maps.concat(slice) } }
let(:expected_finding_pairs) do
[
[security_findings[0], report_findings[0]],
[security_findings[1], report_findings[1]],
[security_findings[2], report_findings[2]]
]
end
before do
create(:security_finding, scan: security_scan, deduplicated: false)
security_findings.each { |security_finding| report_findings << create(:ci_reports_security_finding, uuid: security_finding.uuid) }
allow(security_scan).to receive(:report_findings).and_return(report_findings)
allow(finding_maps).to receive(:concat).and_call_original
end
context 'when the size argument given' do
subject(:run_each_slice) { finding_map_collection.each_slice(1, &test_block) }
it 'calls the given block for each slice by the given size argument' do
run_each_slice
expect(finding_maps).to have_received(:concat).exactly(3).times
expect(finding_pairs).to match_array(expected_finding_pairs)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::FindingMap do
let(:security_finding) { build(:security_finding) }
let(:identifier) { build(:ci_reports_security_identifier) }
let(:report_finding) { build(:ci_reports_security_finding, identifiers: [identifier]) }
let(:finding_map) { build(:finding_map, security_finding: security_finding, report_finding: report_finding) }
describe '#uuid' do
subject { finding_map }
it { is_expected.to delegate_method(:uuid).to(:security_finding) }
end
describe '#identifiers' do
subject { finding_map.identifiers }
it { is_expected.to eq([identifier]) }
end
describe '#set_identifier_ids_by' do
let(:identifiers_map) { { identifier.fingerprint => 1 } }
subject(:set_identifier_ids) { finding_map.set_identifier_ids_by(identifiers_map) }
it 'changes the identifier_ids of the finding_map' do
expect { set_identifier_ids }.to change { finding_map.identifier_ids }.from([]).to([1])
end
end
describe '#to_hash' do
let(:expected_hash) do
{
uuid: security_finding.uuid,
scanner_id: security_finding.scanner_id,
primary_identifier_id: nil,
location_fingerprint: report_finding.location.fingerprint,
project_fingerprint: report_finding.project_fingerprint,
name: 'Cipher with no integrity',
report_type: :sast,
severity: :high,
confidence: :medium,
metadata_version: 'sast:1.0',
details: {},
raw_metadata: report_finding.raw_metadata,
description: 'The cipher does not provide data integrity update 1',
solution: 'GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.',
message: nil,
cve: nil,
location: {
"class" => "com.gitlab.security_products.tests.App",
"end_line" => 29,
"file" => "maven/src/main/java/com/gitlab/security_products/tests/App.java",
"method" => "insecureCypher",
"start_line" => 29
}
}
end
subject { finding_map.to_hash }
it { is_expected.to eq(expected_hash) }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::IngestReportService do
let(:service_object) { described_class.new(security_scan) }
describe '#execute' do
let(:security_scan) { create(:security_scan, scan_type: :sast) }
subject(:ingest_report) { service_object.execute }
before do
create_list(:security_finding, 2, scan: security_scan, deduplicated: true)
stub_const("#{described_class}::BATCH_SIZE", 1)
allow(Security::Ingestion::FindingMapCollection).to receive(:new).with(security_scan).and_return([:foo, :bar])
allow(Security::Ingestion::IngestReportSliceService).to receive(:execute).with(security_scan.pipeline, [:foo]).and_return([1])
allow(Security::Ingestion::IngestReportSliceService).to receive(:execute).with(security_scan.pipeline, [:bar]).and_return([2])
end
it 'calls IngestReportSliceService for each slice of findings and accumulates the return values' do
expect(ingest_report).to eq([1, 2])
expect(Security::Ingestion::IngestReportSliceService).to have_received(:execute).twice
end
context 'when ingesting a slice of vulnerabilities fails' do
let(:exception) { RuntimeError.new }
let(:expected_processing_error) { { 'type' => 'IngestionError', 'message' => 'Ingestion failed for some vulnerabilities' } }
before do
allow(Security::Ingestion::IngestReportSliceService).to receive(:execute).with(security_scan.pipeline, [:foo]).and_raise(exception)
allow(Gitlab::ErrorTracking).to receive(:track_exception)
end
it 'tracks the exception' do
ingest_report
expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(exception)
end
it 'captures the error and sets the processing error for security scan record' do
expect { ingest_report }.to change { security_scan.processing_errors }.from([]).to([expected_processing_error])
end
it 'accumulates the return value of only the succeeded executions' do
expect(ingest_report).to eq([2])
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::IngestReportSliceService do
let(:service_object) { described_class.new(pipeline, finding_maps) }
let(:pipeline) { create(:ci_pipeline) }
let(:finding_maps) { [create(:finding_map)] }
describe '#execute' do
subject(:ingest_report_slice) { service_object.execute }
before do
described_class::TASKS.each do |task_name|
task = Object.const_get("Security::Ingestion::Tasks::#{task_name}", false)
allow(task).to receive(:execute)
end
end
it 'runs the series of tasks in correct order' do
ingest_report_slice
expect(Security::Ingestion::Tasks::IngestIdentifiers).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindings).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestVulnerabilities).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::AttachFindingsToVulnerabilities).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindingPipelines).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindingIdentifiers).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindingLinks).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindingSignatures).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestRemediations).to have_received(:execute).ordered.with(pipeline, finding_maps)
end
context 'when an exception happens' do
let(:mock_task_1) { double(:task) }
let(:mock_task_2) { double(:task) }
before do
allow(mock_task_1).to receive(:execute) { |pipeline, *| pipeline.update_column(:updated_at, 3.months.from_now) }
allow(mock_task_2).to receive(:execute) { raise 'foo' }
allow(Security::Ingestion::Tasks).to receive(:const_get).with(:IngestIdentifiers, false).and_return(mock_task_1)
allow(Security::Ingestion::Tasks).to receive(:const_get).with(:IngestFindings, false).and_return(mock_task_2)
end
it 'rolls back the recent changes so as not to leave the database in an inconsistent state' do
expect { ingest_report_slice }.to raise_error('foo')
.and not_change { pipeline.reload.updated_at }
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::IngestReportsService do
let(:service_object) { described_class.new(pipeline) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
let_it_be(:security_scan_1) { create(:security_scan, build: build, scan_type: :sast) }
let_it_be(:security_scan_2) { create(:security_scan, :with_error, build: build, scan_type: :dast) }
let_it_be(:security_scan_3) { create(:security_scan, build: build, scan_type: :secret_detection) }
let_it_be(:vulnerability_1) { create(:vulnerability, project: pipeline.project) }
let_it_be(:vulnerability_2) { create(:vulnerability, project: pipeline.project) }
describe '#execute' do
let(:ids_1) { [vulnerability_1.id] }
let(:ids_2) { [] }
subject(:ingest_reports) { service_object.execute }
before do
allow(Security::Ingestion::IngestReportService).to receive(:execute).and_return(ids_1, ids_2)
end
it 'calls IngestReportService for each succeeded security scan' do
ingest_reports
expect(Security::Ingestion::IngestReportService).to have_received(:execute).twice
expect(Security::Ingestion::IngestReportService).to have_received(:execute).once.with(security_scan_1)
expect(Security::Ingestion::IngestReportService).to have_received(:execute).once.with(security_scan_3)
end
it 'sets the resolved vulnerabilities, latest pipeline ID and has_vulnerabilities flag' do
expect { ingest_reports }.to change { project.reload.project_setting&.has_vulnerabilities }.to(true)
.and change { project.reload.vulnerability_statistic&.latest_pipeline_id }.to(pipeline.id)
.and change { vulnerability_2.reload.resolved_on_default_branch }.from(false).to(true)
.and not_change { vulnerability_1.reload.resolved_on_default_branch }.from(false)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::AttachFindingsToVulnerabilities do
describe '#execute' do
let(:pipeline) { create(:ci_pipeline) }
let(:finding_maps) { create_list(:finding_map, 3, :new_record) }
let(:service_object) { described_class.new(pipeline, finding_maps) }
let(:finding_1) { Vulnerabilities::Finding.find(finding_maps.first.finding_id) }
let(:finding_2) { Vulnerabilities::Finding.find(finding_maps.second.finding_id) }
let(:finding_3) { Vulnerabilities::Finding.find(finding_maps.third.finding_id) }
let(:vulnerability_id_1) { finding_maps.first.vulnerability_id }
let(:vulnerability_id_2) { finding_maps.second.vulnerability_id }
let(:vulnerability_id_3) { finding_maps.third.vulnerability_id }
subject(:attach_findings_to_vulnerabilities) { service_object.execute }
before do
finding_maps.third.new_record = false
end
it 'associates the findings with vulnerabilities for the new records' do
expect { attach_findings_to_vulnerabilities }.to change { finding_1.reload.vulnerability_id }.from(nil).to(vulnerability_id_1)
.and change { finding_2.reload.vulnerability_id }.from(nil).to(vulnerability_id_2)
.and not_change { finding_3.reload.vulnerability_id }.from(nil)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::IngestFindingIdentifiers do
describe '#execute' do
let(:pipeline) { create(:ci_pipeline) }
let(:identifier) { create(:vulnerabilities_identifier) }
let(:finding_1) { create(:vulnerabilities_finding) }
let(:finding_2) { create(:vulnerabilities_finding) }
let(:finding_map_1) { create(:finding_map, finding: finding_1, identifier_ids: [identifier.id]) }
let(:finding_map_2) { create(:finding_map, finding: finding_2, identifier_ids: [identifier.id]) }
let(:service_object) { described_class.new(pipeline, [finding_map_1, finding_map_2]) }
subject(:ingest_finding_identifiers) { service_object.execute }
before do
finding_1.identifiers << identifier
end
it 'associates findings with the identifiers' do
expect { ingest_finding_identifiers }.to change { Vulnerabilities::FindingIdentifier.count }.by(1)
.and change { finding_2.reload.identifiers }.from([]).to([identifier])
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::IngestFindingLinks do
describe '#execute' do
let(:pipeline) { create(:ci_pipeline) }
let(:finding_link) { create(:ci_reports_security_link) }
let(:finding_1) { create(:vulnerabilities_finding) }
let(:finding_2) { create(:vulnerabilities_finding) }
let(:report_finding_1) { create(:ci_reports_security_finding, links: [finding_link]) }
let(:report_finding_2) { create(:ci_reports_security_finding, links: [finding_link]) }
let(:finding_map_1) { create(:finding_map, finding: finding_1, report_finding: report_finding_1) }
let(:finding_map_2) { create(:finding_map, finding: finding_2, report_finding: report_finding_2) }
let(:service_object) { described_class.new(pipeline, [finding_map_1, finding_map_2]) }
subject(:ingest_finding_links) { service_object.execute }
before do
create(:finding_link, finding: finding_2, url: finding_link.url)
end
it 'creates finding links for the new records' do
expect { ingest_finding_links }.to change { Vulnerabilities::FindingLink.count }.by(1)
.and change { finding_1.finding_links.count }.by(1)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::IngestFindingPipelines do
describe '#execute' do
let(:pipeline) { create(:ci_pipeline) }
let(:finding) { create(:vulnerabilities_finding) }
let(:finding_maps) { create_list(:finding_map, 1, finding: finding) }
let(:service_object) { described_class.new(pipeline, finding_maps) }
subject(:ingest_finding_pipelines) { service_object.execute }
it 'associates the findings with pipeline' do
expect { ingest_finding_pipelines }.to change { finding.finding_pipelines.pluck(:pipeline_id) }.from([]).to([pipeline.id])
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::IngestFindingSignatures do
describe '#execute' do
let(:pipeline) { create(:ci_pipeline) }
let(:finding_1) { create(:vulnerabilities_finding) }
let(:finding_2) { create(:vulnerabilities_finding) }
let(:report_signature) { create(:ci_reports_security_finding_signature) }
let(:report_finding_1) { create(:ci_reports_security_finding, signatures: [report_signature]) }
let(:report_finding_2) { create(:ci_reports_security_finding, signatures: [report_signature]) }
let(:finding_map_1) { create(:finding_map, finding: finding_1, report_finding: report_finding_1) }
let(:finding_map_2) { create(:finding_map, finding: finding_2, report_finding: report_finding_2) }
let(:service_object) { described_class.new(pipeline, [finding_map_1, finding_map_2]) }
subject(:ingest_finding_signatures) { service_object.execute }
before do
create(:vulnerabilities_finding_signature, finding: finding_1, signature_sha: report_signature.signature_sha)
end
it 'ingests new finding signatures' do
expect { ingest_finding_signatures }.to change { Vulnerabilities::FindingSignature.count }.by(1)
.and change { finding_2.signatures.count }.by(1)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::IngestFindings do
describe '#execute' do
let_it_be(:pipeline) { create(:ci_pipeline) }
let_it_be(:identifier) { create(:vulnerabilities_identifier) }
let(:finding_maps) { create_list(:finding_map, 4, identifier_ids: [identifier.id]) }
let!(:existing_finding) { create(:vulnerabilities_finding, :detected, uuid: finding_maps.first.uuid) }
subject(:ingest_findings) { described_class.new(pipeline, finding_maps).execute }
it 'ingests findings' do
expect { ingest_findings }.to change { Vulnerabilities::Finding.count }.by(3)
end
it 'sets the finding and vulnerability ids' do
expected_finding_ids = Array.new(3) { an_instance_of(Integer) }.unshift(existing_finding.id)
expected_vulnerability_ids = [existing_finding.vulnerability_id, nil, nil, nil]
expect { ingest_findings }.to change { finding_maps.map(&:finding_id) }.from(Array.new(4)).to(expected_finding_ids)
.and change { finding_maps.map(&:vulnerability_id) }.from(Array.new(4)).to(expected_vulnerability_ids)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::IngestIdentifiers do
describe '#execute' do
let_it_be(:pipeline) { create(:ci_pipeline) }
let(:existing_fingerprint) { Digest::SHA1.hexdigest('type:id') }
let(:vulnerability_identifier) { create(:vulnerabilities_identifier, project: pipeline.project, fingerprint: existing_fingerprint, name: 'bar') }
let(:existing_report_identifier) { create(:ci_reports_security_identifier, external_id: 'id', external_type: 'type') }
let(:extra_identifiers) { Array.new(21) { |index| create(:ci_reports_security_identifier, external_id: "id-#{index}", external_type: 'type') } }
let(:identifiers) { extra_identifiers.unshift(existing_report_identifier) }
let(:expected_fingerprints) { Array.new(19) { |index| Digest::SHA1.hexdigest("type:id-#{index}") }.unshift(existing_fingerprint).sort }
let(:report_finding) { create(:ci_reports_security_finding, identifiers: identifiers) }
let(:finding_map) { create(:finding_map, report_finding: report_finding) }
let(:service_object) { described_class.new(pipeline, [finding_map]) }
let(:project_identifiers) { pipeline.project.vulnerability_identifiers }
subject(:ingest_identifiers) { service_object.execute }
it 'creates new records and updates the existing ones' do
expect { ingest_identifiers }.to change { project_identifiers.count }.from(1).to(20)
.and change { vulnerability_identifier.reload.name }
end
it 'sets the identifier_ids for the finding_map object' do
expect { ingest_identifiers }.to(
change { project_identifiers.where(id: finding_map.identifier_ids).pluck(:fingerprint).sort }
.from([])
.to(expected_fingerprints))
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::IngestRemediations do
describe '#execute' do
let(:pipeline) { create(:ci_pipeline) }
let(:existing_checksum) { Digest::SHA256.hexdigest('foo') }
let(:existing_remediation_1) { create(:vulnerabilities_remediation, project: pipeline.project, checksum: existing_checksum) }
let(:existing_remediation_2) { create(:vulnerabilities_remediation, project: pipeline.project) }
let(:report_remediation_1) { create(:ci_reports_security_remediation, diff: 'foo') }
let(:report_remediation_2) { create(:ci_reports_security_remediation, diff: 'bar') }
let(:finding_1) { create(:vulnerabilities_finding, remediations: [existing_remediation_1, existing_remediation_2]) }
let(:finding_2) { create(:vulnerabilities_finding) }
let(:report_finding_1) { create(:ci_reports_security_finding, remediations: [report_remediation_1, report_remediation_2]) }
let(:report_finding_2) { create(:ci_reports_security_finding, remediations: [report_remediation_1, report_remediation_2]) }
let(:finding_map_1) { create(:finding_map, finding: finding_1, report_finding: report_finding_1) }
let(:finding_map_2) { create(:finding_map, finding: finding_2, report_finding: report_finding_2) }
let!(:service_object) { described_class.new(pipeline, [finding_map_1, finding_map_2]) }
subject(:ingest_finding_remediations) { service_object.execute }
it 'creates remediations and updates the associations' do
expect { ingest_finding_remediations }.to change { Vulnerabilities::Remediation.count }.by(1)
.and change { existing_remediation_2.reload.findings }.from([finding_1]).to([])
.and change { finding_2.reload.association(:remediations).scope.count }.from(0).to(2)
.and not_change { finding_1.reload.association(:remediations).scope.count }.from(2)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::IngestVulnerabilities do
describe '#execute' do
let_it_be(:user) { create(:user) }
let_it_be(:pipeline) { create(:ci_pipeline, user: user) }
let_it_be(:identifier) { create(:vulnerabilities_identifier) }
let(:finding_maps) { create_list(:finding_map, 4) }
let(:existing_finding) { create(:vulnerabilities_finding, :detected) }
subject(:ingest_vulnerabilities) { described_class.new(pipeline, finding_maps).execute }
before do
finding_maps.first.vulnerability_id = existing_finding.vulnerability_id
finding_maps.each { |finding_map| finding_map.identifier_ids << identifier.id }
end
it 'ingests vulnerabilities' do
expect { ingest_vulnerabilities }.to change { Vulnerability.count }.by(3)
end
end
end
...@@ -63,20 +63,38 @@ RSpec.describe StoreSecurityReportsWorker do
stub_licensed_features(report_type => true)
end
- it 'executes StoreReportsService for given pipeline' do
- expect(Security::StoreReportsService).to receive(:new)
- .with(pipeline).once.and_call_original
+ it 'scans security reports for token revocation' do
+ expect(::ScanSecurityReportSecretsWorker).to receive(:perform_async)
described_class.new.perform(pipeline.id)
end
- it 'scans security reports for token revocation' do
- expect(::ScanSecurityReportSecretsWorker).to receive(:perform_async)
+ context 'when the `security_report_ingestion_framework` feature is enabled' do
+ before do
stub_feature_flags(security_report_ingestion_framework: project)
end
it 'executes IngestReportsService for given pipeline' do
expect(::Security::Ingestion::IngestReportsService).to receive(:execute).with(pipeline)
described_class.new.perform(pipeline.id)
end
end
context 'when the `security_report_ingestion_framework` feature is disabled' do
before do
stub_feature_flags(security_report_ingestion_framework: false)
end
it 'executes StoreReportsService for given pipeline' do
expect(Security::StoreReportsService).to receive(:new)
.with(pipeline).once.and_call_original
described_class.new.perform(pipeline.id)
end
end
end
end
context "when security reports feature is not available" do
let(:default_branch) { pipeline.ref }
...