Commit 72800e57 authored by Tetiana Chupryna

Merge branch '343332_remove_security_report_ingestion_framework_ff' into 'master'

Remove `security_report_ingestion_framework` feature flag

See merge request gitlab-org/gitlab!81021
parents 85832b8e db93cc96
---
name: security_report_ingestion_framework
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66735
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/343332
milestone: '14.4'
type: development
group: group::threat insights
default_enabled: true
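
Before this change, the flag gated which ingestion path StoreSecurityReportsWorker took. A minimal sketch of the removed gate, reconstructed from the worker diff further down (a fragment from inside the worker's perform block, not runnable on its own):

if Feature.enabled?(:security_report_ingestion_framework, pipeline.project, default_enabled: :yaml)
  ::Security::Ingestion::IngestReportsService.execute(pipeline)
else
  ::Security::StoreReportsService.new(pipeline).execute
end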
# frozen_string_literal: true
module Security
# Service for storing a given security report into the database.
#
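# Typical invocation, as exercised by the specs below (a sketch; return
# shape per BaseService):
#
#   Security::StoreReportService.new(pipeline, report).execute
#   # => { status: :success } or { status: :error, message: '...' }
#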
class StoreReportService < ::BaseService
include Gitlab::Utils::StrongMemoize
attr_reader :pipeline, :report, :project, :vulnerability_finding_to_finding_map, :new_vulnerabilities
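# Bulk writes (scanners, identifiers, finding joins, flags, links) are issued
# with insert_all/upsert_all in batches of this size.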
BATCH_SIZE = 1000
def initialize(pipeline, report)
@pipeline = pipeline
@report = report
@project = @pipeline.project
@vulnerability_finding_to_finding_map = {}
@new_vulnerabilities = []
end
def execute
# Ensure we're not trying to insert data twice for this report
return error("#{@report.type} report already stored for this pipeline, skipping...") if executed?
OverrideUuidsService.execute(@report) if override_uuids?
vulnerability_ids = create_all_vulnerabilities!
mark_as_resolved_except(vulnerability_ids)
execute_new_vulnerabilities_hooks
start_auto_fix
success
end
private
def executed?
pipeline.vulnerability_findings.report_type(@report.type).any?
end
def override_uuids?
project.licensed_feature_available?(:vulnerability_finding_signatures)
end
def create_all_vulnerabilities!
# Look for existing Findings using UUID
finding_uuids = @report.findings.map(&:uuid)
vulnerability_findings_by_uuid = project.vulnerability_findings
.where(uuid: finding_uuids) # rubocop: disable CodeReuse/ActiveRecord
.to_h { |vf| [vf.uuid, vf] }
update_vulnerability_scanners!(@report.findings)
vulnerability_ids = @report.findings.map do |finding|
create_vulnerability_finding(vulnerability_findings_by_uuid, finding)&.id
end.compact.uniq
update_vulnerability_links_info if Feature.enabled?(:vulnerability_finding_replace_metadata)
create_vulnerability_pipeline_objects
update_vulnerabilities_identifiers
update_vulnerabilities_finding_identifiers
if project.licensed_feature_available?(:sast_fp_reduction)
create_vulnerability_flags_info
end
vulnerability_ids
end
def execute_new_vulnerabilities_hooks
new_vulnerabilities.each { |v| v.execute_hooks }
end
def mark_as_resolved_except(vulnerability_ids)
return if ::Vulnerabilities::Finding::REPORT_TYPES_REQUIRING_MANUAL_RESOLUTION.include?(report.type)
project.vulnerabilities
.with_report_types(report.type)
.id_not_in(vulnerability_ids)
.update_all(resolved_on_default_branch: true)
end
def create_vulnerability_finding(vulnerability_findings_by_uuid, finding)
unless finding.valid?
put_warning_for(finding)
return
end
vulnerability_params = finding.to_hash.except(:compare_key, :identifiers, :location, :scanner, :scan, :links, :signatures, :flags)
entity_params = Gitlab::Json.parse(vulnerability_params&.dig(:raw_metadata)).slice('description', 'message', 'solution', 'cve', 'location').symbolize_keys
# Vulnerabilities::Finding (`vulnerability_occurrences`)
vulnerability_finding = vulnerability_findings_by_uuid[finding.uuid] ||
find_or_create_vulnerability_finding(finding, vulnerability_params.merge(entity_params))
return unless vulnerability_finding
vulnerability_finding_to_finding_map[vulnerability_finding] = finding
update_vulnerability_finding(vulnerability_finding, vulnerability_params.merge(location: entity_params[:location], location_fingerprint: finding.location_fingerprint))
reset_remediations_for(vulnerability_finding, finding)
if project.licensed_feature_available?(:vulnerability_finding_signatures)
update_finding_signatures(finding, vulnerability_finding)
end
create_vulnerability(vulnerability_finding, pipeline)
end
# rubocop: disable CodeReuse/ActiveRecord
def find_or_create_vulnerability_finding(finding, create_params)
find_params = {
scanner: scanners_objects[finding.scanner.key],
primary_identifier: identifiers_objects[finding.primary_identifier.key],
location_fingerprint: finding.location_fingerprint
}
begin
# If there's no Finding then we're dealing with one of two cases:
# 1. The Finding is a new one
# 2. The Finding is already saved but has UUIDv4
project.vulnerability_findings
.create_with(create_params)
.find_or_initialize_by(find_params).tap do |f|
f.location = create_params[:location]
f.uuid = finding.uuid
f.save!
end
rescue ActiveRecord::RecordNotUnique => e
# This might happen if we're processing another report in parallel and it finds the same Finding
# faster. In that case we need to perform the lookup again
vulnerability_finding = project.vulnerability_findings.reset.find_by(uuid: finding.uuid)
return vulnerability_finding if vulnerability_finding
vulnerability_finding = project.vulnerability_findings.reset.find_by(find_params)
return vulnerability_finding if vulnerability_finding
Gitlab::ErrorTracking.track_and_raise_exception(e, find_params: find_params, uuid: finding.uuid)
rescue ActiveRecord::RecordInvalid => e
Gitlab::ErrorTracking.track_exception(e, create_params: create_params&.dig(:raw_metadata))
nil
rescue ActiveRecord::ActiveRecordError => e
Gitlab::ErrorTracking.track_and_raise_exception(e, create_params: create_params&.dig(:raw_metadata))
end
end
def update_vulnerability_scanner(finding)
scanner = scanners_objects[finding.scanner.key]
scanner.update!(finding.scanner.to_hash)
end
def vulnerability_scanner_attributes_keys
strong_memoize(:vulnerability_scanner_attributes_keys) do
Vulnerabilities::Scanner.new.attributes.keys
end
end
def valid_vulnerability_scanner_record?(record)
return false if (record.keys - vulnerability_scanner_attributes_keys).present?
record.values.all? { |value| value.present? }
end
def create_vulnerability_scanner_records(findings)
findings.map do |finding|
scanner = scanners_objects[finding.scanner.key]
next nil if scanner.nil?
scanner_attr = scanner.attributes.with_indifferent_access.except(:id)
.merge(finding.scanner.to_hash)
scanner_attr.compact!
scanner_attr
end
end
def update_vulnerability_scanners!(report_findings)
report_findings.in_groups_of(BATCH_SIZE, false) do |findings|
records = create_vulnerability_scanner_records(findings)
records.compact!
records.uniq!
records.each { |record| record.merge!({ created_at: Time.current, updated_at: Time.current }) }
records.filter! { |record| valid_vulnerability_scanner_record?(record) }
Vulnerabilities::Scanner.insert_all(records) if records.present?
end
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
ensure
clear_memoization(:scanners_objects)
clear_memoization(:existing_scanner_objects)
end
def update_vulnerability_finding(vulnerability_finding, update_params)
vulnerability_finding.update!(update_params)
end
def update_vulnerabilities_identifiers
vulnerability_finding_to_finding_map.keys.in_groups_of(BATCH_SIZE, false) do |vulnerability_findings|
identifier_object_records = get_vulnerability_identifier_objects_for(vulnerability_findings)
records_with_id, records_without_id = identifier_object_records
.partition { |identifier| identifier[:id].present? }
update_existing_vulnerability_identifiers_for(records_with_id)
insert_new_vulnerability_identifiers_for(records_without_id)
end
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
ensure
clear_memoization(:identifiers_objects)
clear_memoization(:existing_identifiers_objects)
end
def get_vulnerability_identifier_objects_for(vulnerability_findings)
timestamps = { created_at: Time.current, updated_at: Time.current }
vulnerability_findings.flat_map do |vulnerability_finding|
finding = vulnerability_finding_to_finding_map[vulnerability_finding]
finding.identifiers.take(Vulnerabilities::Finding::MAX_NUMBER_OF_IDENTIFIERS).map do |identifier|
identifier_object = identifiers_objects[identifier.key]
identifier_object.attributes.with_indifferent_access.merge(**timestamps)
.merge(identifier.to_hash).compact
end
end
end
def insert_new_vulnerability_identifiers_for(identifier_object_records)
identifier_object_records = identifier_object_records.uniq.group_by(&:keys).values
identifier_object_records.each { |records| Vulnerabilities::Identifier.insert_all(records) }
end
def update_existing_vulnerability_identifiers_for(identifier_object_records)
identifier_object_records = identifier_object_records.uniq.group_by(&:keys).values
identifier_object_records.each { |records| Vulnerabilities::Identifier.upsert_all(records) }
end
def update_vulnerabilities_finding_identifiers
vulnerability_finding_to_finding_map.keys.in_groups_of(BATCH_SIZE, false) do |vulnerability_findings|
finding_identifier_records = get_finding_identifier_objects_for(vulnerability_findings)
finding_identifier_records.uniq!
Vulnerabilities::FindingIdentifier.insert_all(finding_identifier_records) if finding_identifier_records.present?
end
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
end
def get_finding_identifier_objects_for(vulnerability_findings)
timestamps = { created_at: Time.current, updated_at: Time.current }
vulnerability_findings.flat_map do |vulnerability_finding|
finding = vulnerability_finding_to_finding_map[vulnerability_finding]
finding.identifiers.take(Vulnerabilities::Finding::MAX_NUMBER_OF_IDENTIFIERS).map do |identifier|
identifier_object = identifiers_objects[identifier.key]
next nil unless identifier_object.id
{ occurrence_id: vulnerability_finding.id, identifier_id: identifier_object.id, **timestamps }.compact
end.compact
end
end
def create_or_update_vulnerability_identifier_object(vulnerability_finding, identifier)
identifier_object = identifiers_objects[identifier.key]
vulnerability_finding.finding_identifiers.find_or_create_by!(identifier: identifier_object)
identifier_object.update!(identifier.to_hash)
rescue ActiveRecord::RecordNotUnique
end
def create_vulnerability_flags_info
timestamps = { created_at: Time.current, updated_at: Time.current }
vulnerability_finding_to_finding_map.each_slice(BATCH_SIZE) do |vf_to_findings|
records = vf_to_findings.flat_map do |vulnerability_finding, finding|
finding.flags.map { |flag| timestamps.merge(**flag.to_h, vulnerability_occurrence_id: vulnerability_finding.id) }
end
records.uniq!
Vulnerabilities::Flag.insert_all(records) if records.present?
track_events(records) if records.present?
end
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, project_id: project.id, pipeline_id: pipeline.id)
end
def track_events(records)
records.each do |record|
Gitlab::Tracking.event(
self.class.to_s, 'flag_vulnerability', label: record[:flag_type].to_s
)
end
end
def update_vulnerability_links_info
timestamps = { created_at: Time.current, updated_at: Time.current }
vulnerability_finding_to_finding_map.each_slice(BATCH_SIZE) do |vf_to_findings|
records = vf_to_findings.flat_map do |vulnerability_finding, finding|
finding.links.map { |link| { vulnerability_occurrence_id: vulnerability_finding.id, **link.to_hash, **timestamps } }
end
records.uniq!
Vulnerabilities::FindingLink.insert_all(records) if records.present?
end
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
end
def create_or_update_vulnerability_links(finding, vulnerability_finding)
return if finding.links.blank?
finding.links.each do |link|
vulnerability_finding.finding_links.safe_find_or_create_by!(link.to_hash)
end
rescue ActiveRecord::RecordNotUnique
end
def reset_remediations_for(vulnerability_finding, finding)
existing_remediations = find_existing_remediations_for(finding)
new_remediations = build_new_remediations_for(finding, existing_remediations)
vulnerability_finding.remediations = existing_remediations + new_remediations
end
def find_existing_remediations_for(finding)
checksums = finding.remediations.map(&:checksum)
@project.vulnerability_remediations.by_checksum(checksums)
end
def build_new_remediations_for(finding, existing_remediations)
find_missing_remediations_for(finding, existing_remediations)
.map { |remediation| build_vulnerability_remediation(remediation) }
end
def find_missing_remediations_for(finding, existing_remediations)
existing_remediation_checksums = existing_remediations.map(&:checksum)
finding.remediations.select { |remediation| !remediation.checksum.in?(existing_remediation_checksums) }
end
def build_vulnerability_remediation(remediation)
@project.vulnerability_remediations.new(summary: remediation.summary, file: remediation.diff_file, checksum: remediation.checksum)
end
def create_vulnerability_pipeline_objects
timestamps = { created_at: Time.current, updated_at: Time.current }
vulnerability_finding_to_finding_map.keys.in_groups_of(BATCH_SIZE, false) do |vulnerability_findings|
records = vulnerability_findings.map do |vulnerability_finding|
{ occurrence_id: vulnerability_finding.id, pipeline_id: pipeline.id, **timestamps }
end
records.uniq!
Vulnerabilities::FindingPipeline.insert_all(records) if records.present?
end
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e)
end
def create_vulnerability_pipeline_object(vulnerability_finding, pipeline)
vulnerability_finding.finding_pipelines.find_or_create_by!(pipeline: pipeline)
rescue ActiveRecord::RecordNotUnique
end
# rubocop: enable CodeReuse/ActiveRecord
def update_finding_signatures(finding, vulnerability_finding)
to_update = {}
to_create = []
poro_signatures = finding.signatures.index_by(&:algorithm_type)
vulnerability_finding.signatures.each do |signature|
# NOTE: index_by takes the last entry if there are duplicates of the same algorithm, which should never occur.
poro_signature = poro_signatures[signature.algorithm_type]
# We're no longer generating these types of signatures. Since
# we're updating the persisted vulnerability, no need to do anything
# with these signatures now. We will track growth with
# https://gitlab.com/gitlab-org/gitlab/-/issues/322186
next if poro_signature.nil?
poro_signatures.delete(signature.algorithm_type)
to_update[signature.id] = poro_signature.to_hash
end
# any remaining poro signatures left are new
poro_signatures.values.each do |poro_signature|
attributes = poro_signature.to_hash.merge(finding_id: vulnerability_finding.id)
to_create << ::Vulnerabilities::FindingSignature.new(attributes: attributes, created_at: Time.zone.now, updated_at: Time.zone.now)
end
::Vulnerabilities::FindingSignature.transaction do
if to_update.count > 0
::Vulnerabilities::FindingSignature.update(to_update.keys, to_update.values)
end
if to_create.count > 0
::Vulnerabilities::FindingSignature.bulk_insert!(to_create)
end
end
end
def create_vulnerability(vulnerability_finding, pipeline)
vulnerability = if vulnerability_finding.vulnerability_id
Vulnerabilities::UpdateService.new(vulnerability_finding.project, pipeline.user, finding: vulnerability_finding, resolved_on_default_branch: false).execute
else
Vulnerabilities::CreateService.new(vulnerability_finding.project, pipeline.user, finding_id: vulnerability_finding.id).execute.tap do |vuln|
new_vulnerabilities << vuln
end
end
create_vulnerability_issue_link(vulnerability)
vulnerability
end
def create_vulnerability_issue_link(vulnerability)
vulnerability_issue_feedback = vulnerability.finding.feedback(feedback_type: 'issue')
return unless vulnerability_issue_feedback
vulnerability.issue_links.safe_find_or_create_by!(issue_id: vulnerability_issue_feedback.issue_id)
end
def scanners_objects
strong_memoize(:scanners_objects) do
@report.scanners.to_h do |key, scanner|
[key, existing_scanner_objects[key] || project.vulnerability_scanners.build(scanner&.to_hash)]
end
end
end
def all_scanners_external_ids
@report.scanners.values.map(&:external_id)
end
def existing_scanner_objects
strong_memoize(:existing_scanner_objects) do
project.vulnerability_scanners.with_external_id(all_scanners_external_ids).to_h do |scanner|
[scanner.external_id, scanner]
end
end
end
def identifiers_objects
strong_memoize(:identifiers_objects) do
@report.identifiers.to_h do |key, identifier|
[key, existing_identifiers_objects[key] || project.vulnerability_identifiers.build(identifier.to_hash)]
end
end
end
def all_identifiers_fingerprints
@report.identifiers.values.map(&:fingerprint)
end
def existing_identifiers_objects
strong_memoize(:existing_identifiers_objects) do
project.vulnerability_identifiers.with_fingerprint(all_identifiers_fingerprints).to_h do |identifier|
[identifier.fingerprint, identifier]
end
end
end
def put_warning_for(finding)
Gitlab::AppLogger.warn(message: "Invalid vulnerability finding record found", finding: finding.to_hash)
end
def start_auto_fix
return unless auto_fix_enabled?
::Security::AutoFixWorker.perform_async(pipeline.id)
end
def auto_fix_enabled?
return false unless project.security_setting&.auto_fix_enabled?
project.security_setting.auto_fix_enabled_types.include?(report.type.to_sym)
end
end
end
# frozen_string_literal: true
module Security
# Service for storing security reports into the database.
#
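# Typical invocation, as exercised by the specs below (a sketch): iterates
# pipeline.security_reports and delegates each report to StoreReportService.
#
#   Security::StoreReportsService.new(pipeline).execute
#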
class StoreReportsService < ::BaseService
def initialize(pipeline)
@pipeline = pipeline
@errors = []
end
def execute
set_latest_pipeline!
mark_project_as_vulnerable!
store_reports
errors.any? ? error(full_errors) : success
end
private
attr_reader :pipeline, :errors
delegate :project, to: :pipeline, private: true
def store_reports
pipeline.security_reports.reports.each do |report_type, report|
result = StoreReportService.new(pipeline, report).execute
errors << result[:message] if result[:status] == :error
end
end
def mark_project_as_vulnerable!
project.project_setting.update!(has_vulnerabilities: true)
end
def set_latest_pipeline!
Vulnerabilities::Statistic.set_latest_pipeline_with(pipeline)
end
def full_errors
errors.join(", ")
end
end
end
@@ -18,11 +18,7 @@ class StoreSecurityReportsWorker # rubocop:disable Scalability/IdempotentWorker
   Ci::Pipeline.find(pipeline_id).try do |pipeline|
     break unless pipeline.project.can_store_security_reports?

-    if Feature.enabled?(:security_report_ingestion_framework, pipeline.project, default_enabled: :yaml)
-      ::Security::Ingestion::IngestReportsService.execute(pipeline)
-    else
-      ::Security::StoreReportsService.new(pipeline).execute
-    end
+    ::Security::Ingestion::IngestReportsService.execute(pipeline)

     if revoke_secret_detection_token?(pipeline)
       logger.info "StoreSecurityReportsWorker: token revocation started for pipeline: #{pipeline.id}"
...
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::StoreReportService, '#execute', :snowplow do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
let(:artifact) { create(:ee_ci_job_artifact, trait) }
let(:report_type) { artifact.file_type }
let(:project) { artifact.project }
let(:pipeline) { artifact.job.pipeline }
let(:report) { pipeline.security_reports.get_report(report_type.to_s, artifact) }
subject(:store_report) { described_class.new(pipeline, report).execute }
where(:vulnerability_finding_signatures) do
[true, false]
end
with_them do
before do
stub_licensed_features(
sast: true,
dependency_scanning: true,
container_scanning: true,
security_dashboard: true,
sast_fp_reduction: true,
vulnerability_finding_signatures: vulnerability_finding_signatures
)
allow(Security::AutoFixWorker).to receive(:perform_async)
end
context 'without existing data' do
before(:all) do
checksum = 'f00bc6261fa512f0960b7fc3bfcce7fb31997cf32b96fa647bed5668b2c77fee'
create(:vulnerabilities_remediation, checksum: checksum)
end
before do
project.add_developer(user)
allow(pipeline).to receive(:user).and_return(user)
end
context 'for different security reports' do
where(:case_name, :trait, :scanners, :identifiers, :findings, :finding_identifiers, :finding_pipelines, :remediations, :signatures, :finding_links, :finding_flags) do
'with SAST report' | :sast | 1 | 6 | 5 | 7 | 5 | 0 | 2 | 2 | 0
'with exceeding identifiers' | :with_exceeding_identifiers | 1 | 20 | 1 | 20 | 1 | 0 | 0 | 0 | 0
'with Dependency Scanning report' | :dependency_scanning_remediation | 1 | 3 | 2 | 3 | 2 | 1 | 0 | 6 | 0
'with Container Scanning report' | :container_scanning | 1 | 8 | 8 | 8 | 8 | 0 | 0 | 8 | 0
'with vulnerability flags' | :sast_with_vulnerability_flags | 1 | 6 | 5 | 7 | 5 | 0 | 2 | 0 | 2
end
with_them do
it 'inserts all scanners' do
expect { subject }.to change { Vulnerabilities::Scanner.count }.by(scanners)
end
it 'inserts all identifiers' do
expect { subject }.to change { Vulnerabilities::Identifier.count }.by(identifiers)
end
it 'inserts all findings' do
expect { subject }.to change { Vulnerabilities::Finding.count }.by(findings)
end
context 'vulnerability flags' do
it 'inserts all finding flags' do
expect { subject }.to change { Vulnerabilities::Flag.count }.by(finding_flags)
end
it 'tracks the snowplow event' do
subject
if case_name == 'with vulnerability flags'
expect_snowplow_event(
category: 'Security::StoreReportService',
action: 'flag_vulnerability',
label: 'false_positive'
)
end
end
end
it 'inserts all finding links' do
expect { subject }.to change { Vulnerabilities::FindingLink.count }.by(finding_links)
end
context 'when finding links creation is disabled' do
before do
stub_feature_flags(vulnerability_finding_replace_metadata: false)
end
it 'does not insert finding links' do
expect { subject }.not_to change { Vulnerabilities::FindingLink.count }
end
end
it 'inserts all finding identifiers (join model)' do
expect { subject }.to change { Vulnerabilities::FindingIdentifier.count }.by(finding_identifiers)
end
it 'inserts all finding pipelines (join model)' do
expect { subject }.to change { Vulnerabilities::FindingPipeline.count }.by(finding_pipelines)
end
it 'inserts all remediations' do
expect { subject }.to change { project.vulnerability_remediations.count }.by(remediations)
end
it 'inserts all vulnerabilities' do
expect { subject }.to change { Vulnerability.count }.by(findings)
end
it 'inserts all signatures' do
signatures_count = vulnerability_finding_signatures ? signatures : 0
expect { subject }.to change { Vulnerabilities::FindingSignature.count }.by(signatures_count)
end
end
end
context 'when there is an exception' do
let(:trait) { :sast }
subject { described_class.new(pipeline, report) }
it 'does not insert any scanner' do
allow(Vulnerabilities::Scanner).to receive(:insert_all).with(anything).and_raise(StandardError)
expect { subject.send(:update_vulnerability_scanners!, report.findings) }.to change { Vulnerabilities::Scanner.count }.by(0)
end
end
context 'when some attributes are missing in the identifiers' do
let(:trait) { :sast }
let(:other_params) {{ external_type: 'find_sec_bugs_type', external_id: 'PREDICTABLE_RANDOM', name: 'Find Security Bugs-PREDICTABLE_RANDOM', url: 'https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM', created_at: Time.current, updated_at: Time.current }}
let(:record_1) {{ id: 4, project_id: 2, fingerprint: '5848739446034d982ef7beece3bb19bff4044ffb', **other_params }}
let(:record_2) {{ project_id: 2, fingerprint: '5848739446034d982ef7beece3bb19bff4044ffb', **other_params }}
let(:record_3) {{ id: 4, fingerprint: '5848739446034d982ef7beece3bb19bff4044ffb', **other_params }}
let(:record_4) {{ id: 5, fingerprint: '6848739446034d982ef7beece3bb19bff4044ffb', **other_params }}
let(:record_5) {{ fingerprint: '5848739446034d982ef7beece3bb19bff4044ffb', **other_params }}
let(:record_6) {{ fingerprint: '6848739446034d982ef7beece3bb19bff4044ffb', **other_params }}
subject { described_class.new(pipeline, report) }
it 'updates existing vulnerability identifiers in groups' do
expect(Vulnerabilities::Identifier).to receive(:upsert_all).with([record_1])
expect(Vulnerabilities::Identifier).to receive(:upsert_all).with([record_3, record_4])
subject.send(:update_existing_vulnerability_identifiers_for, [record_1, record_3, record_4])
end
it 'does not update any identifier for an empty list of records' do
expect(Vulnerabilities::Identifier).not_to receive(:upsert_all)
subject.send(:update_existing_vulnerability_identifiers_for, [])
end
it 'inserts new vulnerability identifiers in groups' do
expect(Vulnerabilities::Identifier).to receive(:insert_all).with([record_2])
expect(Vulnerabilities::Identifier).to receive(:insert_all).with([record_5, record_6])
subject.send(:insert_new_vulnerability_identifiers_for, [record_2, record_5, record_6])
end
it 'does not insert any identifier for an empty list of records' do
expect(Vulnerabilities::Identifier).not_to receive(:insert_all)
subject.send(:insert_new_vulnerability_identifiers_for, [])
end
end
context 'when N+1 database queries have been removed' do
let(:trait) { :sast }
let(:bandit_scanner) { build(:ci_reports_security_scanner, external_id: 'bandit', name: 'Bandit') }
let(:link) { build(:ci_reports_security_link) }
let(:bandit_finding) { build(:ci_reports_security_finding, scanner: bandit_scanner, links: [link]) }
let(:vulnerability_findings) { [] }
subject { described_class.new(pipeline, report) }
it "avoids N+1 database queries for updating vulnerability scanners", :use_sql_query_cache do
report.add_scanner(bandit_scanner)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { subject.send(:update_vulnerability_scanners!, report.findings) }.count
2.times { add_finding_to_report }
expect { subject.send(:update_vulnerability_scanners!, report.findings) }.not_to exceed_query_limit(control_count)
end
it "avoids N+1 database queries for updating finding_links", :use_sql_query_cache do
report.add_scanner(bandit_scanner)
add_finding_to_report
stub_vulnerability_finding_id_to_finding_map
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { subject.send(:update_vulnerability_links_info) }.count
2.times { add_finding_to_report }
stub_vulnerability_finding_id_to_finding_map
expect { subject.send(:update_vulnerability_links_info) }.not_to exceed_query_limit(control_count)
end
it "avoids N+1 database queries for updating vulnerabilities_identifiers", :use_sql_query_cache do
report.add_scanner(bandit_scanner)
add_finding_to_report
stub_vulnerability_finding_id_to_finding_map
stub_vulnerability_findings
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { subject.send(:update_vulnerabilities_identifiers) }.count
2.times { add_finding_to_report }
stub_vulnerability_finding_id_to_finding_map
stub_vulnerability_findings
expect { subject.send(:update_vulnerabilities_identifiers) }.not_to exceed_query_limit(control_count)
end
def add_finding_to_report
report.add_finding(bandit_finding)
end
def stub_vulnerability_findings
allow(subject).to receive(:vulnerability_findings)
.and_return(vulnerability_findings)
end
def stub_vulnerability_finding_id_to_finding_map
allow(subject).to receive(:vulnerability_finding_id_to_finding_map)
.and_return(vulnerability_finding_id_to_finding_map)
end
def vulnerability_finding_id_to_finding_map
vulnerability_findings.clear
report.findings.to_h do |finding|
vulnerability_finding = create(:vulnerabilities_finding)
vulnerability_findings << vulnerability_finding
[vulnerability_finding.id, finding]
end
end
end
context 'when report data includes all raw_metadata' do
let(:trait) { :dependency_scanning_remediation }
it 'inserts top level finding data', :aggregate_failures do
subject
finding = Vulnerabilities::Finding.last
finding.raw_metadata = nil
expect(finding.metadata).to be_blank
expect(finding.cve).not_to be_nil
expect(finding.description).not_to be_nil
expect(finding.location).not_to be_nil
expect(finding.message).not_to be_nil
expect(finding.solution).not_to be_nil
end
end
context 'invalid data' do
let(:artifact) { create(:ee_ci_job_artifact, :sast) }
let(:finding_without_name) { build(:ci_reports_security_finding, name: nil) }
let(:report) { Gitlab::Ci::Reports::Security::Report.new('container_scanning', nil, nil) }
before do
allow(Gitlab::ErrorTracking).to receive(:track_exception).and_call_original
report.add_finding(finding_without_name)
end
it 'does not raise any exception' do
expect { store_report }.not_to raise_error
end
it 'reports the error to sentry' do
store_report
expected_params = finding_without_name.to_hash.dig(:raw_metadata)
expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), create_params: expected_params)
end
end
end
context 'with existing data from previous pipeline' do
let(:finding_identifier_fingerprint) do
build(:ci_reports_security_identifier, external_id: "CIPHER_INTEGRITY").fingerprint
end
let(:scanner) { build(:vulnerabilities_scanner, project: project, external_id: 'find_sec_bugs', name: 'Find Security Bugs') }
let(:identifier) { build(:vulnerabilities_identifier, project: project, fingerprint: finding_identifier_fingerprint) }
let(:different_identifier) { build(:vulnerabilities_identifier, project: project) }
let!(:new_artifact) { create(:ee_ci_job_artifact, :sast, job: new_build) }
let(:new_build) { create(:ci_build, pipeline: new_pipeline) }
let(:new_pipeline) { create(:ci_pipeline, project: project) }
let(:new_report) { new_pipeline.security_reports.get_report(report_type.to_s, artifact) }
let(:existing_signature) { create(:vulnerabilities_finding_signature, finding: finding) }
let(:trait) { :sast }
let(:finding_location_fingerprint) do
build(
:ci_reports_security_locations_sast,
file_path: "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
start_line: "29",
end_line: "29"
).fingerprint
end
let!(:finding) do
created_finding = create(:vulnerabilities_finding,
identifiers: [identifier],
primary_identifier: identifier,
scanner: scanner,
project: project,
uuid: "e5388f40-18f5-566d-95c6-d64c6f46a00a",
location_fingerprint: finding_location_fingerprint)
existing_finding = report.findings.find { |f| f.location.fingerprint == created_finding.location_fingerprint }
create(:vulnerabilities_finding_signature,
finding: created_finding,
algorithm_type: existing_finding.signatures.first.algorithm_type,
signature_sha: existing_finding.signatures.first.signature_sha)
create(:finding_link,
finding: created_finding)
created_finding
end
let!(:vulnerability) { create(:vulnerability, findings: [finding], project: project) }
let(:desired_uuid) do
Security::VulnerabilityUUID.generate(
report_type: finding.report_type,
primary_identifier_fingerprint: finding.primary_identifier.fingerprint,
location_fingerprint: finding.location_fingerprint,
project_id: finding.project_id
)
end
let!(:finding_with_uuidv5) do
create(:vulnerabilities_finding,
identifiers: [different_identifier],
primary_identifier: different_identifier,
scanner: scanner,
project: project,
location_fingerprint: '34661e23abcf78ff80dfcc89d0700437612e3f88')
end
let(:identifier_of_corrupted_finding) do
create(:vulnerabilities_identifier,
project: project,
fingerprint: '5848739446034d982ef7beece3bb19bff4044ffb')
end
let!(:finding_with_wrong_uuidv5) do
create(:vulnerabilities_finding,
identifiers: [identifier_of_corrupted_finding],
primary_identifier: identifier_of_corrupted_finding,
scanner: scanner,
project: project,
uuid: 'd588ff5c-7f65-5ac1-9d11-4f57d65f3faf',
location_fingerprint: '650bd2dbdad33d2859747c6ae83dcf448ce02394')
end
let!(:vulnerability_with_uuid5) { create(:vulnerability, findings: [finding_with_uuidv5], project: project) }
before do
project.add_developer(user)
allow(new_pipeline).to receive(:user).and_return(user)
end
subject { described_class.new(new_pipeline, new_report).execute }
it 'does not change existing UUIDv5' do
expect { subject }.not_to change(finding_with_uuidv5, :uuid)
end
it 'updates UUIDv4 to UUIDv5' do
# We run `OverrideUuidsService` when signatures are enabled, which
# effectively bypasses the logic exercised by this test.
next if vulnerability_finding_signatures
finding.uuid = '00000000-1111-2222-3333-444444444444'
finding.save!
# this report_finding should be used to update the finding's uuid
report_finding = new_report.findings.find { |f| f.location.fingerprint == '0e7d0291d912f56880e39d4fbd80d99dd5d327ba' }
allow(report_finding).to receive(:uuid).and_return(desired_uuid)
subject
expect(finding.reload.uuid).to eq(desired_uuid)
end
it 'reuses existing scanner' do
expect { subject }.not_to change { Vulnerabilities::Scanner.count }
end
it 'inserts only new identifiers and reuses existing ones' do
expect { subject }.to change { Vulnerabilities::Identifier.count }.by(4)
end
it 'inserts only new links and reuses existing ones' do
expect { subject }.to change { Vulnerabilities::FindingLink.count }.by(2)
end
it 'inserts only new findings and reuses existing ones' do
expect { subject }.to change { Vulnerabilities::Finding.count }.by(3)
end
it 'inserts all finding pipelines (join model) for this new pipeline' do
expect { subject }.to change { Vulnerabilities::FindingPipeline.where(pipeline: new_pipeline).count }.by(5)
end
it 'inserts new vulnerabilities with data from findings from this new pipeline' do
expect { subject }.to change { Vulnerability.count }.by(4)
end
it 'triggers project hooks on new vulnerabilities' do
expect_next_instances_of(Vulnerability, 4) do |vulnerability|
expect(vulnerability).to receive(:execute_hooks)
end
subject
end
it 'updates existing findings with new data' do
subject
expect(finding.reload).to have_attributes(severity: 'medium', name: 'Cipher with no integrity')
end
it 'updates signatures to match new values' do
next unless vulnerability_finding_signatures
expect(finding.signatures.count).to eq(1)
expect(finding.signatures.first.algorithm_type).to eq('hash')
existing_signature = finding.signatures.first
subject
finding.reload
existing_signature.reload
expect(finding.signatures.count).to eq(2)
signature_algs = finding.signatures.sort_by(&:priority).map(&:algorithm_type)
expect(signature_algs).to eq(%w[hash scope_offset])
# check that the existing hash signature was updated/reused
expect(existing_signature.id).to eq(finding.signatures.find(&:algorithm_hash?).id)
end
it 'updates existing vulnerability with new data' do
subject
expect(vulnerability.reload).to have_attributes(severity: 'medium', title: 'Cipher with no integrity', title_html: 'Cipher with no integrity')
end
context 'when the existing vulnerability is resolved with the latest report' do
let!(:existing_vulnerability) { create(:vulnerability, report_type: report_type, project: project) }
it 'marks the vulnerability as resolved on default branch' do
expect { subject }.to change { existing_vulnerability.reload.resolved_on_default_branch }.from(false).to(true)
end
end
context 'when the existing vulnerability requires manual resolution' do
let(:trait) { :secret_detection }
let!(:finding) { create(:vulnerabilities_finding, :with_secret_detection, project: project) }
it 'does not mark the vulnerability as resolved on default branch' do
expect { subject }.not_to change { finding.vulnerability.reload.resolved_on_default_branch }
end
end
context 'when the existing resolved vulnerability is discovered again on the latest report' do
before do
vulnerability.update_column(:resolved_on_default_branch, true)
end
it 'marks the vulnerability as not resolved on default branch' do
expect { subject }.to change { vulnerability.reload.resolved_on_default_branch }.from(true).to(false)
end
end
context 'when the finding is not valid' do
before do
allow(Gitlab::AppLogger).to receive(:warn)
allow_next_instance_of(::Gitlab::Ci::Reports::Security::Finding) do |finding|
allow(finding).to receive(:valid?).and_return(false)
end
end
it 'does not create a new finding' do
expect { subject }.not_to change { Vulnerabilities::Finding.count }
end
it 'does not raise an error' do
expect { subject }.not_to raise_error
end
it 'puts a warning log' do
subject
expect(Gitlab::AppLogger).to have_received(:warn).exactly(new_report.findings.length).times
end
end
context 'vulnerability issue link' do
context 'when there is no associated issue feedback with finding' do
it 'does not insert issue links from the new pipeline' do
expect { subject }.to change { Vulnerabilities::IssueLink.count }.by(0)
end
end
context 'when there is an associated issue feedback with finding' do
let(:issue) { create(:issue, project: project) }
let!(:issue_feedback) do
create(
:vulnerability_feedback,
:sast,
:issue,
issue: issue,
project: project,
project_fingerprint: new_report.findings.first.project_fingerprint
)
end
it 'inserts issue links from the new pipeline' do
expect { subject }.to change { Vulnerabilities::IssueLink.count }.by(1)
end
it 'creates a valid issue link' do
first_finding_uuid = new_report.findings.first.uuid
subject
finding = Vulnerabilities::Finding.find_by(uuid: first_finding_uuid)
vulnerability_id = finding.vulnerability_id
issue_id = issue.id
issue_link = Vulnerabilities::IssueLink.find_by(
vulnerability_id: vulnerability_id,
issue_id: issue_id
)
expect(issue_link).not_to be_nil
end
end
context 'when an issue link already exists for an issue of the vulnerability' do
let(:issue) { create(:issue, project: project) }
let!(:issue_feedback) do
create(
:vulnerability_feedback,
:sast,
:issue,
issue: issue,
project: project,
project_fingerprint: new_report.findings.find { |f| f.location.fingerprint == finding.location_fingerprint }.project_fingerprint
)
end
let!(:issue_link) { create(:vulnerabilities_issue_link, issue: issue, vulnerability_id: vulnerability.id) }
it 'will not raise an error' do
expect { subject }.not_to raise_error
end
it 'does not insert issue link from the new pipeline' do
expect { subject }.to change { Vulnerabilities::IssueLink.count }.by(0)
end
end
end
end
context 'with existing data from same pipeline' do
let(:finding) { create(:vulnerabilities_finding, :with_pipeline, project: project) }
let!(:finding_pipeline) { create(:vulnerabilities_finding_pipeline, finding: finding, pipeline: pipeline) }
let(:trait) { :sast }
it 'skips report' do
expect(subject).to eq({
status: :error,
message: "sast report already stored for this pipeline, skipping..."
})
end
end
context 'start auto_fix' do
before do
stub_licensed_features(vulnerability_auto_fix: true)
end
context 'with auto fix supported report type' do
let(:trait) { :dependency_scanning }
context 'when auto fix enabled' do
it 'starts auto fix worker' do
expect(Security::AutoFixWorker).to receive(:perform_async).with(pipeline.id)
subject
end
end
context 'when auto fix disabled' do
context 'when feature flag is disabled' do
before do
stub_feature_flags(security_auto_fix: false)
end
it 'does not start auto fix worker' do
expect(Security::AutoFixWorker).not_to receive(:perform_async)
subject
end
end
context 'when auto fix feature is disabled' do
before do
project.security_setting.update_column(:auto_fix_dependency_scanning, false)
end
it 'does not start auto fix worker' do
expect(Security::AutoFixWorker).not_to receive(:perform_async)
subject
end
end
context 'when licensed feature is unavailable' do
before do
stub_licensed_features(vulnerability_auto_fix: false)
end
it 'does not start auto fix worker' do
expect(Security::AutoFixWorker).not_to receive(:perform_async)
subject
end
end
context 'when security setting is not created' do
before do
project.security_setting.destroy!
project.reload
end
it 'does not start auto fix worker' do
expect(Security::AutoFixWorker).not_to receive(:perform_async)
expect(subject[:status]).to eq(:success)
end
end
end
end
context 'with auto fix not supported report type' do
let(:trait) { :sast }
before do
stub_licensed_features(vulnerability_auto_fix: true)
end
it 'does not start auto fix worker' do
expect(Security::AutoFixWorker).not_to receive(:perform_async)
subject
end
end
end
end
context 'vulnerability tracking' do
let!(:artifact) { create(:ee_ci_job_artifact, :sast_minimal) }
def generate_new_pipeline
pipeline = create(:ci_pipeline, :success, project: project)
build = create(:ci_build, :success, pipeline: pipeline, project: project)
artifact = create(:ee_ci_job_artifact, :sast_minimal, job: build, project: project)
[
pipeline,
pipeline.security_reports.get_report('sast', artifact)
]
end
before do
project.add_developer(user)
allow(pipeline).to receive(:user).and_return(user)
end
# This spec runs three pipelines, ensuring findings are tracked as expected:
# 1. pipeline creates initial findings without tracking signatures
# 2. pipeline updates previous findings with tracking signatures
# 3. pipeline updates previous findings using tracking signatures
it 'remaps findings across pipeline executions', :aggregate_failures do
stub_licensed_features(
sast: true,
security_dashboard: true,
vulnerability_finding_signatures: false
)
expect do
expect do
described_class.new(pipeline, report).execute
end.not_to(raise_error)
end.to change { Vulnerabilities::FindingPipeline.count }.by(1)
.and change { Vulnerability.count }.by(1)
.and change { Vulnerabilities::Finding.count }.by(1)
.and change { Vulnerabilities::FindingSignature.count }.by(0)
stub_licensed_features(
sast: true,
security_dashboard: true,
vulnerability_finding_signatures: true
)
pipeline, report = generate_new_pipeline
allow(pipeline).to receive(:user).and_return(user)
expect do
expect do
described_class.new(pipeline, report).execute
end.not_to(raise_error)
end.to change { Vulnerabilities::FindingPipeline.count }.by(1)
.and change { Vulnerability.count }.by(0)
.and change { Vulnerabilities::Finding.count }.by(0)
.and change { Vulnerabilities::FindingSignature.count }.by(2)
pipeline, report = generate_new_pipeline
# Update the location of the finding to trigger persistence of signatures
finding = report.findings.first
location_data = finding.location.as_json.symbolize_keys.tap { |h| h.delete(:fingerprint) }
location_data[:start_line] += 1
location_data[:end_line] += 1
allow(finding).to receive(:location).and_return(
Gitlab::Ci::Reports::Security::Locations::Sast.new(**location_data)
)
allow(finding).to receive(:raw_metadata).and_return(
Gitlab::Json.parse(finding.raw_metadata).merge("location" => location_data).to_json
)
allow(pipeline).to receive(:user).and_return(user)
expect do
expect do
described_class.new(pipeline, report).execute
end.not_to(raise_error)
end.to change { Vulnerabilities::FindingPipeline.count }.by(1)
.and change { Vulnerability.count }.by(0)
.and change { Vulnerabilities::Finding.count }.by(0)
.and change { Vulnerabilities::FindingSignature.count }.by(0)
.and change { Vulnerabilities::Finding.last.location['start_line'] }.from(29).to(30)
.and change { Vulnerabilities::Finding.last.location['end_line'] }.from(29).to(30)
end
end
context 'for container scanning' do
let(:trait) { :container_scanning }
before do
stub_licensed_features(container_scanning: true, security_dashboard: true)
allow(pipeline).to receive(:user).and_return(project.first_owner)
end
it 'populates finding location' do
subject
last_finding = Vulnerabilities::Finding.last
expect(last_finding.read_attribute(:location)).to eq(last_finding.location)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::StoreReportsService do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :public, namespace: group) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
describe '#execute' do
subject(:execute_service_object) { described_class.new(pipeline).execute }
context 'when there are reports' do
before do
stub_licensed_features(sast: true, dependency_scanning: true, container_scanning: true, security_dashboard: true)
create(:ee_ci_build, :sast, pipeline: pipeline)
create(:ee_ci_build, :dependency_scanning, pipeline: pipeline)
create(:ee_ci_build, :container_scanning, pipeline: pipeline)
project.add_developer(user)
allow(pipeline).to receive(:user).and_return(user)
end
it 'initializes and executes a StoreReportService for each report' do
expect(Security::StoreReportService).to receive(:new)
.exactly(3).times.with(pipeline, instance_of(::Gitlab::Ci::Reports::Security::Report))
.and_wrap_original do |method, *original_args|
method.call(*original_args).tap do |store_service|
expect(store_service).to receive(:execute).once.and_call_original
end
end
execute_service_object
end
it 'marks the project as vulnerable' do
expect { execute_service_object }.to change { project.reload.project_setting.has_vulnerabilities }.from(false).to(true)
end
it 'updates the `latest_pipeline_id` attribute of the associated `vulnerability_statistic` record' do
expect { execute_service_object }.to change { project.reload.vulnerability_statistic&.latest_pipeline_id }.from(nil).to(pipeline.id)
end
context 'when the StoreReportService raises an error' do
let(:error) { RuntimeError.new('foo') }
before do
allow_next_instance_of(Security::StoreReportService) do |service_object|
allow(service_object).to receive(:execute).and_raise(error)
end
end
it 'marks the project as vulnerable' do
expect { execute_service_object }.to raise_error(error)
.and change { project.reload.project_setting.has_vulnerabilities }.from(false).to(true)
end
it 'updates the `latest_pipeline_id` attribute of the associated `vulnerability_statistic` record' do
expect { execute_service_object }.to raise_error(error)
.and change { project.reload.vulnerability_statistic&.latest_pipeline_id }.from(nil).to(pipeline.id)
end
end
context 'when StoreReportService returns an error for a report' do
let(:reports) { Gitlab::Ci::Reports::Security::Reports.new(pipeline) }
let(:sast_report) { reports.get_report('sast', sast_artifact) }
let(:dast_report) { reports.get_report('dast', dast_artifact) }
let(:success) { { status: :success } }
let(:error) { { status: :error, message: "something went wrong" } }
let(:sast_artifact) { create(:ee_ci_job_artifact, :sast) }
let(:dast_artifact) { create(:ee_ci_job_artifact, :dast) }
before do
allow(pipeline).to receive(:security_reports).and_return(reports)
end
it 'returns the errors after having processed all reports' do
expect_next_instance_of(Security::StoreReportService, pipeline, sast_report) do |store_service|
expect(store_service).to receive(:execute).and_return(error)
end
expect_next_instance_of(Security::StoreReportService, pipeline, dast_report) do |store_service|
expect(store_service).to receive(:execute).and_return(success)
end
is_expected.to eq(error)
end
end
end
end
end
@@ -70,29 +70,10 @@ RSpec.describe StoreSecurityReportsWorker do
       described_class.new.perform(pipeline.id)
     end

-    context 'when the `security_report_ingestion_framework` feature is enabled' do
-      before do
-        stub_feature_flags(security_report_ingestion_framework: project)
-      end
-
-      it 'executes IngestReportsService for given pipeline' do
-        expect(::Security::Ingestion::IngestReportsService).to receive(:execute).with(pipeline)
-
-        described_class.new.perform(pipeline.id)
-      end
-    end
-
-    context 'when the `security_report_ingestion_framework` feature is disabled' do
-      before do
-        stub_feature_flags(security_report_ingestion_framework: false)
-      end
-
-      it 'executes StoreReportsService for given pipeline' do
-        expect(Security::StoreReportsService).to receive(:new)
-          .with(pipeline).once.and_call_original
-
-        described_class.new.perform(pipeline.id)
-      end
+    it 'executes IngestReportsService for given pipeline' do
+      expect(::Security::Ingestion::IngestReportsService).to receive(:execute).with(pipeline)
+
+      described_class.new.perform(pipeline.id)
     end
   end
 end
...
@@ -100,8 +81,8 @@ RSpec.describe StoreSecurityReportsWorker do
   context "when security reports feature is not available" do
     let(:default_branch) { pipeline.ref }

-    it 'does not execute StoreReportsService' do
-      expect(Security::StoreReportsService).not_to receive(:new)
+    it 'does not execute IngestReportsService' do
+      expect(::Security::Ingestion::IngestReportsService).not_to receive(:execute)

       described_class.new.perform(pipeline.id)
     end
...