Commit 2a77b038 authored by Sarah Yasonik, committed by Heinrich Lee Yu

Move shared alert processing logic to concern

parent 5d3bedde
@@ -2,9 +2,8 @@
 module AlertManagement
   class ProcessPrometheusAlertService
-    include BaseServiceUtility
-    include Gitlab::Utils::StrongMemoize
-    include ::IncidentManagement::Settings
+    extend ::Gitlab::Utils::Override
+    include ::AlertManagement::AlertProcessing

     def initialize(project, payload)
       @project = project
@@ -14,11 +13,10 @@ module AlertManagement
     def execute
       return bad_request unless incoming_payload.has_required_attributes?

-      process_alert_management_alert
+      process_alert
       return bad_request unless alert.persisted?

-      process_incident_issues if process_issues?
-      send_alert_email if send_email?
+      complete_post_processing_tasks

       ServiceResponse.success
     end
@@ -27,110 +25,31 @@ module AlertManagement
     attr_reader :project, :payload

-    def process_alert_management_alert
-      if incoming_payload.resolved?
-        process_resolved_alert_management_alert
-      else
-        process_firing_alert_management_alert
-      end
-    end
-
-    def process_firing_alert_management_alert
-      if alert.persisted?
-        alert.register_new_event!
-        reset_alert_management_alert_status
-      else
-        create_alert_management_alert
-      end
-    end
-
-    def reset_alert_management_alert_status
-      return if alert.trigger
-
-      logger.warn(
-        message: 'Unable to update AlertManagement::Alert status to triggered',
-        project_id: project.id,
-        alert_id: alert.id
-      )
-    end
-
-    def create_alert_management_alert
-      if alert.save
-        alert.execute_services
-        SystemNoteService.create_new_alert(alert, Gitlab::AlertManagement::Payload::MONITORING_TOOLS[:prometheus])
-        return
-      end
-
-      logger.warn(
-        message: 'Unable to create AlertManagement::Alert',
-        project_id: project.id,
-        alert_errors: alert.errors.messages
-      )
-    end
-
-    def process_resolved_alert_management_alert
-      return unless alert.persisted?
-      return unless auto_close_incident?
-
-      if alert.resolve(incoming_payload.ends_at)
-        close_issue(alert.issue)
-        return
-      end
-
-      logger.warn(
-        message: 'Unable to update AlertManagement::Alert status to resolved',
-        project_id: project.id,
-        alert_id: alert.id
-      )
-    end
-
-    def close_issue(issue)
-      return if issue.blank? || issue.closed?
-
-      Issues::CloseService
-        .new(project, User.alert_bot)
-        .execute(issue, system_note: false)
-
-      SystemNoteService.auto_resolve_prometheus_alert(issue, project, User.alert_bot) if issue.reset.closed?
-    end
-
-    def process_incident_issues
-      return if alert.issue || alert.resolved?
-
-      IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
-    end
-
-    def send_alert_email
-      notification_service
-        .async
-        .prometheus_alerts_fired(project, [alert])
-    end
-
-    def logger
-      @logger ||= Gitlab::AppLogger
-    end
-
-    def alert
-      strong_memoize(:alert) do
-        existing_alert || new_alert
-      end
-    end
-
-    def existing_alert
-      strong_memoize(:existing_alert) do
-        AlertManagement::Alert.not_resolved.for_fingerprint(project, incoming_payload.gitlab_fingerprint).first
-      end
-    end
-
-    def new_alert
-      strong_memoize(:new_alert) do
-        AlertManagement::Alert.new(
-          **incoming_payload.alert_params,
-          ended_at: nil
-        )
-      end
-    end
-
+    override :process_new_alert
+    def process_new_alert
+      return if resolving_alert?
+
+      super
+    end
+
+    override :process_firing_alert
+    def process_firing_alert
+      super
+
+      reset_alert_status
+    end
+
+    def reset_alert_status
+      return if alert.trigger
+
+      logger.warn(
+        message: 'Unable to update AlertManagement::Alert status to triggered',
+        project_id: project.id,
+        alert_id: alert.id
+      )
+    end
+
+    override :incoming_payload
     def incoming_payload
       strong_memoize(:incoming_payload) do
         Gitlab::AlertManagement::Payload.parse(
@@ -141,6 +60,11 @@ module AlertManagement
       end
     end

+    override :resolving_alert?
+    def resolving_alert?
+      incoming_payload.resolved?
+    end
+
     def bad_request
       ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
     end
...
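For orientation, a hypothetical caller of the refactored service (illustration only, not part of this commit; `project` and a Prometheus webhook `payload` hash are assumed to come from the webhook controller):

# Illustration only -- not part of this commit.
result = AlertManagement::ProcessPrometheusAlertService.new(project, payload).execute

if result.success?
  # The alert was created or updated; issue creation and alert emails are
  # handled by complete_post_processing_tasks, subject to project settings.
else
  result.http_status # => :bad_request when required payload attributes are missing
end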
# frozen_string_literal: true

module AlertManagement
  # Module to support the processing of new alert payloads
  # from various sources. Payloads may be for new alerts,
  # existing alerts, or acting as a resolving alert.
  #
  # Performs processing-related tasks, such as creating system
  # notes, creating or resolving related issues, and notifying
  # stakeholders of the alert.
  #
  # Requires #project [Project] and #payload [Hash] methods
  # to be defined.
  module AlertProcessing
    include BaseServiceUtility
    include Gitlab::Utils::StrongMemoize
    include ::IncidentManagement::Settings

    # Updates or creates alert from payload for project
    # including system notes
    def process_alert
      if alert.persisted?
        process_existing_alert
      else
        process_new_alert
      end
    end

    # Creates or closes issue for alert and notifies stakeholders
    def complete_post_processing_tasks
      process_incident_issues if process_issues?
      send_alert_email if send_email?
    end

    def process_existing_alert
      if resolving_alert?
        process_resolved_alert
      else
        process_firing_alert
      end
    end

    def process_resolved_alert
      return unless auto_close_incident?
      return close_issue(alert.issue) if alert.resolve(incoming_payload.ends_at)

      logger.warn(
        message: 'Unable to update AlertManagement::Alert status to resolved',
        project_id: project.id,
        alert_id: alert.id
      )
    end

    def process_firing_alert
      alert.register_new_event!
    end

    def close_issue(issue)
      return if issue.blank? || issue.closed?

      ::Issues::CloseService
        .new(project, User.alert_bot)
        .execute(issue, system_note: false)

      SystemNoteService.auto_resolve_prometheus_alert(issue, project, User.alert_bot) if issue.reset.closed?
    end

    def process_new_alert
      if alert.save
        alert.execute_services
        SystemNoteService.create_new_alert(alert, alert_source)
      else
        logger.warn(
          message: "Unable to create AlertManagement::Alert from #{alert_source}",
          project_id: project.id,
          alert_errors: alert.errors.messages
        )
      end
    end

    def process_incident_issues
      return if alert.issue || alert.resolved?

      ::IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
    end

    def send_alert_email
      notification_service
        .async
        .prometheus_alerts_fired(project, [alert])
    end

    def incoming_payload
      strong_memoize(:incoming_payload) do
        Gitlab::AlertManagement::Payload.parse(project, payload.to_h)
      end
    end

    def alert
      strong_memoize(:alert) do
        find_existing_alert || build_new_alert
      end
    end

    def find_existing_alert
      return unless incoming_payload.gitlab_fingerprint

      AlertManagement::Alert.not_resolved.for_fingerprint(project, incoming_payload.gitlab_fingerprint).first
    end

    def build_new_alert
      AlertManagement::Alert.new(**incoming_payload.alert_params, ended_at: nil)
    end

    def resolving_alert?
      incoming_payload.ends_at.present?
    end

    def alert_source
      alert.monitoring_tool
    end

    def logger
      @logger ||= Gitlab::AppLogger
    end
  end
end
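A minimal sketch of what the concern expects from an including class (the service below is hypothetical; its shape mirrors the services updated in this commit): define `#project` and `#payload`, then call `process_alert` and `complete_post_processing_tasks` from `execute`.

# Hypothetical consumer -- illustration only, not part of this commit.
module AlertManagement
  class ExampleWebhookService
    include ::AlertManagement::AlertProcessing

    def initialize(project, payload)
      @project = project
      @payload = payload
    end

    def execute
      process_alert                  # create or update the alert, with system notes
      return bad_request unless alert.persisted?

      complete_post_processing_tasks # open/close issues, notify stakeholders
      ServiceResponse.success
    end

    private

    # AlertProcessing requires these two readers.
    attr_reader :project, :payload

    def bad_request
      ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
    end
  end
end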
@@ -3,9 +3,8 @@
 module Projects
   module Alerting
     class NotifyService
-      include BaseServiceUtility
-      include Gitlab::Utils::StrongMemoize
-      include ::IncidentManagement::Settings
+      extend ::Gitlab::Utils::Override
+      include ::AlertManagement::AlertProcessing

       def initialize(project, payload)
         @project = project
@@ -22,8 +21,7 @@ module Projects
         process_alert
         return bad_request unless alert.persisted?

-        process_incident_issues if process_issues?
-        send_alert_email if send_email?
+        complete_post_processing_tasks

         ServiceResponse.success
       end
@@ -32,93 +30,15 @@ module Projects
       attr_reader :project, :payload, :integration

-      def process_alert
-        if alert.persisted?
-          process_existing_alert
-        else
-          create_alert
-        end
-      end
-
-      def process_existing_alert
-        if incoming_payload.ends_at.present?
-          process_resolved_alert
-        else
-          alert.register_new_event!
-        end
-
-        alert
-      end
-
-      def process_resolved_alert
-        return unless auto_close_incident?
-
-        if alert.resolve(incoming_payload.ends_at)
-          close_issue(alert.issue)
-        end
-
-        alert
-      end
-
-      def close_issue(issue)
-        return if issue.blank? || issue.closed?
-
-        ::Issues::CloseService
-          .new(project, User.alert_bot)
-          .execute(issue, system_note: false)
-
-        SystemNoteService.auto_resolve_prometheus_alert(issue, project, User.alert_bot) if issue.reset.closed?
-      end
-
-      def create_alert
-        return unless alert.save
-
-        alert.execute_services
-        SystemNoteService.create_new_alert(alert, notification_source)
-      end
-
-      def process_incident_issues
-        return if alert.issue || alert.resolved?
-
-        ::IncidentManagement::ProcessAlertWorker.perform_async(nil, nil, alert.id)
-      end
-
-      def send_alert_email
-        notification_service
-          .async
-          .prometheus_alerts_fired(project, [alert])
-      end
-
-      def alert
-        strong_memoize(:alert) do
-          existing_alert || new_alert
-        end
-      end
-
-      def existing_alert
-        return unless incoming_payload.gitlab_fingerprint
-
-        AlertManagement::Alert.not_resolved.for_fingerprint(project, incoming_payload.gitlab_fingerprint).first
-      end
-
-      def new_alert
-        AlertManagement::Alert.new(**incoming_payload.alert_params, ended_at: nil)
-      end
-
-      def incoming_payload
-        strong_memoize(:incoming_payload) do
-          Gitlab::AlertManagement::Payload.parse(project, payload.to_h)
-        end
-      end
-
-      def notification_source
-        alert.monitoring_tool || integration&.name || 'Generic Alert Endpoint'
-      end
-
-      def valid_payload_size?
-        Gitlab::Utils::DeepSize.new(payload).valid?
-      end
-
+      def valid_payload_size?
+        Gitlab::Utils::DeepSize.new(payload).valid?
+      end
+
+      override :alert_source
+      def alert_source
+        alert.monitoring_tool || integration&.name || 'Generic Alert Endpoint'
+      end
+
       def active_integration?
         integration&.active?
       end
...
@@ -3,7 +3,8 @@
 module AlertManagement
   # Create alerts coming K8 through gitlab-agent
   class NetworkAlertService
-    include Gitlab::Utils::StrongMemoize
+    extend ::Gitlab::Utils::Override
+    include ::AlertManagement::AlertProcessing

     MONITORING_TOOL = Gitlab::AlertManagement::Payload::MONITORING_TOOLS.fetch(:cilium)
@@ -12,12 +13,10 @@ module AlertManagement
       @payload = payload
     end

-    # Users of this service need to check the agent token before calling `execute`.
-    # https://gitlab.com/gitlab-org/gitlab/-/issues/292707 will handle token within the service.
     def execute
       return bad_request unless valid_payload_size?

-      process_request
+      process_alert
       return bad_request unless alert.persisted?
@@ -32,57 +31,27 @@ module AlertManagement
       Gitlab::Utils::DeepSize.new(payload).valid?
     end

-    def process_request
-      if alert.persisted?
-        alert.register_new_event!
-      else
-        create_alert
-      end
-    end
-
-    def create_alert
-      if alert.save
-        alert.execute_services
-        SystemNoteService.create_new_alert(
-          alert,
-          MONITORING_TOOL
-        )
-        return
-      end
-
-      logger.warn(
-        message:
-          "Unable to create AlertManagement::Alert from #{MONITORING_TOOL}",
-        project_id: project.id,
-        alert_errors: alert.errors.messages
-      )
-    end
-
-    def logger
-      @logger ||= Gitlab::AppLogger
-    end
-
-    def alert
-      strong_memoize(:alert) { find_existing_alert || build_new_alert }
-    end
-
-    def find_existing_alert
-      AlertManagement::Alert.not_resolved.for_fingerprint(
-        project,
-        incoming_payload.gitlab_fingerprint
-      ).first
-    end
-
+    override :build_new_alert
     def build_new_alert
-      AlertManagement::Alert.new(**incoming_payload.alert_params, domain: :threat_monitoring, ended_at: nil)
+      AlertManagement::Alert.new(
+        **incoming_payload.alert_params,
+        domain: :threat_monitoring,
+        ended_at: nil
+      )
     end

+    override :incoming_payload
     def incoming_payload
       strong_memoize(:incoming_payload) do
         Gitlab::AlertManagement::Payload.parse(project, payload, monitoring_tool: MONITORING_TOOL)
       end
     end

+    override :resolving_alert?
+    def resolving_alert?
+      false
+    end
+
     def bad_request
       ServiceResponse.error(message: 'Bad Request', http_status: :bad_request)
     end
...
@@ -158,7 +158,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
       it 'writes a warning to the log' do
         expect(Gitlab::AppLogger).to receive(:warn).with(
-          message: 'Unable to create AlertManagement::Alert',
+          message: 'Unable to create AlertManagement::Alert from Prometheus',
           project_id: project.id,
          alert_errors: { hosts: ['hosts array is over 255 chars'] }
         )
...