Commit 9e5bc0e5 authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents 5adb5a11 0b00bb96
import $ from 'jquery';
import { debounce } from 'lodash';
import DEFAULT_PROJECT_TEMPLATES from 'ee_else_ce/projects/default_project_templates';
import axios from '../lib/utils/axios_utils';
import {
convertToTitleCase,
humanize,
@@ -9,6 +11,23 @@ import {
let hasUserDefinedProjectPath = false;
let hasUserDefinedProjectName = false;
const invalidInputClass = 'gl-field-error-outline';
const validateImportCredentials = (url, user, password) => {
const endpoint = `${gon.relative_url_root}/import/url/validate`;
return axios
.post(endpoint, {
url,
user,
password,
})
.then(({ data }) => data)
.catch(() => ({
// intentionally report success on a validation exception:
// we do not want to block users from attempting the import if validation itself fails
success: true,
}));
};
const onProjectNameChange = ($projectNameInput, $projectPathInput) => {
const slug = slugify(convertUnicodeToAscii($projectNameInput.val()));
@@ -85,7 +104,10 @@ const bindHowToImport = () => {
const bindEvents = () => {
const $newProjectForm = $('#new_project');
const $projectImportUrl = $('#project_import_url');
const $projectImportUrlUser = $('#project_import_url_user');
const $projectImportUrlPassword = $('#project_import_url_password');
const $projectImportUrlError = $('.js-import-url-error');
const $projectImportForm = $('.project-import form');
const $projectPath = $('.tab-pane.active #project_path');
const $useTemplateBtn = $('.template-button > input');
const $projectFieldsForm = $('.project-fields-form');
@@ -139,12 +161,15 @@ const bindEvents = () => {
$projectPath.val($projectPath.val().trim());
});
const updateUrlPathWarningVisibility = debounce(async () => {
const { success: isUrlValid } = await validateImportCredentials(
$projectImportUrl.val(),
$projectImportUrlUser.val(),
$projectImportUrlPassword.val(),
);
$projectImportUrl.toggleClass(invalidInputClass, !isUrlValid);
$projectImportUrlError.toggleClass('hide', isUrlValid);
}, 500);
let isProjectImportUrlDirty = false;
$projectImportUrl.on('blur', () => {
@@ -153,9 +178,22 @@ const bindEvents = () => {
});
$projectImportUrl.on('keyup', () => {
deriveProjectPathFromUrl($projectImportUrl);
});
[$projectImportUrl, $projectImportUrlUser, $projectImportUrlPassword].forEach(($f) => {
$f.on('input', () => {
if (isProjectImportUrlDirty) {
updateUrlPathWarningVisibility();
}
});
});
$projectImportForm.on('submit', (e) => {
const $invalidFields = $projectImportForm.find(`.${invalidInputClass}`);
if ($invalidFields.length > 0) {
$invalidFields[0].focus();
e.preventDefault();
e.stopPropagation();
}
});
...
# frozen_string_literal: true
class Import::UrlController < ApplicationController
feature_category :importers
def validate
result = Import::ValidateRemoteGitEndpointService.new(validate_params).execute
if result.success?
render json: { success: true }
else
render json: { success: false, message: result.message }
end
end
private
def validate_params
params.permit(:user, :password, :url)
end
end
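For orientation, here is a minimal sketch of the endpoint's request/response contract as a plain Ruby client. The host, credentials, and repository URL are placeholders, and a real request would also need an authenticated session and CSRF token; the JSON shapes mirror the controller above.

```ruby
require 'net/http'
require 'json'

# Hypothetical client call; this mirrors what the frontend's
# validateImportCredentials helper posts to the controller above.
uri = URI('https://gitlab.example.com/import/url/validate')
response = Net::HTTP.post_form(uri, url: 'https://host/group/project.git',
                                    user: 'bob', password: 'secret')
JSON.parse(response.body)
# => { "success" => true }
# or { "success" => false,
#      "message" => "https://host/group/project.git is not a valid HTTP Git repository" }
```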
@@ -2,6 +2,15 @@
module VulnerabilityFindingHelpers
extend ActiveSupport::Concern
# Manually resolvable report types cannot be considered fixed once removed from the
# target branch, because they require active triage, such as rotating an exposed token.
REPORT_TYPES_REQUIRING_MANUAL_RESOLUTION = %w[secret_detection].freeze
def requires_manual_resolution?
REPORT_TYPES_REQUIRING_MANUAL_RESOLUTION.include?(report_type)
end
def matches_signatures(other_signatures, other_uuid)
other_signature_types = other_signatures.index_by(&:algorithm_type)
...
@@ -11,8 +11,6 @@ module Ci
def execute
increment_processing_counter
Ci::PipelineProcessing::AtomicProcessingService
.new(pipeline)
.execute
@@ -24,41 +22,6 @@ module Ci
private
def increment_processing_counter
metrics.pipeline_processing_events_counter.increment
end
...
# frozen_string_literal: true
module Import
class ValidateRemoteGitEndpointService
# Validates whether the remote endpoint is a valid Git repository.
# Only the smart protocol is supported.
# Validation rules are taken from https://git-scm.com/docs/http-protocol#_smart_clients
GIT_SERVICE_NAME = "git-upload-pack"
GIT_EXPECTED_FIRST_PACKET_LINE = "# service=#{GIT_SERVICE_NAME}"
GIT_BODY_MESSAGE_REGEXP = /^[0-9a-f]{4}#{GIT_EXPECTED_FIRST_PACKET_LINE}/.freeze
# https://github.com/git/git/blob/master/Documentation/technical/protocol-common.txt#L56-L59
GIT_PROTOCOL_PKT_LEN = 4
GIT_MINIMUM_RESPONSE_LENGTH = GIT_PROTOCOL_PKT_LEN + GIT_EXPECTED_FIRST_PACKET_LINE.length
EXPECTED_CONTENT_TYPE = "application/x-#{GIT_SERVICE_NAME}-advertisement"
def initialize(params)
@params = params
end
def execute
uri = Gitlab::Utils.parse_url(@params[:url])
return error("Invalid URL") unless uri
uri.fragment = nil
url = Gitlab::Utils.append_path(uri.to_s, "/info/refs?service=#{GIT_SERVICE_NAME}")
response_body = ''
result = nil
Gitlab::HTTP.try_get(url, stream_body: true, follow_redirects: false, basic_auth: auth) do |fragment|
response_body += fragment
next if response_body.length < GIT_MINIMUM_RESPONSE_LENGTH
result = if status_code_is_valid(fragment) && content_type_is_valid(fragment) && response_body_is_valid(response_body)
:success
else
:error
end
# We are interested only in the first chunks of the response,
# so we use stream_body: true and break once we have received enough of the body.
break
end
if result == :success
ServiceResponse.success
else
ServiceResponse.error(message: "#{uri} is not a valid HTTP Git repository")
end
end
private
def auth
unless @params[:user].to_s.blank?
{
username: @params[:user],
password: @params[:password]
}
end
end
def status_code_is_valid(fragment)
fragment.http_response.code == '200'
end
def content_type_is_valid(fragment)
fragment.http_response['content-type'] == EXPECTED_CONTENT_TYPE
end
def response_body_is_valid(response_body)
response_body.match?(GIT_BODY_MESSAGE_REGEXP)
end
end
end
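To make the validation rules above concrete: per the smart HTTP protocol, the ref advertisement must begin with a pkt-line whose 4-hex-digit length prefix is followed by `# service=git-upload-pack`. A minimal sketch in plain Ruby (the sample payload is constructed for illustration):

```ruby
# The pkt-len prefix encodes the full line length in hex, including the
# four prefix bytes: 4 + "# service=git-upload-pack\n".bytesize (26) = 30 = 0x001e.
payload = "# service=git-upload-pack\n"
pkt_line = format('%04x', payload.bytesize + 4) + payload
pkt_line # => "001e# service=git-upload-pack\n"

# GIT_BODY_MESSAGE_REGEXP accepts exactly this shape at the start of the body:
pkt_line.match?(/^[0-9a-f]{4}# service=git-upload-pack/) # => true
```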
@@ -83,7 +83,7 @@
.js-toggle-content.toggle-import-form{ class: ('hide' if active_tab != 'import') }
= form_for @project, html: { class: 'new_project gl-show-field-errors' } do |f|
%hr
= render "shared/import_form", f: f
= render 'projects/new_project_fields', f: f, project_name_id: "import-url-name", hide_init_with_readme: true, track_label: track_label
@@ -9,17 +9,12 @@
= f.text_field :import_url, value: import_url.sanitized_url,
autocomplete: 'off', class: 'form-control gl-form-input', placeholder: 'https://gitlab.company.com/group/project.git', required: true
= render 'shared/global_alert',
variant: :danger,
alert_class: 'gl-mt-3 js-import-url-error hide',
dismissible: false,
close_button_class: 'js-close-2fa-enabled-success-alert' do
.gl-alert-body
= s_('Import|There is not a valid Git repository at this URL. If your HTTP repository is not publicly accessible, verify your credentials.')
.row
.form-group.col-md-6
= f.label :import_url_user, class: 'label-bold' do
...
---
name: reference_cache_memoization
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71310
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/341849
milestone: '14.4'
type: development
group: group::source code
default_enabled: false
---
name: security_report_ingestion_framework
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/66735
rollout_issue_url:
milestone: '14.4'
type: development
group: group::threat insights
default_enabled: false
---
name: use_cte_for_any_project_with_shared_runners_enabled
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/71452
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/342024
milestone: '14.4'
type: development
group: group::optimize
default_enabled: false
@@ -12,6 +12,10 @@ end
namespace :import do
resources :available_namespaces, only: [:index], controller: :available_namespaces
namespace :url do
post :validate
end
resource :github, only: [:create, :new], controller: :github do
post :personal_access_token
get :status
...
@@ -162,6 +162,9 @@ be disabled on the **primary** site:

## Finish replicating and verifying all data

NOTE:
GitLab 13.9 through GitLab 14.3 are affected by a bug in which the Geo secondary site statuses will appear to stop updating and become unhealthy. For more information, see [Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](../replication/troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).

1. If you are manually replicating any data not managed by Geo, trigger the
   final replication process now.
1. On the **primary** node:
@@ -192,12 +195,13 @@ At this point, your **secondary** node contains an up-to-date copy of everything

## Promote the **secondary** node

After the replication is finished, [promote the **secondary** node to a **primary** node](index.md). This process causes a brief outage on the **secondary** node, and users may need to log in again. If you follow the steps correctly, the old primary Geo site should still be disabled and user traffic should go to the newly-promoted site instead.

When the promotion is completed, the maintenance window is over, and your new **primary** node now
begins to diverge from the old one. If problems do arise at this point, failing
back to the old **primary** node [is possible](bring_primary_back.md), but likely to result
in the loss of any data uploaded to the new **primary** in the meantime.

Don't forget to remove the broadcast message after the failover is complete.

Finally, you can bring the [old site back as a secondary](bring_primary_back.md#configure-the-former-primary-node-to-be-a-secondary-node).
@@ -63,6 +63,9 @@ Before following any of those steps, make sure you have `root` access to the
**secondary** to promote it, since an automated way to promote a Geo replica and
perform a failover is not provided.

NOTE:
GitLab 13.9 through GitLab 14.3 are affected by a bug in which the Geo secondary site statuses will appear to stop updating and become unhealthy. For more information, see [Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](../../replication/troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).

On the **secondary** node:

1. On the top bar, select **Menu > Admin**.
...
@@ -51,6 +51,9 @@ Before following any of those steps, make sure you have `root` access to the
**secondary** to promote it, since an automated way to promote a Geo replica and
perform a failover is not provided.

NOTE:
GitLab 13.9 through GitLab 14.3 are affected by a bug in which the Geo secondary site statuses will appear to stop updating and become unhealthy. For more information, see [Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](../../replication/troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).

On the **secondary** node, navigate to the **Admin Area > Geo** dashboard to
review its status. Replicated objects (shown in green) should be close to 100%,
and there should be no failures (shown in red). If a large proportion of
...
@@ -83,7 +83,7 @@ Checking Geo ... Finished

#### Sync status Rake task

Current sync information can be found manually by running this Rake task on any
node running Rails (Puma, Sidekiq, or Geo Log Cursor) on the Geo **secondary** site:

```shell
sudo gitlab-rake geo:status
```

@@ -923,6 +923,14 @@ To resolve this issue:

If using a load balancer, ensure that the load balancer's URL is set as the `external_url` in the
`/etc/gitlab/gitlab.rb` of the nodes behind the load balancer.
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
In GitLab 13.9 through GitLab 14.3, when [GitLab Maintenance Mode](../../maintenance_mode/index.md) is enabled, the status of Geo secondary sites will stop getting updated. After 10 minutes, the status will become `Unhealthy`.
Geo secondary sites will continue to replicate and verify data, and the secondary sites should still be usable. You can use the [Sync status Rake task](#sync-status-rake-task) to determine the actual status of a secondary site during Maintenance Mode.
This bug was [fixed in GitLab 14.4](https://gitlab.com/gitlab-org/gitlab/-/issues/292983).
### GitLab Pages return 404 errors after promoting

This is due to [Pages data not being managed by Geo](datatypes.md#limitations-on-replicationverification).
...
@@ -13,6 +13,8 @@ for updating Geo nodes.

## Updating to 14.1, 14.2, 14.3

### Multi-arch images

We found an [issue](https://gitlab.com/gitlab-org/gitlab/-/issues/336013) where the Container Registry replication wasn't fully working if you used multi-arch images. In case of a multi-arch image, only the primary architecture (for example `amd64`) would be replicated to the secondary node. This has been [fixed in GitLab 14.3](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/67624) and was backported to 14.2 and 14.1, but manual steps are required to force a re-sync.

You can check if you are affected by running:

@@ -46,18 +48,28 @@ Otherwise, on all your **secondary** nodes, in a [Rails console](../../operation

If you are running a version prior to 14.1 and are using Geo and multi-arch containers in your Container Registry, we recommend [upgrading](updating_the_geo_sites.md) to at least GitLab 14.1.
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) will cause Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 14.0/14.1

### Primary sites can not be removed from the UI

We found an issue where [Primary sites can not be removed from the UI](https://gitlab.com/gitlab-org/gitlab/-/issues/338231).

This bug only exists in the UI and does not block the removal of Primary sites using any other method.

If you are running an affected version and need to remove your Primary site, you can manually remove the Primary site by using the [Geo Nodes API](../../../api/geo_nodes.md#delete-a-geo-node).

### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode

GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) will cause Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.12

### Secondary nodes re-download all LFS files upon update

We found an issue where [secondary nodes re-download all LFS files](https://gitlab.com/gitlab-org/gitlab/-/issues/334550) upon update. This bug:

- Only applies to Geo secondary sites that have replicated LFS objects.
@@ -68,7 +80,7 @@ We found an issue where [secondary nodes re-download all LFS files](https://gitl

If you don't have many LFS objects or can stand a bit of churn, then it is safe to let the secondary sites re-download LFS objects.

If you do have many LFS objects, or many Geo secondary sites, or limited bandwidth, or a combination of them all, then we recommend you skip GitLab 13.12.0 through 13.12.6 and update to GitLab 13.12.7 or newer.

#### If you have already updated to an affected version, and the re-sync is ongoing

You can manually migrate the legacy sync state to the new state column by running the following command in a [Rails console](../../operations/rails_console.md). It should take under a minute:

@@ -76,15 +88,31 @@ You can manually migrate the legacy sync state to the new state column by runnin

```ruby
Geo::LfsObjectRegistry.where(state: 0, success: true).update_all(state: 2)
```
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) will cause Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.11

We found an [issue with Git clone/pull through HTTP(s)](https://gitlab.com/gitlab-org/gitlab/-/issues/330787) on Geo secondaries and on any GitLab instance if maintenance mode is enabled. This was caused by a regression in GitLab Workhorse. This is fixed in the [GitLab 13.11.4 patch release](https://about.gitlab.com/releases/2021/05/14/gitlab-13-11-4-released/). To avoid this issue, upgrade to GitLab 13.11.4 or later.
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) will cause Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.10
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) will cause Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.9
### Error during zero-downtime update: "cannot drop column asset_proxy_whitelist"
We've detected an issue [with a column rename](https://gitlab.com/gitlab-org/gitlab/-/issues/324160)
that will prevent upgrades to GitLab 13.9.0, 13.9.1, 13.9.2 and 13.9.3 when following the zero-downtime steps. It is necessary
to perform the following additional steps for the zero-downtime update:

1. Before running the final `sudo gitlab-rake db:migrate` command on the deploy node,
   execute the following queries using the PostgreSQL console (or `sudo gitlab-psql`)
@@ -118,6 +146,10 @@ DETAIL: trigger trigger_0d588df444c8 on table application_settings depends on co

To work around this bug, follow the previous steps to complete the update.
More details are available [in this issue](https://gitlab.com/gitlab-org/gitlab/-/issues/324160).
### Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode
GitLab 13.9 through GitLab 14.3 are affected by a bug in which enabling [GitLab Maintenance Mode](../../maintenance_mode/index.md) will cause Geo secondary site statuses to appear to stop updating and become unhealthy. For more information, see [Troubleshooting - Geo Admin Area shows 'Unhealthy' after enabling Maintenance Mode](troubleshooting.md#geo-admin-area-shows-unhealthy-after-enabling-maintenance-mode).
## Updating to GitLab 13.7

We've detected an issue with the `FetchRemove` call used by Geo secondaries.
...
@@ -42,6 +42,11 @@ export default {
required: false,
default: undefined,
},
badgeText: {
type: String,
required: false,
default: '',
},
},
computed: {
noteAnchor() {
...
import { __ } from '~/locale';
export const issueStates = {
OPENED: 'opened',
CLOSED: 'closed',
};
export const issueStateLabels = {
[issueStates.OPENED]: __('Open'),
[issueStates.CLOSED]: __('Closed'),
};
export const labelsFilterParam = 'labels';
@@ -7,6 +7,7 @@ import {
GlBadge,
GlTooltipDirective as GlTooltip,
} from '@gitlab/ui';
import Note from 'ee/external_issues_show/components/note.vue';
import { fetchIssue, fetchIssueStatuses, updateIssue } from 'ee/integrations/jira/issues_show/api';
import JiraIssueSidebar from 'ee/integrations/jira/issues_show/components/sidebar/jira_issues_sidebar_root.vue';
@@ -15,7 +16,6 @@ import createFlash from '~/flash';
import IssuableShow from '~/issuable_show/components/issuable_show_root.vue';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { s__ } from '~/locale';
export default {
name: 'JiraIssuesShow',
...
<script>
import Assignee from 'ee/external_issues_show/components/sidebar/assignee.vue';
import IssueDueDate from 'ee/external_issues_show/components/sidebar/issue_due_date.vue';
import IssueField from 'ee/external_issues_show/components/sidebar/issue_field.vue';
import { labelsFilterParam } from 'ee/integrations/jira/issues_show/constants';
import { __, s__ } from '~/locale';
import CopyableField from '~/vue_shared/components/sidebar/copyable_field.vue';
import LabelsSelect from '~/vue_shared/components/sidebar/labels_select_vue/labels_select_root.vue';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
export default {
name: 'JiraIssuesSidebar',
...
@@ -69,13 +69,10 @@ export default {
'filterParams',
]),
showFilteredSearchbar() {
if (this.epicsFetchResultEmpty) {
return this.hasFiltersApplied;
}
return true;
},
timeframeStart() {
return this.timeframe[0];
...
@@ -203,7 +203,7 @@ export default {
</gl-form-group>
<gl-dropdown
:text="selectedEpicStateTitle"
class="gl-mr-0 gl-lg-mr-3 mb-sm-2 dropdown-epics-state"
toggle-class="gl-rounded-base!"
>
<gl-dropdown-item
...
@@ -8,7 +8,6 @@ module Groups
before_action :check_epics_available!
before_action :persist_roadmap_layout, only: [:show]
before_action do
push_frontend_feature_flag(:performance_roadmap, @group, default_enabled: :yaml)
push_frontend_feature_flag(:roadmap_daterange_filter, @group, type: :development, default_enabled: :yaml)
end
...
@@ -260,10 +260,10 @@ module EE
def any_project_with_shared_runners_enabled?
if ::Feature.enabled?(:cache_shared_runners_enabled, self, default_enabled: :yaml)
Rails.cache.fetch([self, :has_project_with_shared_runners_enabled], expires_in: 5.minutes) do
any_project_with_shared_runners_enabled_with_cte?
end
else
any_project_with_shared_runners_enabled_with_cte?
end
end
@@ -420,6 +420,25 @@ module EE
private
def any_project_with_shared_runners_enabled_with_cte?
if ::Feature.enabled?(:use_cte_for_any_project_with_shared_runners_enabled, self, default_enabled: :yaml)
projects_query = if user_namespace?
projects
else
cte = ::Gitlab::SQL::CTE.new(:namespace_self_and_descendants_cte, self_and_descendant_ids)
::Project
.with(cte.to_arel)
.from([::Project.table_name, cte.table.name].join(', '))
.where(::Project.arel_table[:namespace_id].eq(cte.table[:id]))
end
projects_query.with_shared_runners.any?
else
all_projects.with_shared_runners.any?
end
end
def fallback_plan
if ::Gitlab.com?
::Plan.free
...
@@ -40,18 +40,47 @@ module Security
.merge(Vulnerabilities::Feedback.for_dismissal)
end
scope :latest, -> { where(latest: true) }
scope :latest_successful_by_build, -> { joins(:build).where(ci_builds: { retried: [nil, false], status: 'success' }) }
scope :without_errors, -> { where("jsonb_array_length(COALESCE(info->'errors', '[]'::jsonb)) = 0") }
delegate :name, to: :build
before_save :ensure_project_id_pipeline_id
def has_errors?
processing_errors.present?
end
def processing_errors
info&.fetch('errors', [])
end
def processing_errors=(errors)
info['errors'] = errors
end
def add_processing_error!(error)
info['errors'] = processing_errors.push(error.stringify_keys)
save!
end
# Returns the findings from the source report
def report_findings
@report_findings ||= security_report&.findings.to_a
end end
private
def security_report
job_artifact&.security_report
end
def job_artifact
build.job_artifacts.find_by_file_type(scan_type)
end
def ensure_project_id_pipeline_id
self.project_id ||= build.project_id
self.pipeline_id ||= build.commit_id
...
@@ -30,6 +30,7 @@ module Vulnerabilities
validates :pipeline, same_project_association: true, if: :pipeline_id?
scope :with_associations, -> { includes(:pipeline, :issue, :merge_request, :author, :comment_author) }
scope :by_finding_uuid, -> (uuids) { where(finding_uuid: uuids) }
scope :all_preloaded, -> do
preload(:author, :comment_author, :project, :issue, :merge_request, :pipeline)
...
@@ -11,6 +11,10 @@ module Vulnerabilities
# https://gitlab.com/gitlab-org/gitlab/-/issues/214563#note_370782508 is why the table names are not renamed
self.table_name = "vulnerability_occurrences"
# This is necessary to prevent updating the
# created_at attribute with upsert queries.
attr_readonly(:created_at)
FINDINGS_PER_PAGE = 20
MAX_NUMBER_OF_IDENTIFIERS = 20
REPORT_TYPES_WITH_LOCATION_IMAGE = %w[container_scanning cluster_image_scanning].freeze
...
@@ -4,6 +4,10 @@ module Vulnerabilities
class FindingIdentifier < ApplicationRecord
self.table_name = "vulnerability_occurrence_identifiers"
# This is necessary to prevent updating the
# created_at attribute with upsert queries.
attr_readonly(:created_at)
alias_attribute :finding_id, :occurrence_id
belongs_to :finding, class_name: 'Vulnerabilities::Finding', inverse_of: :finding_identifiers, foreign_key: 'occurrence_id'
...
@@ -9,5 +9,7 @@ module Vulnerabilities
validates :finding, presence: true
validates :url, presence: true, length: { maximum: 255 }
validates :name, length: { maximum: 2048 }
scope :by_finding_id, -> (finding_ids) { where(vulnerability_occurrence_id: finding_ids) }
end
end
@@ -5,7 +5,13 @@ module Vulnerabilities
class FindingRemediation < ApplicationRecord
self.table_name = 'vulnerability_findings_remediations'
# This is necessary to prevent updating the
# created_at attribute with upsert queries.
attr_readonly(:created_at)
belongs_to :finding, class_name: 'Vulnerabilities::Finding', inverse_of: :finding_remediations, foreign_key: 'vulnerability_occurrence_id', optional: false
belongs_to :remediation, class_name: 'Vulnerabilities::Remediation', inverse_of: :finding_remediations, foreign_key: 'vulnerability_remediation_id', optional: false
scope :by_finding_id, -> (finding_ids) { where(vulnerability_occurrence_id: finding_ids) }
end
end
@@ -2,11 +2,15 @@
module Vulnerabilities
class FindingSignature < ApplicationRecord
include BulkInsertSafe
include VulnerabilityFindingSignatureHelpers
self.table_name = 'vulnerability_finding_signatures'
# This is necessary to prevent updating the
# created_at attribute with upsert queries.
attr_readonly(:created_at)
belongs_to :finding, foreign_key: 'finding_id', inverse_of: :signatures, class_name: 'Vulnerabilities::Finding'
enum algorithm_type: VulnerabilityFindingSignatureHelpers::ALGORITHM_TYPES, _prefix: :algorithm
validates :finding, presence: true
...
@@ -8,7 +8,7 @@ module Security
delegator_override :errors
def errors
processing_errors.to_a.map { |error| format(ERROR_MESSAGE_FORMAT, error.symbolize_keys) }
end
end
end
@@ -36,7 +36,6 @@ module NetworkPolicies
def setup_resource
@resource = policy.generate
resource[:metadata][:namespace] = kubernetes_namespace
end
def load_policy_from_resource
@@ -56,18 +55,26 @@ end
end
def deploy_cilium_network_policy
return platform.kubeclient.create_cilium_network_policy(resource) unless resource_name
if resource_name != resource.dig(:metadata, :name)
platform.kubeclient.delete_cilium_network_policy(resource_name, kubernetes_namespace)
resource[:metadata][:resourceVersion] = nil
platform.kubeclient.create_cilium_network_policy(resource)
else
platform.kubeclient.update_cilium_network_policy(resource)
end
end
def deploy_network_policy
return platform.kubeclient.create_network_policy(resource) unless resource_name
if resource_name != resource.dig(:metadata, :name)
platform.kubeclient.delete_network_policy(resource_name, kubernetes_namespace)
resource[:metadata][:resourceVersion] = nil
platform.kubeclient.create_network_policy(resource)
else
platform.kubeclient.update_network_policy(resource)
end
end
end
...
# frozen_string_literal: true
module Security
module Ingestion
class AbstractTask
def self.execute(pipeline, finding_maps)
new(pipeline, finding_maps).execute
end
def initialize(pipeline, finding_maps)
@pipeline = pipeline
@finding_maps = finding_maps
end
def execute
raise "Implement the `execute` template method!"
end
private
attr_reader :pipeline, :finding_maps
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
#
# Provides a DSL to define bulk insertable ingestion tasks.
#
# Tasks including this module should set some configuration value(s)
# and implement the template method(s).
#
# Configuration values;
#
# `model`: The ActiveRecord model which the task is ingesting the data for.
# `unique_by`: Optional value to set unique constraint which will be used by
# PostgreSQL to update records on conflict. The task raises an exception
# in case of a conflict if this is not set.
# `uses`: Optional value to set return columns of the insert query.
# The method named `after_ingest` will be called if this value is set.
#
# Template methods;
#
# `attributes`: Returns an array of Hash objects that contain the name of the attributes and their values
# as key & value pairs.
# `after_ingest`: If the task uses the return value(s) of the insert query, this method will
# be called. The return data of the insert query is accessible through the `return_data` method.
#
module BulkInsertableTask
include Gitlab::Utils::StrongMemoize
def self.included(base)
base.singleton_class.attr_accessor :model, :unique_by, :uses
end
def execute
result_set
after_ingest if uses
end
private
delegate :unique_by, :model, :uses, :cast_values, to: :'self.class', private: true
def return_data
@return_data ||= result_set&.cast_values(model.attribute_types).to_a
end
def result_set
strong_memoize(:result_set) do
if insert_attributes.present?
ActiveRecord::InsertAll.new(model, insert_attributes, on_duplicate: on_duplicate, returning: uses, unique_by: unique_by).execute
end
end
end
def after_ingest
raise "Implement the `after_ingest` template method!"
end
def attributes
raise "Implement the `attributes` template method!"
end
def insert_attributes
@insert_attributes ||= attributes.map { |values| values.merge(timestamps) }
end
def timestamps
@timestamps ||= Time.zone.now.then { |time| { created_at: time, updated_at: time } }
end
def on_duplicate
unique_by.present? ? :update : :skip
end
end
end
end
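To illustrate the DSL, here is a hypothetical task built on this module; the `IngestWidgets` class and `Widget` model are invented for the example, while the real tasks further below (such as `IngestFindings`) follow the same shape:

```ruby
module Security
  module Ingestion
    module Tasks
      # Hypothetical example: UPSERTs Widget rows in bulk and reads back their IDs.
      class IngestWidgets < AbstractTask
        include BulkInsertableTask

        self.model = Widget                 # ActiveRecord model to insert into
        self.unique_by = %i[project_id key] # ON CONFLICT target for the upsert
        self.uses = %i[id key]              # columns returned by the INSERT

        private

        # Template method: one attributes hash per row to be inserted.
        def attributes
          finding_maps.map do |finding_map|
            { project_id: pipeline.project_id, key: finding_map.uuid }
          end
        end

        # Called because `uses` is set; `return_data` holds the returned columns.
        def after_ingest
          return_data.each { |id, key| ids_by_key[key] = id }
        end

        def ids_by_key
          @ids_by_key ||= {}
        end
      end
    end
  end
end
```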
# frozen_string_literal: true
module Security
module Ingestion
#
# Provides a DSL to define bulk updatable ingestion tasks.
#
# Tasks including this module should set a configuration value
# and implement the template method.
#
# Configuration value;
#
# `model`: The ActiveRecord model which the task is ingesting the data for.
#
# Template method;
#
# `attributes`: Returns an array of Hash objects that contain the name of the attributes and their values
# as key & value pairs.
#
module BulkUpdatableTask
include Gitlab::Utils::StrongMemoize
SQL_TEMPLATE = <<~SQL
UPDATE
%<table_name>s
SET
%<set_values>s
FROM
(%<values>s) AS map(%<map_schema>s)
WHERE
%<table_name>s.%<primary_key>s = map.%<primary_key>s
SQL
def self.included(base)
base.singleton_class.attr_accessor :model
end
def execute
return unless attribute_names.present?
connection.execute(update_sql)
end
private
delegate :model, to: :'self.class', private: true
delegate :table_name, :primary_key, :column_for_attribute, :type_for_attribute, :connection, to: :model, private: true
def update_sql
format(SQL_TEMPLATE, table_name: table_name, set_values: set_values, values: values, primary_key: primary_key, map_schema: map_schema)
end
def set_values
attribute_names.map do |attribute|
"#{attribute} = map.#{attribute}::#{sql_type_for(attribute)}"
end.join(', ')
end
def sql_type_for(attribute)
column_for_attribute(attribute).sql_type
end
def values
attributes.map { |attribute_map| build_values_for(attribute_map) }
.then { |serialized_attributes| Arel::Nodes::ValuesList.new(serialized_attributes) }
.to_sql
end
def build_values_for(attribute_map)
attribute_map.map { |attribute, value| type_for_attribute(attribute).serialize(value) }
end
def map_schema
attribute_names.join(', ')
end
def attribute_names
strong_memoize(:attribute_names) do
attributes.first&.keys
end
end
def attributes
raise "Implement the `attributes` template method!"
end
end
end
end
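As a sketch of the SQL this template produces, assume `model` is `Vulnerabilities::Finding` (table `vulnerability_occurrences`, bigint columns) and `attributes` returns `[{ id: 101, vulnerability_id: 1 }, { id: 102, vulnerability_id: 2 }]`; `update_sql` would then render roughly as below. Note that every attribute, including the primary key, appears in the SET clause, because `set_values` maps over all of `attribute_names`.

```ruby
# Illustrative only: the rendered UPDATE ... FROM (VALUES ...) statement.
sql = <<~SQL
  UPDATE
    vulnerability_occurrences
  SET
    id = map.id::bigint, vulnerability_id = map.vulnerability_id::bigint
  FROM
    (VALUES (101, 1), (102, 2)) AS map(id, vulnerability_id)
  WHERE
    vulnerability_occurrences.id = map.id
SQL
```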
# frozen_string_literal: true
module Security
module Ingestion
# This entity is used in ingestion services to
# map security_finding - report_finding - vulnerability_id - finding_id
#
# You can think of this as the Message object in the pipeline design pattern,
# which is passed between tasks.
class FindingMap
FINDING_ATTRIBUTES = %i[confidence metadata_version name raw_metadata report_type severity details].freeze
RAW_METADATA_ATTRIBUTES = %w[description message solution cve location].freeze
RAW_METADATA_PLACEHOLDER = { description: nil, message: nil, solution: nil, cve: nil, location: nil }.freeze
attr_reader :security_finding, :report_finding
attr_accessor :finding_id, :vulnerability_id, :new_record, :identifier_ids
delegate :uuid, :scanner_id, to: :security_finding
def initialize(security_finding, report_finding)
@security_finding = security_finding
@report_finding = report_finding
@identifier_ids = []
end
def identifiers
@identifiers ||= report_finding.identifiers.first(Vulnerabilities::Finding::MAX_NUMBER_OF_IDENTIFIERS)
end
def set_identifier_ids_by(fingerprint_id_map)
@identifier_ids = identifiers.map { |identifier| fingerprint_id_map[identifier.fingerprint] }
end
def to_hash
# This was already an existing problem so we've used it here as well.
# TODO: https://gitlab.com/gitlab-org/gitlab/-/issues/342043
parsed_from_raw_metadata = Gitlab::Json.parse(report_finding.raw_metadata).slice(*RAW_METADATA_ATTRIBUTES).symbolize_keys
report_finding.to_hash
.slice(*FINDING_ATTRIBUTES)
.merge(RAW_METADATA_PLACEHOLDER)
.merge(parsed_from_raw_metadata)
.merge(primary_identifier_id: identifier_ids.first, location_fingerprint: report_finding.location.fingerprint, project_fingerprint: report_finding.project_fingerprint)
.merge(uuid: uuid, scanner_id: scanner_id)
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
class FindingMapCollection
include Enumerable
def initialize(security_scan)
@security_scan = security_scan
end
def each
return to_enum(:each) unless block_given?
deduplicated_findings.each do |security_finding|
yield create_finding_map_for(security_finding)
end
end
private
attr_reader :security_scan
delegate :findings, :report_findings, to: :security_scan, private: true
def create_finding_map_for(security_finding)
# For SAST findings, we override the finding UUID with an existing finding UUID
# if we have a matching one.
report_uuid = security_finding.overridden_uuid || security_finding.uuid
FindingMap.new(security_finding, report_findings_map[report_uuid])
end
def report_findings_map
@report_findings_map ||= report_findings.index_by(&:uuid)
end
def deduplicated_findings
@deduplicated_findings ||= findings.deduplicated
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
# This class splits the ingestion of the vulnerabilities
# of a security scan into multiple batches.
#
# Returns the ingested vulnerability IDs for each batch.
class IngestReportService
BATCH_SIZE = 50
SCAN_INGESTION_ERROR = {
type: 'IngestionError',
message: 'Ingestion failed for some vulnerabilities'
}.freeze
def self.execute(security_scan)
new(security_scan).execute
end
def initialize(security_scan)
@security_scan = security_scan
@errored = false
end
def execute
finding_map_collection.each_slice(BATCH_SIZE).flat_map { |slice| ingest_slice(slice) }
end
private
attr_reader :security_scan
attr_accessor :errored
delegate :pipeline, to: :security_scan, private: true
def finding_map_collection
@finding_map_collection ||= FindingMapCollection.new(security_scan)
end
def ingest_slice(slice)
IngestReportSliceService.execute(pipeline, slice)
rescue StandardError => error
process_error(error)
end
def process_error(error)
Gitlab::ErrorTracking.track_exception(error)
set_ingestion_error!
# we are explicitly returning an empty array for the caller service.
# Otherwise, the return value will be the result of the `set_ingestion_error!` method.
[]
end
def set_ingestion_error!
return if errored
self.errored = true
security_scan.add_processing_error!(SCAN_INGESTION_ERROR)
end
end
end
end
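The slicing behavior in miniature (plain Ruby with invented counts): 120 findings and `BATCH_SIZE = 50` produce three independent slices, and a slice that raises contributes an empty array instead of aborting the remaining slices:

```ruby
BATCH_SIZE = 50
findings = (1..120).to_a

ingested_ids = findings.each_slice(BATCH_SIZE).flat_map do |slice|
  # Each slice would go to IngestReportSliceService in its own transaction;
  # here we pretend the second slice (the one containing 51) raised and was rescued.
  slice.include?(51) ? [] : slice
end

ingested_ids.size # => 70 (the errored slice of 50 is skipped)
```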
# frozen_string_literal: true
module Security
module Ingestion
# Base class to organize the chain of responsibilities
# for the report slice.
#
# Returns the ingested vulnerability IDs.
class IngestReportSliceService
TASKS = %i[
IngestIdentifiers
IngestFindings
IngestVulnerabilities
AttachFindingsToVulnerabilities
IngestFindingPipelines
IngestFindingIdentifiers
IngestFindingLinks
IngestFindingSignatures
IngestRemediations
].freeze
def self.execute(pipeline, finding_maps)
new(pipeline, finding_maps).execute
end
def initialize(pipeline, finding_maps)
@pipeline = pipeline
@finding_maps = finding_maps
end
def execute
ApplicationRecord.transaction do
TASKS.each { |task| execute_task(task) }
end
@finding_maps.map(&:vulnerability_id)
end
private
def execute_task(task)
Tasks.const_get(task, false).execute(@pipeline, @finding_maps)
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
# Service for starting the ingestion of the security reports
# into the database.
class IngestReportsService
def self.execute(pipeline)
new(pipeline).execute
end
def initialize(pipeline)
@pipeline = pipeline
end
def execute
store_reports
mark_project_as_vulnerable!
set_latest_pipeline!
end
private
attr_reader :pipeline
delegate :project, to: :pipeline, private: true
def store_reports
latest_security_scans.flat_map(&method(:ingest))
.then(&method(:mark_resolved_vulnerabilities))
end
def latest_security_scans
pipeline.security_scans.without_errors.latest
end
def ingest(security_scan)
IngestReportService.execute(security_scan)
end
# This can cause issues if we have lots of existing ids
# or if we try to update lots of records at once.
# Maybe we can extract this into a different service class
# and update the records iteratively.
def mark_resolved_vulnerabilities(existing_ids)
project.vulnerabilities
.id_not_in(existing_ids)
.update_all(resolved_on_default_branch: true)
end
def mark_project_as_vulnerable!
project.project_setting.update!(has_vulnerabilities: true)
end
def set_latest_pipeline!
Vulnerabilities::Statistic.set_latest_pipeline_with(pipeline)
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Updates the `vulnerability_id` attribute of finding records.
class AttachFindingsToVulnerabilities < AbstractTask
include BulkUpdatableTask
self.model = Vulnerabilities::Finding
private
def attributes
new_finding_maps.map { |finding_map| attributes_for(finding_map) }
end
def new_finding_maps
@new_finding_maps ||= finding_maps.select(&:new_record)
end
def attributes_for(finding_map)
{
id: finding_map.finding_id,
vulnerability_id: finding_map.vulnerability_id
}
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Links findings with identifiers by creating the
# `Vulnerabilities::FindingIdentifier` records.
class IngestFindingIdentifiers < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingIdentifier
self.unique_by = %i[occurrence_id identifier_id].freeze
private
def attributes
finding_maps.flat_map do |finding_map|
finding_map.identifier_ids.map do |identifier_id|
{
occurrence_id: finding_map.finding_id,
identifier_id: identifier_id
}
end
end
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Creates new `Vulnerabilities::FindingLink` records.
class IngestFindingLinks < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingLink
private
def attributes
finding_maps.flat_map do |finding_map|
new_links_for(finding_map).map do |link|
{
vulnerability_occurrence_id: finding_map.finding_id,
name: link.name,
url: link.url
}
end
end
end
def new_links_for(finding_map)
existing_links = existing_finding_links[finding_map.finding_id].to_a
existing_urls = existing_links.map(&:url)
finding_map.report_finding.links.reject { |link| existing_urls.include?(link.url) }
end
def existing_finding_links
@existing_finding_links ||= Vulnerabilities::FindingLink.by_finding_id(finding_ids)
.group_by(&:vulnerability_occurrence_id)
end
def finding_ids
finding_maps.map(&:finding_id)
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Links findings with pipelines by creating the
# `Vulnerabilities::FindingPipeline` records.
class IngestFindingPipelines < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingPipeline
private
def attributes
finding_maps.map do |finding_map|
{ pipeline_id: pipeline.id, occurrence_id: finding_map.finding_id }
end
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestFindingSignatures < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingSignature
self.unique_by = %i[finding_id algorithm_type signature_sha].freeze
private
def attributes
finding_maps.flat_map { |finding_map| attributes_for(finding_map) }
end
def attributes_for(finding_map)
finding_map.report_finding.signatures.map do |signature|
{
finding_id: finding_map.finding_id,
algorithm_type: signature.algorithm_type,
signature_sha: signature.signature_sha
}
end
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestFindings < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::Finding
self.unique_by = :uuid
self.uses = %i[id vulnerability_id].freeze
private
delegate :project, to: :pipeline, private: true
def after_ingest
return_data.each_with_index do |(finding_id, vulnerability_id), index|
finding_map = finding_maps[index]
finding_map.finding_id = finding_id
finding_map.vulnerability_id = vulnerability_id
end
end
def attributes
finding_maps.map { |finding_map| finding_map.to_hash.merge(project_id: project.id) }
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# UPSERTs the identifiers for the given findings and
# sets the identifier IDs for each `finding_map`.
class IngestIdentifiers < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::Identifier
self.unique_by = %i[project_id fingerprint]
self.uses = %i[fingerprint id]
private
delegate :project, to: :pipeline, private: true
def after_ingest
return_data.to_h.then do |fingerprint_to_id_map|
finding_maps.each { |finding_map| finding_map.set_identifier_ids_by(fingerprint_to_id_map) }
end
end
def attributes
report_identifiers.map do |identifier|
identifier.to_hash.merge!(project_id: project.id)
end
end
def report_identifiers
@report_identifiers ||= finding_maps.flat_map(&:identifiers).uniq(&:fingerprint)
end
end
end
end
end
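The tasks in this commit share a small contract: `model` names the target table, `unique_by` is the upsert conflict target, `uses` lists the columns returned from the insert (exposed to `after_ingest` as `return_data`), and `attributes` supplies the rows. A hedged sketch of a custom task following that contract; the model and table below are invented for illustration and are not part of this commit:

module Security
  module Ingestion
    module Tasks
      # Hypothetical task: one row per finding in an imaginary notes table.
      class IngestFindingNotes < AbstractTask
        include BulkInsertableTask

        self.model = Vulnerabilities::FindingNote # assumed model, illustration only
        self.unique_by = %i[finding_id]           # upsert conflict target
        self.uses = %i[id]                        # columns handed to after_ingest

        private

        def after_ingest
          # return_data holds the `uses` columns for every upserted row
          return_data.flatten.each { |id| Rails.logger.debug("ingested note #{id}") }
        end

        def attributes
          finding_maps.map { |finding_map| { finding_id: finding_map.finding_id } }
        end
      end
    end
  end
end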
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
# Creates Vulnerabilities::Remediation records for the new remediations
# and updates the Vulnerabilities::FindingRemediation records.
class IngestRemediations < AbstractTask
include BulkInsertableTask
self.model = Vulnerabilities::FindingRemediation
self.unique_by = %i[vulnerability_occurrence_id vulnerability_remediation_id]
self.uses = :id
private
delegate :project, to: :pipeline
def after_ingest
Vulnerabilities::FindingRemediation.by_finding_id(finding_maps.map(&:finding_id))
.id_not_in(return_data.flatten)
.delete_all
end
def attributes
finding_maps.flat_map do |finding_map|
remediations_for(finding_map.report_finding).map do |remediation|
{
vulnerability_occurrence_id: finding_map.finding_id,
vulnerability_remediation_id: remediation.id
}
end
end
end
def remediations_for(report_finding)
checksums = report_finding.remediations.map(&:checksum)
return [] unless checksums.present?
all_remediations.select { |remediation| checksums.include?(remediation.checksum) }
end
def all_remediations
@all_remediations ||= new_remediations + existing_remediations
end
def new_remediations
new_report_remediations.map do |remediation|
project.vulnerability_remediations.create(summary: remediation.summary, file: remediation.diff_file, checksum: remediation.checksum)
end
end
def new_report_remediations
existing_remediation_checksums = existing_remediations.map(&:checksum)
report_remediations.select { |remediation| !existing_remediation_checksums.include?(remediation.checksum) }
end
def existing_remediations
@existing_remediations ||= project.vulnerability_remediations.by_checksum(report_remediations.map(&:checksum)).to_a
end
def report_remediations
@report_remediations ||= finding_maps.map(&:report_finding).flat_map(&:remediations).uniq(&:checksum)
end
end
end
end
end
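Stripped of the memoization, the remediation bookkeeping above reduces to set operations on checksums: create records only for checksums the project has not seen, reuse persisted ones otherwise, and let `after_ingest` prune stale join rows. A small illustration of the selection on plain arrays (values are made up):

report_checksums   = %w[abc def]
existing_checksums = %w[abc]

# Only "def" needs a new Vulnerabilities::Remediation record.
new_checksums = report_checksums.reject { |checksum| existing_checksums.include?(checksum) }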
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestVulnerabilities < AbstractTask
def execute
create_new_vulnerabilities
update_existing_vulnerabilities
finding_maps
end
private
def create_new_vulnerabilities
IngestVulnerabilities::Create.new(pipeline, partitioned_maps.first).execute
end
def update_existing_vulnerabilities
IngestVulnerabilities::Update.new(pipeline, partitioned_maps.second).execute
end
def partitioned_maps
@partitioned_maps ||= finding_maps.partition { |finding_map| finding_map.vulnerability_id.nil? }
end
end
end
end
end
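`partition` splits the slice on whether an existing vulnerability was matched earlier in the chain: maps still carrying a nil `vulnerability_id` are created, the rest are updated. A minimal illustration of the split (OpenStruct stands in for FindingMap):

require 'ostruct'

maps = [OpenStruct.new(vulnerability_id: nil), OpenStruct.new(vulnerability_id: 42)]
to_create, to_update = maps.partition { |finding_map| finding_map.vulnerability_id.nil? }
# to_create => the map without a vulnerability yet (handled by Create)
# to_update => the map pointing at a persisted vulnerability (handled by Update)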
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestVulnerabilities
# Creates new vulnerability records in database for the given
# findings map by using a single database query.
class Create < AbstractTask
include BulkInsertableTask
self.model = Vulnerability
self.uses = :id
private
def after_ingest
return_data.each_with_index do |vulnerability_id, index|
finding_map = finding_maps[index]
finding_map.vulnerability_id = vulnerability_id
finding_map.new_record = true
end
end
def attributes
finding_maps.map { |finding_map| attributes_for(finding_map.report_finding, dismissal_feedback[finding_map.uuid]) }
end
def attributes_for(report_finding, feedback)
{
author_id: pipeline.user_id,
project_id: pipeline.project_id,
title: report_finding.name.to_s.truncate(::Issuable::TITLE_LENGTH_MAX),
state: :detected,
severity: report_finding.severity,
confidence: report_finding.confidence,
report_type: report_finding.report_type,
dismissed_at: feedback&.created_at,
dismissed_by_id: feedback&.author_id
}
end
def dismissal_feedback
@dismissal_feedback ||= Vulnerabilities::Feedback.by_finding_uuid(finding_uuids).index_by(&:finding_uuid)
end
def finding_uuids
finding_maps.map(&:uuid)
end
end
end
end
end
end
# frozen_string_literal: true
module Security
module Ingestion
module Tasks
class IngestVulnerabilities
# Updates the existing vulnerability records
# by using a single database query.
class Update < AbstractTask
include BulkUpdatableTask
self.model = Vulnerability
private
def attributes
finding_maps.map { |finding_map| attributes_for(finding_map.vulnerability_id, finding_map.report_finding) }
end
def attributes_for(vulnerability_id, report_finding)
{
id: vulnerability_id,
title: report_finding.name.truncate(::Issuable::TITLE_LENGTH_MAX),
severity: report_finding.severity,
confidence: report_finding.confidence,
updated_at: Time.zone.now
}
end
end
end
end
end
end
@@ -73,6 +73,8 @@ module Security
     end

     def mark_as_resolved_except(vulnerability_ids)
+      return if ::Vulnerabilities::Finding::REPORT_TYPES_REQUIRING_MANUAL_RESOLUTION.include?(report.type)
+
       project.vulnerabilities
         .with_report_types(report.type)
         .id_not_in(vulnerability_ids)
...
@@ -43,7 +43,7 @@ module Security
     def security_scan
       @security_scan ||= Security::Scan.safe_find_or_create_by!(build: artifact.job, scan_type: artifact.file_type) do |scan|
-        scan.info['errors'] = security_report.errors.map(&:stringify_keys) if security_report.errored?
+        scan.processing_errors = security_report.errors.map(&:stringify_keys) if security_report.errored?
       end
     end
...
@@ -12,9 +12,6 @@
 - has_filters_applied = params[:label_name].present? || params[:author_username].present? || params[:search].present?

-- if !Feature.enabled?(:async_filtering, @group, default_enabled: true)
-  = render 'shared/epic/search_bar', type: :epics, show_roadmap_presets: true, hide_extra_sort_options: true
-
 #js-roadmap{ data: { epics_path: group_epics_path(@group, format: :json),
   group_id: @group.id,
   full_path: @group.full_path,
...
@@ -18,7 +18,11 @@ class StoreSecurityReportsWorker # rubocop:disable Scalability/IdempotentWorker
     Ci::Pipeline.find(pipeline_id).try do |pipeline|
       break unless pipeline.project.can_store_security_reports?

-      ::Security::StoreReportsService.new(pipeline).execute
+      if Feature.enabled?(:security_report_ingestion_framework, pipeline.project)
+        ::Security::Ingestion::IngestReportsService.execute(pipeline)
+      else
+        ::Security::StoreReportsService.new(pipeline).execute
+      end

       if revoke_secret_detection_token?(pipeline)
         logger.info "StoreSecurityReportsWorker: token revocation started for pipeline: #{pipeline.id}"
...
# frozen_string_literal: true
FactoryBot.define do
factory :ci_reports_security_finding_signature, class: '::Gitlab::Ci::Reports::Security::FindingSignature' do
algorithm_type { :hash }
signature_value { SecureRandom.hex(50) }
skip_create
initialize_with do
::Gitlab::Ci::Reports::Security::FindingSignature.new(**attributes)
end
end
end
# frozen_string_literal: true
FactoryBot.define do
factory :finding_map, class: '::Security::Ingestion::FindingMap' do
security_finding
report_finding factory: :ci_reports_security_finding
trait :with_finding do
finding factory: :vulnerabilities_finding
end
trait :new_record do
with_finding
new_record { true }
vulnerability factory: :vulnerability
end
initialize_with do
::Security::Ingestion::FindingMap.new(*attributes.values_at(:security_finding, :report_finding)).tap do |object|
object.finding_id = attributes[:finding]&.id
object.vulnerability_id = attributes[:vulnerability]&.id
object.new_record = attributes[:new_record]
object.identifier_ids = attributes[:identifier_ids].to_a
end
end
skip_create
end
end
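A usage sketch for this factory, mirroring how the specs later in this commit build their inputs; note that `:new_record` stacks on `:with_finding`, so it yields a map with a persisted finding, an associated vulnerability, and `new_record` set:

# Bare map: only security_finding and report_finding are populated.
finding_map = create(:finding_map)

# Fully wired map, as used by the AttachFindingsToVulnerabilities spec below.
new_map = create(:finding_map, :new_record)
new_map.new_record       # => true
new_map.vulnerability_id # => id of the associated vulnerability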
@@ -6,5 +6,9 @@ FactoryBot.define do
     build factory: [:ci_build, :success]
     pipeline { build.pipeline }
     project { build.project }
+
+    trait :with_error do
+      info { { errors: [{ type: 'ParsingError', message: 'Unknown error happened' }] } }
+    end
   end
 end
@@ -12,7 +12,6 @@ RSpec.describe 'epics list', :js do
   before do
     stub_licensed_features(epics: true)
     stub_feature_flags(unfiltered_epic_aggregates: false)
-    stub_feature_flags(async_filtering: false)
     stub_feature_flags(vue_epics_list: false)

     sign_in(user)
@@ -127,42 +126,6 @@ RSpec.describe 'epics list', :js do
       expect(page).to have_button('Last updated')
     end

-    it 'sorts by the selected value and stores the selection for roadmap' do
-      visit group_roadmap_path(group)
-
-      page.within('.epics-other-filters') do
-        click_button 'Start date'
-        sort_options = find('ul.dropdown-menu-sort li').all('a').collect(&:text)
-
-        expect(sort_options[0]).to eq('Start date')
-        expect(sort_options[1]).to eq('Due date')
-
-        click_link 'Due date'
-      end
-
-      expect(page).to have_button('Due date')
-
-      page.within('.content-wrapper .content') do
-        page.within('.epics-list-section') do
-          page.within('div.epic-item-container:nth-child(1) div.epics-list-item') do
-            expect(page).to have_content(epic1.title)
-          end
-
-          page.within('div.epic-item-container:nth-child(2) div.epics-list-item') do
-            expect(page).to have_content(epic3.title)
-          end
-
-          page.within('div.epic-item-container:nth-child(3) div.epics-list-item') do
-            expect(page).to have_content(epic2.title)
-          end
-        end
-      end
-
-      visit group_roadmap_path(group)
-
-      expect(page).to have_button('Due date')
-    end
-
     it 'renders the epic detail correctly after clicking the link' do
       page.within('.content-wrapper .content .issuable-list') do
         click_link(epic1.title)
...
@@ -10,30 +10,34 @@ RSpec.describe 'group epic roadmap', :js do
   let(:user_dev) { create(:user) }
   let(:group) { create(:group) }
   let(:milestone) { create(:milestone, group: group) }
-  let(:filtered_search) { find('.filtered-search') }
-  let(:js_dropdown_label) { '#js-dropdown-label' }
-  let(:filter_dropdown) { find("#{js_dropdown_label} .filter-dropdown") }
   let(:state_dropdown) { find('.dropdown-epics-state') }

   let!(:bug_label) { create(:group_label, group: group, title: 'Bug') }
   let!(:critical_label) { create(:group_label, group: group, title: 'Critical') }

   def search_for_label(label)
-    init_label_search
-    filter_dropdown.find('.filter-dropdown-item', text: bug_label.title).click
-    filtered_search.send_keys(:enter)
+    page.within('.vue-filtered-search-bar-container .gl-search-box-by-click') do
+      page.find('input.gl-filtered-search-term-input').click
+      click_link 'Label'
+      # Select `=` operator
+      page.first('.gl-filtered-search-suggestion-list .gl-filtered-search-suggestion').click
+      wait_for_requests
+      page.find('.gl-filtered-search-suggestion-list .gl-filtered-search-suggestion', text: bug_label.title).click
+    end
+    page.find('.gl-search-box-by-click-search-button').click
   end

   before do
     stub_licensed_features(epics: true)
     stub_feature_flags(unfiltered_epic_aggregates: false)
-    stub_feature_flags(async_filtering: false)
     stub_feature_flags(performance_roadmap: false)
+    stub_feature_flags(roadmap_daterange_filter: false)

     sign_in(user)
   end

   context 'when epics exist for the group' do
+    available_tokens = %w[Author Label Milestone Epic My-Reaction]
+
     let!(:epic_with_bug) { create(:labeled_epic, group: group, start_date: 10.days.ago, end_date: 1.day.ago, labels: [bug_label]) }
     let!(:epic_with_critical) { create(:labeled_epic, group: group, start_date: 20.days.ago, end_date: 2.days.ago, labels: [critical_label]) }
     let!(:closed_epic) { create(:epic, :closed, group: group, start_date: 20.days.ago, end_date: 2.days.ago) }
@@ -45,25 +49,25 @@ RSpec.describe 'group epic roadmap', :js do
     describe 'roadmap page' do
       it 'renders roadmap preset buttons correctly' do
-        page.within('.js-btn-roadmap-presets') do
-          expect(page).to have_css('.btn-roadmap-preset input[value="QUARTERS"]')
-          expect(page).to have_css('.btn-roadmap-preset input[value="MONTHS"]')
-          expect(page).to have_css('.btn-roadmap-preset input[value="WEEKS"]')
+        page.within('.gl-segmented-control') do
+          expect(page).to have_css('input[value="QUARTERS"]')
+          expect(page).to have_css('input[value="MONTHS"]')
+          expect(page).to have_css('input[value="WEEKS"]')
         end
       end

       it 'renders the filtered search bar correctly' do
         page.within('.content-wrapper .content .epics-filters') do
-          expect(page).to have_css('.filtered-search-box')
+          expect(page).to have_css('.vue-filtered-search-bar-container')
         end
       end

       it 'renders the sort dropdown correctly' do
-        page.within('.content-wrapper .content .epics-other-filters') do
-          expect(page).to have_css('.filter-dropdown-container')
-          find('.epics-sort-btn').click
-          page.within('.dropdown-menu') do
-            expect(page).to have_selector('li a', count: 3)
+        page.within('.vue-filtered-search-bar-container') do
+          expect(page).to have_css('.sort-dropdown-container')
+          find('.sort-dropdown-container .dropdown-toggle').click
+          page.within('.sort-dropdown-container .dropdown-menu') do
+            expect(page).to have_selector('li button', count: 2)
             expect(page).to have_content('Start date')
             expect(page).to have_content('Due date')
           end
@@ -89,7 +93,7 @@ RSpec.describe 'group epic roadmap', :js do
       end

       it 'renders open epics only' do
-        state_dropdown.find('a', text: 'Open epics').click
+        state_dropdown.find('button', text: 'Open epics').click

         page.within('.roadmap-container .epics-list-section') do
           expect(page).to have_selector('.epics-list-item .epic-title', count: 2)
@@ -97,16 +101,17 @@ RSpec.describe 'group epic roadmap', :js do
       end

       it 'renders closed epics only' do
-        state_dropdown.find('a', text: 'Closed epics').click
+        state_dropdown.find('button', text: 'Closed epics').click

         page.within('.roadmap-container .epics-list-section') do
           expect(page).to have_selector('.epics-list-item .epic-title', count: 1)
         end
       end

-      it 'saves last selected epic state' do
-        state_dropdown.find('a', text: 'Open epics').click
+      it 'saves last selected epic state', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/341827' do
+        state_dropdown.find('button', text: 'Open epics').click
+        wait_for_all_requests

         visit group_roadmap_path(group)
         wait_for_requests
@@ -123,7 +128,7 @@ RSpec.describe 'group epic roadmap', :js do
       end

       it 'renders filtered search bar with applied filter token' do
-        expect_tokens([label_token(bug_label.title)])
+        expect_vue_tokens([label_token(bug_label.title)])
       end

       it 'renders roadmap view with matching epic' do
@@ -135,7 +140,7 @@ RSpec.describe 'group epic roadmap', :js do

       it 'keeps label filter when filtering by state' do
         state_dropdown.find('.dropdown-toggle').click
-        state_dropdown.find('a', text: 'Open epics').click
+        state_dropdown.find('button', text: 'Open epics').click

         page.within('.roadmap-container .epics-list-section') do
           expect(page).to have_selector('.epics-list-item .epic-title', count: 1)
@@ -143,6 +148,35 @@ RSpec.describe 'group epic roadmap', :js do
         end
       end
     end
+
+    describe 'filtered search tokens' do
+      let!(:epic1) { create(:epic, group: group, end_date: 10.days.ago) }
+      let!(:epic2) { create(:epic, group: group, start_date: 2.days.ago) }
+      let!(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: epic1) }
+
+      before do
+        group.add_developer(user_dev)
+        visit group_roadmap_path(group)
+        wait_for_requests
+      end
+
+      it_behaves_like 'filtered search bar', available_tokens
+    end
+
+    describe 'that is a sub-group' do
+      let!(:subgroup) { create(:group, parent: group, name: 'subgroup') }
+      let!(:sub_epic1) { create(:epic, group: subgroup, end_date: 10.days.ago) }
+      let!(:sub_epic2) { create(:epic, group: subgroup, start_date: 2.days.ago) }
+      let!(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: sub_epic1) }
+
+      before do
+        subgroup.add_developer(user_dev)
+        visit group_roadmap_path(subgroup)
+        wait_for_requests
+      end
+
+      it_behaves_like 'filtered search bar', available_tokens
+    end
   end

   context 'when no epics exist for the group' do
@@ -193,41 +227,4 @@ RSpec.describe 'group epic roadmap', :js do
       end
     end
   end
-
-  context 'async filtered search' do
-    available_tokens = %w[Author Label Milestone Epic My-Reaction]
-
-    before do
-      stub_feature_flags(async_filtering: true)
-    end
-
-    describe 'within a group' do
-      let!(:epic1) { create(:epic, group: group, end_date: 10.days.ago) }
-      let!(:epic2) { create(:epic, group: group, start_date: 2.days.ago) }
-      let!(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: epic1) }
-
-      before do
-        group.add_developer(user_dev)
-        visit group_roadmap_path(group)
-        wait_for_requests
-      end
-
-      it_behaves_like 'filtered search bar', available_tokens
-    end
-
-    describe 'within a sub-group group' do
-      let!(:subgroup) { create(:group, parent: group, name: 'subgroup') }
-      let!(:sub_epic1) { create(:epic, group: subgroup, end_date: 10.days.ago) }
-      let!(:sub_epic2) { create(:epic, group: subgroup, start_date: 2.days.ago) }
-      let!(:award_emoji_star) { create(:award_emoji, name: 'star', user: user, awardable: sub_epic1) }
-
-      before do
-        subgroup.add_developer(user_dev)
-        visit group_roadmap_path(subgroup)
-        wait_for_requests
-      end
-
-      it_behaves_like 'filtered search bar', available_tokens
-    end
-  end
 end
@@ -69,6 +69,8 @@ RSpec.describe 'New project', :js do
     end

     it '"Import project" tab creates projects with features enabled' do
+      stub_request(:get, "http://foo.git/info/refs?service=git-upload-pack").to_return(status: 200, body: "001e# service=git-upload-pack")
+
       visit new_project_path
       find('[data-qa-panel-name="import_project"]').click # rubocop:disable QA/SelectorUsage
@@ -76,6 +78,9 @@ RSpec.describe 'New project', :js do
       first('.js-import-git-toggle-button').click

       fill_in 'project_import_url', with: 'http://foo.git'
+
+      wait_for_requests
+
       fill_in 'project_name', with: 'import-project-with-features1'
       fill_in 'project_path', with: 'import-project-with-features1'
       choose 'project_visibility_level_20'
...
 import { shallowMount } from '@vue/test-utils';
-import JiraIssueNote from 'ee/integrations/jira/issues_show/components/note.vue';
+import JiraIssueNote from 'ee/external_issues_show/components/note.vue';
 import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { mockJiraIssueComment } from '../mock_data';
+import { mockExternalIssueComment } from '../mock_data';

 describe('JiraIssuesNote', () => {
   let wrapper;
@@ -15,11 +15,11 @@ describe('JiraIssuesNote', () => {
     wrapper = extendedWrapper(
       shallowMount(JiraIssueNote, {
         propsData: {
-          authorName: mockJiraIssueComment.author.name,
-          authorWebUrl: mockJiraIssueComment.author.web_url,
-          authorAvatarUrl: mockJiraIssueComment.author.avatar_url,
-          noteCreatedAt: mockJiraIssueComment.created_at,
-          noteBodyHtml: mockJiraIssueComment.body_html,
+          authorName: mockExternalIssueComment.author.name,
+          authorWebUrl: mockExternalIssueComment.author.web_url,
+          authorAvatarUrl: mockExternalIssueComment.author.avatar_url,
+          noteCreatedAt: mockExternalIssueComment.created_at,
+          noteBodyHtml: mockExternalIssueComment.body_html,
           ...props,
         },
         slots,
...
 // Jest Snapshot v1, https://goo.gl/fbAQLP

-exports[`JiraIssuesSidebarAssignee with assignee template renders avatar components 1`] = `
+exports[`ExternalIssuesSidebarAssignee with assignee template renders avatar components 1`] = `
 <div>
   <div
     class="hide-collapsed"
@@ -44,7 +44,7 @@
 </div>
 `;

-exports[`JiraIssuesSidebarAssignee with no assignee template renders template without avatar components (the "None" state) 1`] = `
+exports[`ExternalIssuesSidebarAssignee with no assignee template renders template without avatar components (the "None" state) 1`] = `
 <div>
   <div
     class="hide-collapsed"
...
 import { GlAvatarLabeled, GlAvatarLink, GlAvatar } from '@gitlab/ui';
 import { shallowMount } from '@vue/test-utils';
-import Assignee from 'ee/integrations/jira/issues_show/components/sidebar/assignee.vue';
+import Assignee from 'ee/external_issues_show/components/sidebar/assignee.vue';
 import { extendedWrapper } from 'helpers/vue_test_utils_helper';
 import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
 import AssigneeTitle from '~/sidebar/components/assignees/assignee_title.vue';
-import { mockJiraIssue } from '../../mock_data';
+import { mockExternalIssue } from '../../mock_data';

-const mockAssignee = convertObjectPropsToCamelCase(mockJiraIssue.assignees[0], { deep: true });
+const mockAssignee = convertObjectPropsToCamelCase(mockExternalIssue.assignees[0], { deep: true });

-describe('JiraIssuesSidebarAssignee', () => {
+describe('ExternalIssuesSidebarAssignee', () => {
   let wrapper;

   const findNoAssigneeText = () => wrapper.findByTestId('no-assignee-text');
...
 import { shallowMount } from '@vue/test-utils';
-import IssueDueDate from 'ee/integrations/jira/issues_show/components/sidebar/issue_due_date.vue';
+import IssueDueDate from 'ee/external_issues_show/components/sidebar/issue_due_date.vue';
 import { useFakeDate } from 'helpers/fake_date';
 import { extendedWrapper } from 'helpers/vue_test_utils_helper';
...
 import { GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
 import { shallowMount } from '@vue/test-utils';
-import IssueFieldDropdown from 'ee/integrations/jira/issues_show/components/sidebar/issue_field_dropdown.vue';
-import { mockJiraIssueStatuses } from '../../mock_data';
+import IssueFieldDropdown from 'ee/external_issues_show/components/sidebar/issue_field_dropdown.vue';
+import { mockExternalIssueStatuses } from '../../mock_data';

 describe('IssueFieldDropdown', () => {
   let wrapper;
@@ -31,9 +31,9 @@ describe('IssueFieldDropdown', () => {
   it.each`
     loading  | items
     ${true}  | ${[]}
-    ${true}  | ${mockJiraIssueStatuses}
+    ${true}  | ${mockExternalIssueStatuses}
     ${false} | ${[]}
-    ${false} | ${mockJiraIssueStatuses}
+    ${false} | ${mockExternalIssueStatuses}
   `('with loading = $loading, items = $items', ({ loading, items }) => {
     createComponent({
       props: {
@@ -47,7 +47,7 @@ describe('IssueFieldDropdown', () => {
     if (!loading) {
       if (items.length) {
         findAllGlDropdownItems().wrappers.forEach((itemWrapper, index) => {
-          expect(itemWrapper.text()).toBe(mockJiraIssueStatuses[index].title);
+          expect(itemWrapper.text()).toBe(mockExternalIssueStatuses[index].title);
         });
       } else {
         expect(wrapper.text()).toBe(emptyText);
...
 import { GlButton, GlIcon } from '@gitlab/ui';
-import IssueField from 'ee/integrations/jira/issues_show/components/sidebar/issue_field.vue';
+import IssueField from 'ee/external_issues_show/components/sidebar/issue_field.vue';
 import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
 import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
...
export const mockExternalIssue = {
title: 'FE-2 The second FE issue on Jira',
description_html:
'<a href="https://jira.reali.sh:8080/projects/FE/issues/FE-2">FE-2</a> The second FE issue on Jira',
created_at: '"2021-02-01T04:04:40.833Z"',
author: {
name: 'Justin Ho',
web_url: 'http://127.0.0.1:3000/root',
avatar_url: 'http://127.0.0.1:3000/uploads/-/system/user/avatar/1/avatar.png?width=90',
},
assignees: [
{
name: 'Justin Ho',
web_url: 'http://127.0.0.1:3000/root',
avatar_url: 'http://127.0.0.1:3000/uploads/-/system/user/avatar/1/avatar.png?width=90',
},
],
due_date: '2021-02-14T00:00:00.000Z',
labels: [
{
title: 'In Progress',
description: 'Work that is still in progress',
color: '#0052CC',
text_color: '#FFFFFF',
},
],
references: {
relative: 'FE-2',
},
state: 'opened',
status: 'In Progress',
};
export const mockExternalIssueComment = {
body_html: '<p>hi</p>',
created_at: '"2021-02-01T04:04:40.833Z"',
author: {
name: 'Justin Ho',
web_url: 'http://127.0.0.1:3000/root',
avatar_url: 'http://127.0.0.1:3000/uploads/-/system/user/avatar/1/avatar.png?width=90',
},
id: 10000,
};
export const mockExternalIssueStatuses = [{ title: 'In Progress' }, { title: 'Done' }];
 import { shallowMount } from '@vue/test-utils';
-import Assignee from 'ee/integrations/jira/issues_show/components/sidebar/assignee.vue';
-import IssueDueDate from 'ee/integrations/jira/issues_show/components/sidebar/issue_due_date.vue';
-import IssueField from 'ee/integrations/jira/issues_show/components/sidebar/issue_field.vue';
+import Assignee from 'ee/external_issues_show/components/sidebar/assignee.vue';
+import IssueDueDate from 'ee/external_issues_show/components/sidebar/issue_due_date.vue';
+import IssueField from 'ee/external_issues_show/components/sidebar/issue_field.vue';
 import Sidebar from 'ee/integrations/jira/issues_show/components/sidebar/jira_issues_sidebar_root.vue';
 import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
 import CopyableField from '~/vue_shared/components/sidebar/copyable_field.vue';
...
@@ -45,7 +45,6 @@ describe('RoadmapApp', () => {
       presetType,
     },
     provide: {
-      glFeatures: { asyncFiltering: true },
       groupFullPath: 'gitlab-org',
       groupMilestonesPath: '/groups/gitlab-org/-/milestones.json',
       listEpicsPath: '/groups/gitlab-org/-/epics',
...
@@ -638,32 +638,58 @@ RSpec.describe Namespace do
   describe '#any_project_with_shared_runners_enabled?' do
     subject { namespace.any_project_with_shared_runners_enabled? }

-    context 'without projects' do
-      it { is_expected.to be_falsey }
-    end
+    shared_examples '#any_project_with_shared_runners_enabled? examples' do
+      context 'without projects' do
+        it { is_expected.to be_falsey }
+      end

-    context 'group with shared runners enabled project' do
-      let!(:project) { create(:project, namespace: namespace, shared_runners_enabled: true) }
+      context 'group with shared runners enabled project' do
+        let!(:project) { create(:project, namespace: namespace, shared_runners_enabled: true) }

-      it { is_expected.to be_truthy }
-    end
+        it { is_expected.to be_truthy }
+      end

-    context 'subgroup with shared runners enabled project' do
-      let(:namespace) { create(:group) }
-      let(:subgroup) { create(:group, parent: namespace) }
-      let!(:subproject) { create(:project, namespace: subgroup, shared_runners_enabled: true) }
+      context 'subgroup with shared runners enabled project' do
+        let(:namespace) { create(:group) }
+        let(:subgroup) { create(:group, parent: namespace) }
+        let!(:subproject) { create(:project, namespace: subgroup, shared_runners_enabled: true) }

-      it { is_expected.to be_truthy }
-    end
+        it { is_expected.to be_truthy }
+      end

-    context 'with project and disabled shared runners' do
-      let!(:project) do
-        create(:project,
-               namespace: namespace,
-               shared_runners_enabled: false)
-      end
+      context 'with project and disabled shared runners' do
+        let!(:project) do
+          create(:project,
+                 namespace: namespace,
+                 shared_runners_enabled: false)
+        end

-      it { is_expected.to be_falsey }
-    end
+        it { is_expected.to be_falsey }
+      end
+    end
+
+    context 'when use_cte_for_any_project_with_shared_runners_enabled is enabled' do
+      before do
+        stub_feature_flags(use_cte_for_any_project_with_shared_runners_enabled: true)
+      end
+
+      it_behaves_like '#any_project_with_shared_runners_enabled? examples' do
+        it 'creates a CTE' do
+          group = create(:group)
+
+          expect(Gitlab::SQL::CTE).to receive(:new).and_call_original
+
+          group.any_project_with_shared_runners_enabled?
+        end
+      end
+    end
+
+    context 'when use_cte_for_any_project_with_shared_runners_enabled is disabled' do
+      before do
+        stub_feature_flags(use_cte_for_any_project_with_shared_runners_enabled: false)
+      end
+
+      it_behaves_like '#any_project_with_shared_runners_enabled? examples'
+    end
   end
...
@@ -106,6 +106,75 @@ RSpec.describe Security::Scan do
     it { is_expected.to match_array(expected_scans) }
   end

+  describe '.without_errors' do
+    let(:scan_1) { create(:security_scan, :with_error) }
+    let(:scan_2) { create(:security_scan) }
+
+    subject { described_class.without_errors }
+
+    it { is_expected.to contain_exactly(scan_2) }
+  end
+
+  describe '.latest' do
+    let!(:latest_scan) { create(:security_scan, latest: true) }
+    let!(:retried_scan) { create(:security_scan, latest: false) }
+
+    subject { described_class.latest }
+
+    it { is_expected.to match_array([latest_scan]) }
+  end
+
+  describe '#report_findings' do
+    let(:artifact) { create(:ee_ci_job_artifact, :dast) }
+    let(:scan) { create(:security_scan, build: artifact.job) }
+    let(:artifact_finding_uuids) { artifact.security_report.findings.map(&:uuid) }
+
+    subject { scan.report_findings.map(&:uuid) }
+
+    it { is_expected.to match_array(artifact_finding_uuids) }
+  end
+
+  describe '#processing_errors' do
+    let(:scan) { build(:security_scan, :with_error) }
+
+    subject { scan.processing_errors }
+
+    it { is_expected.to eq([{ 'type' => 'ParsingError', 'message' => 'Unknown error happened' }]) }
+  end
+
+  describe '#processing_errors=' do
+    let(:scan) { create(:security_scan) }
+
+    subject(:set_processing_errors) { scan.processing_errors = [:foo] }
+
+    it 'sets the processing errors' do
+      expect { set_processing_errors }.to change { scan.info['errors'] }.from(nil).to([:foo])
+    end
+  end
+
+  describe '#add_processing_error!' do
+    let(:error) { { type: 'foo', message: 'bar' } }
+
+    subject(:add_processing_error) { scan.add_processing_error!(error) }
+
+    context 'when the scan does not have any errors' do
+      let(:scan) { create(:security_scan) }
+
+      it 'persists the error' do
+        expect { add_processing_error }.to change { scan.reload.info['errors'] }.from(nil).to([{ 'type' => 'foo', 'message' => 'bar' }])
+      end
+    end
+
+    context 'when the scan already has some errors' do
+      let(:scan) { create(:security_scan, :with_error) }
+
+      it 'persists the new error with the existing ones' do
+        expect { add_processing_error }.to change { scan.reload.info['errors'] }.from([{ 'type' => 'ParsingError', 'message' => 'Unknown error happened' }])
+          .to([{ 'type' => 'ParsingError', 'message' => 'Unknown error happened' }, { 'type' => 'foo', 'message' => 'bar' }])
+      end
+    end
+  end
+
   it_behaves_like 'having unique enum values'

   it 'sets `project_id` and `pipeline_id` before save' do
...
@@ -159,6 +159,15 @@ RSpec.describe Vulnerabilities::Feedback do
     end
   end

+  describe '.by_finding_uuid' do
+    let(:feedback_1) { create(:vulnerability_feedback) }
+    let(:feedback_2) { create(:vulnerability_feedback) }
+
+    subject { described_class.by_finding_uuid([feedback_2.finding_uuid]) }
+
+    it { is_expected.to eq([feedback_2]) }
+  end
+
   describe '.with_category' do
     it 'filters by category' do
       described_class.categories.each do |category, _|
...
@@ -5,4 +5,13 @@ require 'spec_helper'
 RSpec.describe Vulnerabilities::FindingRemediation do
   it { is_expected.to belong_to(:finding).class_name('Vulnerabilities::Finding').required }
   it { is_expected.to belong_to(:remediation).class_name('Vulnerabilities::Remediation').required }
+
+  describe '.by_finding_id' do
+    let(:finding_1) { create(:vulnerabilities_finding) }
+    let!(:remediation) { create(:vulnerabilities_remediation, findings: [finding_1]) }
+
+    subject { described_class.by_finding_id(finding_1.id) }
+
+    it { is_expected.to eq(remediation.finding_remediations) }
+  end
 end
@@ -59,16 +59,39 @@ RSpec.describe NetworkPolicies::DeployResourceService do
     end

     context 'with resource_name' do
-      let(:resource_name) { 'policy2' }
+      let(:resource_name) { 'policy' }

-      it 'updates resource in the deployment namespace and returns success response with a policy' do
-        namespaced_policy = policy.generate
-        namespaced_policy[:metadata][:namespace] = environment.deployment_namespace
-        namespaced_policy[:metadata][:name] = 'policy2'
-
-        expect(kubeclient).to receive(:update_network_policy).with(namespaced_policy) { policy.generate }
-        expect(subject).to be_success
-        expect(subject.payload.as_json).to eq(policy.as_json)
+      context 'when name is not updated' do
+        it 'updates resource in the deployment namespace and returns success response with a policy' do
+          namespaced_policy = policy.generate
+          namespaced_policy[:metadata][:namespace] = environment.deployment_namespace
+
+          expect(kubeclient).to receive(:update_network_policy).with(namespaced_policy)
+          expect(subject).to be_success
+          expect(subject.payload.as_json).to eq(policy.as_json)
+        end
+      end
+
+      context 'when name is updated' do
+        let(:policy) do
+          Gitlab::Kubernetes::NetworkPolicy.new(
+            name: 'policy2',
+            namespace: 'another',
+            selector: { matchLabels: { role: 'db' } },
+            ingress: [{ from: [{ namespaceSelector: { matchLabels: { project: 'myproject' } } }] }]
+          )
+        end
+
+        it 'destroys and recreates resource in the deployment namespace and returns success response with a policy' do
+          namespaced_policy = policy.generate
+          namespaced_policy[:metadata][:namespace] = environment.deployment_namespace
+          namespaced_policy[:metadata][:resourceVersion] = nil
+
+          expect(kubeclient).to receive(:delete_network_policy).with(resource_name, environment.deployment_namespace)
+          expect(kubeclient).to receive(:create_network_policy).with(namespaced_policy)
+          expect(subject).to be_success
+          expect(subject.payload.as_json).to eq(policy.as_json)
+        end
       end
     end
@@ -137,14 +160,62 @@ RSpec.describe NetworkPolicies::DeployResourceService do
     context 'with resource_name' do
       let(:resource_name) { 'policy' }

-      it 'updates resource in the deployment namespace and returns success response with a policy' do
-        namespaced_policy = policy.generate
-        namespaced_policy[:metadata][:namespace] = environment.deployment_namespace
-        namespaced_policy[:metadata][:name] = resource_name
-
-        expect(kubeclient).to receive(:update_cilium_network_policy).with(namespaced_policy) { policy.generate }
-        expect(subject).to be_success
-        expect(subject.payload.as_json).to eq(policy.as_json)
+      before do
+        allow(Gitlab::Kubernetes::CiliumNetworkPolicy).to receive(:from_resource).and_return policy
+        allow(Gitlab::Kubernetes::CiliumNetworkPolicy).to receive(:from_yaml).and_return policy
+      end
+
+      context 'when name is not updated' do
+        it 'updates resource in the deployment namespace and returns success response with a policy' do
+          namespaced_policy = policy.generate
+          namespaced_policy[:metadata][:namespace] = environment.deployment_namespace
+
+          expect(kubeclient).to receive(:update_cilium_network_policy).with(namespaced_policy)
+          expect(subject).to be_success
+          expect(subject.payload.as_json).to eq(policy.as_json)
+        end
+      end
+
+      context 'when name is updated' do
+        let(:policy) do
+          Gitlab::Kubernetes::CiliumNetworkPolicy.new(
+            name: 'policy2',
+            namespace: 'namespace',
+            resource_version: 101,
+            selector: { matchLabels: { role: 'db' } },
+            ingress: [{ fromEndpoints: [{ matchLabels: { project: 'myproject' } }] }]
+          )
+        end
+
+        let(:manifest) do
+          <<~POLICY
+            apiVersion: cilium.io/v2
+            kind: CiliumNetworkPolicy
+            metadata:
+              name: policy2
+              namespace: another
+              resourceVersion: 101
+            spec:
+              endpointSelector:
+                matchLabels:
+                  role: db
+              ingress:
+              - fromEndpoints:
+                - matchLabels:
+                    project: myproject
          POLICY
+        end
+
+        it 'destroys and recreates resource in the deployment namespace and returns success response with a policy' do
+          namespaced_policy = policy.generate
+          namespaced_policy[:metadata][:namespace] = environment.deployment_namespace
+          namespaced_policy[:metadata][:resourceVersion] = nil
+
+          expect(kubeclient).to receive(:delete_cilium_network_policy).with(resource_name, environment.deployment_namespace)
+          expect(kubeclient).to receive(:create_cilium_network_policy).with(namespaced_policy)
+          expect(subject).to be_success
+          expect(subject.payload.as_json).to eq(policy.as_json)
+        end
       end
     end
   end
...
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::FindingMapCollection do
describe '#each_slice' do
let(:security_scan) { create(:security_scan) }
let(:security_findings) { create_list(:security_finding, 3, scan: security_scan, deduplicated: true) }
let(:report_findings) { [] }
let(:finding_map_collection) { described_class.new(security_scan) }
let(:finding_maps) { [] }
let(:finding_pairs) { finding_maps.map { |finding_map| [finding_map.security_finding, finding_map.report_finding] } }
let(:test_block) { proc { |slice| finding_maps.concat(slice) } }
let(:expected_finding_pairs) do
[
[security_findings[0], report_findings[0]],
[security_findings[1], report_findings[1]],
[security_findings[2], report_findings[2]]
]
end
before do
create(:security_finding, scan: security_scan, deduplicated: false)
security_findings.each { |security_finding| report_findings << create(:ci_reports_security_finding, uuid: security_finding.uuid) }
allow(security_scan).to receive(:report_findings).and_return(report_findings)
allow(finding_maps).to receive(:concat).and_call_original
end
context 'when the size argument is given' do
subject(:run_each_slice) { finding_map_collection.each_slice(1, &test_block) }
it 'calls the given block for each slice by the given size argument' do
run_each_slice
expect(finding_maps).to have_received(:concat).exactly(3).times
expect(finding_pairs).to match_array(expected_finding_pairs)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::FindingMap do
let(:security_finding) { build(:security_finding) }
let(:identifier) { build(:ci_reports_security_identifier) }
let(:report_finding) { build(:ci_reports_security_finding, identifiers: [identifier]) }
let(:finding_map) { build(:finding_map, security_finding: security_finding, report_finding: report_finding) }
describe '#uuid' do
subject { finding_map }
it { is_expected.to delegate_method(:uuid).to(:security_finding) }
end
describe '#identifiers' do
subject { finding_map.identifiers }
it { is_expected.to eq([identifier]) }
end
describe '#set_identifier_ids_by' do
let(:identifiers_map) { { identifier.fingerprint => 1 } }
subject(:set_identifier_ids) { finding_map.set_identifier_ids_by(identifiers_map) }
it 'changes the identifier_ids of the finding_map' do
expect { set_identifier_ids }.to change { finding_map.identifier_ids }.from([]).to([1])
end
end
describe '#to_hash' do
let(:expected_hash) do
{
uuid: security_finding.uuid,
scanner_id: security_finding.scanner_id,
primary_identifier_id: nil,
location_fingerprint: report_finding.location.fingerprint,
project_fingerprint: report_finding.project_fingerprint,
name: 'Cipher with no integrity',
report_type: :sast,
severity: :high,
confidence: :medium,
metadata_version: 'sast:1.0',
details: {},
raw_metadata: report_finding.raw_metadata,
description: 'The cipher does not provide data integrity update 1',
solution: 'GCM mode introduces an HMAC into the resulting encrypted data, providing integrity of the result.',
message: nil,
cve: nil,
location: {
"class" => "com.gitlab.security_products.tests.App",
"end_line" => 29,
"file" => "maven/src/main/java/com/gitlab/security_products/tests/App.java",
"method" => "insecureCypher",
"start_line" => 29
}
}
end
subject { finding_map.to_hash }
it { is_expected.to eq(expected_hash) }
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::IngestReportService do
let(:service_object) { described_class.new(security_scan) }
describe '#execute' do
let(:security_scan) { create(:security_scan, scan_type: :sast) }
subject(:ingest_report) { service_object.execute }
before do
create_list(:security_finding, 2, scan: security_scan, deduplicated: true)
stub_const("#{described_class}::BATCH_SIZE", 1)
allow(Security::Ingestion::FindingMapCollection).to receive(:new).with(security_scan).and_return([:foo, :bar])
allow(Security::Ingestion::IngestReportSliceService).to receive(:execute).with(security_scan.pipeline, [:foo]).and_return([1])
allow(Security::Ingestion::IngestReportSliceService).to receive(:execute).with(security_scan.pipeline, [:bar]).and_return([2])
end
it 'calls IngestReportSliceService for each slice of findings and accumulates the return values' do
expect(ingest_report).to eq([1, 2])
expect(Security::Ingestion::IngestReportSliceService).to have_received(:execute).twice
end
context 'when ingesting a slice of vulnerabilities fails' do
let(:exception) { RuntimeError.new }
let(:expected_processing_error) { { 'type' => 'IngestionError', 'message' => 'Ingestion failed for some vulnerabilities' } }
before do
allow(Security::Ingestion::IngestReportSliceService).to receive(:execute).with(security_scan.pipeline, [:foo]).and_raise(exception)
allow(Gitlab::ErrorTracking).to receive(:track_exception)
end
it 'tracks the exception' do
ingest_report
expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(exception)
end
it 'captures the error and sets the processing error for security scan record' do
expect { ingest_report }.to change { security_scan.processing_errors }.from([]).to([expected_processing_error])
end
it 'accumulates the return value of only the succeeded executions' do
expect(ingest_report).to eq([2])
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::IngestReportSliceService do
let(:service_object) { described_class.new(pipeline, finding_maps) }
let(:pipeline) { create(:ci_pipeline) }
let(:finding_maps) { [create(:finding_map)] }
describe '#execute' do
subject(:ingest_report_slice) { service_object.execute }
before do
described_class::TASKS.each do |task_name|
task = Object.const_get("Security::Ingestion::Tasks::#{task_name}", false)
allow(task).to receive(:execute)
end
end
it 'runs the series of tasks in correct order' do
ingest_report_slice
expect(Security::Ingestion::Tasks::IngestIdentifiers).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindings).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestVulnerabilities).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::AttachFindingsToVulnerabilities).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindingPipelines).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindingIdentifiers).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindingLinks).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestFindingSignatures).to have_received(:execute).ordered.with(pipeline, finding_maps)
expect(Security::Ingestion::Tasks::IngestRemediations).to have_received(:execute).ordered.with(pipeline, finding_maps)
end
context 'when an exception happens' do
let(:mock_task_1) { double(:task) }
let(:mock_task_2) { double(:task) }
before do
allow(mock_task_1).to receive(:execute) { |pipeline, *| pipeline.update_column(:updated_at, 3.months.from_now) }
allow(mock_task_2).to receive(:execute) { raise 'foo' }
allow(Security::Ingestion::Tasks).to receive(:const_get).with(:IngestIdentifiers, false).and_return(mock_task_1)
allow(Security::Ingestion::Tasks).to receive(:const_get).with(:IngestFindings, false).and_return(mock_task_2)
end
it 'rolls back the recent changes so as not to leave the database in an inconsistent state' do
expect { ingest_report_slice }.to raise_error('foo')
.and not_change { pipeline.reload.updated_at }
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::IngestReportsService do
let(:service_object) { described_class.new(pipeline) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
let_it_be(:security_scan_1) { create(:security_scan, build: build, scan_type: :sast) }
let_it_be(:security_scan_2) { create(:security_scan, :with_error, build: build, scan_type: :dast) }
let_it_be(:security_scan_3) { create(:security_scan, build: build, scan_type: :secret_detection) }
let_it_be(:vulnerability_1) { create(:vulnerability, project: pipeline.project) }
let_it_be(:vulnerability_2) { create(:vulnerability, project: pipeline.project) }
describe '#execute' do
let(:ids_1) { [vulnerability_1.id] }
let(:ids_2) { [] }
subject(:ingest_reports) { service_object.execute }
before do
allow(Security::Ingestion::IngestReportService).to receive(:execute).and_return(ids_1, ids_2)
end
it 'calls IngestReportService for each succeeded security scan' do
ingest_reports
expect(Security::Ingestion::IngestReportService).to have_received(:execute).twice
expect(Security::Ingestion::IngestReportService).to have_received(:execute).once.with(security_scan_1)
expect(Security::Ingestion::IngestReportService).to have_received(:execute).once.with(security_scan_3)
end
it 'sets the resolved vulnerabilities, latest pipeline ID and has_vulnerabilities flag' do
expect { ingest_reports }.to change { project.reload.project_setting&.has_vulnerabilities }.to(true)
.and change { project.reload.vulnerability_statistic&.latest_pipeline_id }.to(pipeline.id)
.and change { vulnerability_2.reload.resolved_on_default_branch }.from(false).to(true)
.and not_change { vulnerability_1.reload.resolved_on_default_branch }.from(false)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Security::Ingestion::Tasks::AttachFindingsToVulnerabilities do
describe '#execute' do
let(:pipeline) { create(:ci_pipeline) }
let(:finding_maps) { create_list(:finding_map, 3, :new_record) }
let(:service_object) { described_class.new(pipeline, finding_maps) }
let(:finding_1) { Vulnerabilities::Finding.find(finding_maps.first.finding_id) }
let(:finding_2) { Vulnerabilities::Finding.find(finding_maps.second.finding_id) }
let(:finding_3) { Vulnerabilities::Finding.find(finding_maps.third.finding_id) }
let(:vulnerability_id_1) { finding_maps.first.vulnerability_id }
let(:vulnerability_id_2) { finding_maps.second.vulnerability_id }
let(:vulnerability_id_3) { finding_maps.third.vulnerability_id }
subject(:attach_findings_to_vulnerabilities) { service_object.execute }
before do
finding_maps.third.new_record = false
end
it 'associates the findings with vulnerabilities for the new records' do
expect { attach_findings_to_vulnerabilities }.to change { finding_1.reload.vulnerability_id }.from(nil).to(vulnerability_id_1)
.and change { finding_2.reload.vulnerability_id }.from(nil).to(vulnerability_id_2)
.and not_change { finding_3.reload.vulnerability_id }.from(nil)
end
end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Ingestion::Tasks::IngestFindingIdentifiers do
  describe '#execute' do
    let(:pipeline) { create(:ci_pipeline) }
    let(:identifier) { create(:vulnerabilities_identifier) }
    let(:finding_1) { create(:vulnerabilities_finding) }
    let(:finding_2) { create(:vulnerabilities_finding) }
    let(:finding_map_1) { create(:finding_map, finding: finding_1, identifier_ids: [identifier.id]) }
    let(:finding_map_2) { create(:finding_map, finding: finding_2, identifier_ids: [identifier.id]) }
    let(:service_object) { described_class.new(pipeline, [finding_map_1, finding_map_2]) }

    subject(:ingest_finding_identifiers) { service_object.execute }

    before do
      finding_1.identifiers << identifier
    end

    it 'associates findings with the identifiers' do
      expect { ingest_finding_identifiers }.to change { Vulnerabilities::FindingIdentifier.count }.by(1)
        .and change { finding_2.reload.identifiers }.from([]).to([identifier])
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Ingestion::Tasks::IngestFindingLinks do
  describe '#execute' do
    let(:pipeline) { create(:ci_pipeline) }
    let(:finding_link) { create(:ci_reports_security_link) }
    let(:finding_1) { create(:vulnerabilities_finding) }
    let(:finding_2) { create(:vulnerabilities_finding) }
    let(:report_finding_1) { create(:ci_reports_security_finding, links: [finding_link]) }
    let(:report_finding_2) { create(:ci_reports_security_finding, links: [finding_link]) }
    let(:finding_map_1) { create(:finding_map, finding: finding_1, report_finding: report_finding_1) }
    let(:finding_map_2) { create(:finding_map, finding: finding_2, report_finding: report_finding_2) }
    let(:service_object) { described_class.new(pipeline, [finding_map_1, finding_map_2]) }

    subject(:ingest_finding_links) { service_object.execute }

    before do
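      # finding_2 already has a link with the same URL, so only finding_1
      # should gain a new FindingLink record.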
      create(:finding_link, finding: finding_2, url: finding_link.url)
    end

    it 'creates finding links for the new records' do
      expect { ingest_finding_links }.to change { Vulnerabilities::FindingLink.count }.by(1)
        .and change { finding_1.finding_links.count }.by(1)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Ingestion::Tasks::IngestFindingPipelines do
  describe '#execute' do
    let(:pipeline) { create(:ci_pipeline) }
    let(:finding) { create(:vulnerabilities_finding) }
    let(:finding_maps) { create_list(:finding_map, 1, finding: finding) }
    let(:service_object) { described_class.new(pipeline, finding_maps) }

    subject(:ingest_finding_pipelines) { service_object.execute }

    it 'associates the findings with the pipeline' do
      expect { ingest_finding_pipelines }.to change { finding.finding_pipelines.pluck(:pipeline_id) }.from([]).to([pipeline.id])
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Ingestion::Tasks::IngestFindingSignatures do
  describe '#execute' do
    let(:pipeline) { create(:ci_pipeline) }
    let(:finding_1) { create(:vulnerabilities_finding) }
    let(:finding_2) { create(:vulnerabilities_finding) }
    let(:report_signature) { create(:ci_reports_security_finding_signature) }
    let(:report_finding_1) { create(:ci_reports_security_finding, signatures: [report_signature]) }
    let(:report_finding_2) { create(:ci_reports_security_finding, signatures: [report_signature]) }
    let(:finding_map_1) { create(:finding_map, finding: finding_1, report_finding: report_finding_1) }
    let(:finding_map_2) { create(:finding_map, finding: finding_2, report_finding: report_finding_2) }
    let(:service_object) { described_class.new(pipeline, [finding_map_1, finding_map_2]) }

    subject(:ingest_finding_signatures) { service_object.execute }

    before do
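      # finding_1 already has a signature with the same SHA, so only finding_2
      # should receive a new FindingSignature record.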
      create(:vulnerabilities_finding_signature, finding: finding_1, signature_sha: report_signature.signature_sha)
    end

    it 'ingests new finding signatures' do
      expect { ingest_finding_signatures }.to change { Vulnerabilities::FindingSignature.count }.by(1)
        .and change { finding_2.signatures.count }.by(1)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Ingestion::Tasks::IngestFindings do
  describe '#execute' do
    let_it_be(:pipeline) { create(:ci_pipeline) }
    let_it_be(:identifier) { create(:vulnerabilities_identifier) }

    let(:finding_maps) { create_list(:finding_map, 4, identifier_ids: [identifier.id]) }
    let!(:existing_finding) { create(:vulnerabilities_finding, :detected, uuid: finding_maps.first.uuid) }

    subject(:ingest_findings) { described_class.new(pipeline, finding_maps).execute }

    it 'ingests findings' do
      expect { ingest_findings }.to change { Vulnerabilities::Finding.count }.by(3)
    end

    it 'sets the finding and vulnerability ids' do
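      # The first map matches the pre-existing finding by UUID, so it keeps that
      # finding's ID and vulnerability ID; the other three maps get freshly
      # created finding IDs and no vulnerability yet.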
      expected_finding_ids = Array.new(3) { an_instance_of(Integer) }.unshift(existing_finding.id)
      expected_vulnerability_ids = [existing_finding.vulnerability_id, nil, nil, nil]

      expect { ingest_findings }.to change { finding_maps.map(&:finding_id) }.from(Array.new(4)).to(expected_finding_ids)
        .and change { finding_maps.map(&:vulnerability_id) }.from(Array.new(4)).to(expected_vulnerability_ids)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Ingestion::Tasks::IngestIdentifiers do
  describe '#execute' do
    let_it_be(:pipeline) { create(:ci_pipeline) }

    let(:existing_fingerprint) { Digest::SHA1.hexdigest('type:id') }
    let(:vulnerability_identifier) { create(:vulnerabilities_identifier, project: pipeline.project, fingerprint: existing_fingerprint, name: 'bar') }
    let(:existing_report_identifier) { create(:ci_reports_security_identifier, external_id: 'id', external_type: 'type') }
    let(:extra_identifiers) { Array.new(21) { |index| create(:ci_reports_security_identifier, external_id: "id-#{index}", external_type: 'type') } }
    let(:identifiers) { extra_identifiers.unshift(existing_report_identifier) }
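    # 22 identifiers are reported but only 20 fingerprints are expected (the
    # existing one plus 19 new ones), which suggests the task caps the number
    # of identifiers ingested per finding at 20.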
    let(:expected_fingerprints) { Array.new(19) { |index| Digest::SHA1.hexdigest("type:id-#{index}") }.unshift(existing_fingerprint).sort }
    let(:report_finding) { create(:ci_reports_security_finding, identifiers: identifiers) }
    let(:finding_map) { create(:finding_map, report_finding: report_finding) }
    let(:service_object) { described_class.new(pipeline, [finding_map]) }
    let(:project_identifiers) { pipeline.project.vulnerability_identifiers }

    subject(:ingest_identifiers) { service_object.execute }

    it 'creates new records and updates the existing ones' do
      expect { ingest_identifiers }.to change { project_identifiers.count }.from(1).to(20)
        .and change { vulnerability_identifier.reload.name }
    end

    it 'sets the identifier_ids for the finding_map object' do
      expect { ingest_identifiers }.to(
        change { project_identifiers.where(id: finding_map.identifier_ids).pluck(:fingerprint).sort }
          .from([])
          .to(expected_fingerprints))
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Ingestion::Tasks::IngestRemediations do
  describe '#execute' do
    let(:pipeline) { create(:ci_pipeline) }
    let(:existing_checksum) { Digest::SHA256.hexdigest('foo') }
    let(:existing_remediation_1) { create(:vulnerabilities_remediation, project: pipeline.project, checksum: existing_checksum) }
    let(:existing_remediation_2) { create(:vulnerabilities_remediation, project: pipeline.project) }
    let(:report_remediation_1) { create(:ci_reports_security_remediation, diff: 'foo') }
    let(:report_remediation_2) { create(:ci_reports_security_remediation, diff: 'bar') }
    let(:finding_1) { create(:vulnerabilities_finding, remediations: [existing_remediation_1, existing_remediation_2]) }
    let(:finding_2) { create(:vulnerabilities_finding) }
    let(:report_finding_1) { create(:ci_reports_security_finding, remediations: [report_remediation_1, report_remediation_2]) }
    let(:report_finding_2) { create(:ci_reports_security_finding, remediations: [report_remediation_1, report_remediation_2]) }
    let(:finding_map_1) { create(:finding_map, finding: finding_1, report_finding: report_finding_1) }
    let(:finding_map_2) { create(:finding_map, finding: finding_2, report_finding: report_finding_2) }

    let!(:service_object) { described_class.new(pipeline, [finding_map_1, finding_map_2]) }

    subject(:ingest_finding_remediations) { service_object.execute }
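    # existing_remediation_1's checksum matches report_remediation_1 (both built
    # from 'foo'), so it is kept; existing_remediation_2 no longer appears in the
    # report and is detached, while the 'bar' remediation is created once and
    # associated with both findings.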
    it 'creates remediations and updates the associations' do
      expect { ingest_finding_remediations }.to change { Vulnerabilities::Remediation.count }.by(1)
        .and change { existing_remediation_2.reload.findings }.from([finding_1]).to([])
        .and change { finding_2.reload.association(:remediations).scope.count }.from(0).to(2)
        .and not_change { finding_1.reload.association(:remediations).scope.count }.from(2)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Security::Ingestion::Tasks::IngestVulnerabilities do
  describe '#execute' do
    let_it_be(:user) { create(:user) }
    let_it_be(:pipeline) { create(:ci_pipeline, user: user) }
    let_it_be(:identifier) { create(:vulnerabilities_identifier) }

    let(:finding_maps) { create_list(:finding_map, 4) }
    let(:existing_finding) { create(:vulnerabilities_finding, :detected) }

    subject(:ingest_vulnerabilities) { described_class.new(pipeline, finding_maps).execute }

    before do
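      # The first map points at an existing vulnerability, so only the three
      # remaining maps should produce new Vulnerability records.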
      finding_maps.first.vulnerability_id = existing_finding.vulnerability_id
      finding_maps.each { |finding_map| finding_map.identifier_ids << identifier.id }
    end

    it 'ingests vulnerabilities' do
      expect { ingest_vulnerabilities }.to change { Vulnerability.count }.by(3)
    end
  end
end
...@@ -477,6 +477,15 @@ RSpec.describe Security::StoreReportService, '#execute', :snowplow do
       end
     end

+    context 'when the existing vulnerability requires manual resolution' do
+      let(:trait) { :secret_detection }
+      let!(:finding) { create(:vulnerabilities_finding, :with_secret_detection, project: project, pipelines: [pipeline]) }
+
+      it "won't mark the vulnerability as resolved on default branch" do
+        expect { subject }.not_to change { finding.vulnerability.reload.resolved_on_default_branch }
+      end
+    end
+
     context 'when the existing resolved vulnerability is discovered again on the latest report' do
       before do
         vulnerability.update_column(:resolved_on_default_branch, true)
...
...@@ -63,17 +63,35 @@ RSpec.describe StoreSecurityReportsWorker do
        stub_licensed_features(report_type => true)
      end

-     it 'executes StoreReportsService for given pipeline' do
-       expect(Security::StoreReportsService).to receive(:new)
-         .with(pipeline).once.and_call_original
+     it 'scans security reports for token revocation' do
+       expect(::ScanSecurityReportSecretsWorker).to receive(:perform_async)

        described_class.new.perform(pipeline.id)
      end

-     it 'scans security reports for token revocation' do
-       expect(::ScanSecurityReportSecretsWorker).to receive(:perform_async)
+     context 'when the `security_report_ingestion_framework` feature is enabled' do
+       before do
+         stub_feature_flags(security_report_ingestion_framework: project)
+       end

-       described_class.new.perform(pipeline.id)
-     end
+       it 'executes IngestReportsService for given pipeline' do
+         expect(::Security::Ingestion::IngestReportsService).to receive(:execute).with(pipeline)
+
+         described_class.new.perform(pipeline.id)
+       end
+     end
+
+     context 'when the `security_report_ingestion_framework` feature is disabled' do
+       before do
+         stub_feature_flags(security_report_ingestion_framework: false)
+       end
+
+       it 'executes StoreReportsService for given pipeline' do
+         expect(Security::StoreReportsService).to receive(:new)
+           .with(pipeline).once.and_call_original
+
+         described_class.new.perform(pipeline.id)
+       end
+     end
    end
  end
end
...
...@@ -11,7 +11,7 @@ module Banzai
      def initialize(doc, context = nil, result = nil)
        super

-       @reference_cache = ReferenceCache.new(self, context)
+       @reference_cache = ReferenceCache.new(self, context, result)
      end

      # REFERENCE_PLACEHOLDER is used for re-escaping HTML text except found
...
...@@ -7,9 +7,10 @@ module Banzai
      include Gitlab::Utils::StrongMemoize
      include RequestStoreReferenceCache

-     def initialize(filter, context)
+     def initialize(filter, context, result)
        @filter = filter
        @context = context
+       @result = result || {}
      end

      def load_reference_cache(nodes)
...@@ -166,7 +167,7 @@ module Banzai
      private

-     attr_accessor :filter, :context
+     attr_accessor :filter, :context, :result

      delegate :project, :group, :parent, :parent_type, to: :filter
...@@ -184,7 +185,11 @@ module Banzai
      end

      def prepare_doc_for_scan(doc)
-       html = doc.to_html
+       html = if Feature.enabled?(:reference_cache_memoization, project, default_enabled: :yaml)
+                result[:rendered_html] ||= doc.to_html
+              else
+                doc.to_html
+              end

        filter.requires_unescaping? ? unescape_html_entities(html) : html
      end
...
...@@ -65,13 +65,6 @@ module Gitlab
          Gitlab::Metrics.counter(name, comment)
        end

-       def self.legacy_update_jobs_counter
-         name = :ci_legacy_update_jobs_as_retried_total
-         comment = 'Counter of occurrences when jobs were not being set as retried before update_retried'
-
-         Gitlab::Metrics.counter(name, comment)
-       end
-
        def self.pipeline_failure_reason_counter
          name = :gitlab_ci_pipeline_failure_reasons
          comment = 'Counter of pipeline failure reasons'
...
...@@ -80,6 +80,8 @@ module Gitlab
          matcher = FindingMatcher.new(head_findings)

          base_findings.each do |base_finding|
+           next if base_finding.requires_manual_resolution?
+
            matched_head_finding = matcher.find_and_remove_match!(base_finding)

            @fixed_findings << base_finding if matched_head_finding.nil?
...
...@@ -17327,7 +17327,7 @@ msgstr[1] ""
 msgid "Importing..."
 msgstr ""

-msgid "Import|A repository URL usually ends in a .git suffix, although this is not required. Double check to make sure your repository URL is correct."
+msgid "Import|There is not a valid Git repository at this URL. If your HTTP repository is not publicly accessible, verify your credentials."
 msgstr ""

 msgid "Improve customer support with Service Desk"
...
...@@ -8,6 +8,10 @@ class AutomatedCleanup
   attr_reader :project_path, :gitlab_token

   DEPLOYMENTS_PER_PAGE = 100
+  ENVIRONMENT_PREFIX = {
+    review_app: 'review/',
+    docs_review_app: 'review-docs/'
+  }.freeze

   IGNORED_HELM_ERRORS = [
     'transport is closing',
     'error upgrading connection',
...@@ -62,13 +66,14 @@ class AutomatedCleanup
     releases_to_delete = []

+    # Delete environments via deployments
     gitlab.deployments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc').auto_paginate do |deployment|
       break if Time.parse(deployment.created_at) < deployments_look_back_threshold

       environment = deployment.environment

       next unless environment
-      next unless environment.name.start_with?('review/')
+      next unless environment.name.start_with?(ENVIRONMENT_PREFIX[:review_app])
       next if checked_environments.include?(environment.slug)

       last_deploy = deployment.created_at
...@@ -92,6 +97,10 @@ class AutomatedCleanup
       checked_environments << environment.slug
     end

+    delete_stopped_environments(environment_type: :review_app, checked_environments: checked_environments, last_updated_threshold: delete_threshold) do |environment|
+      releases_to_delete << Tooling::Helm3Client::Release.new(environment.slug, 1, environment.updated_at, nil, nil, review_apps_namespace)
+    end
+
     delete_helm_releases(releases_to_delete)
   end
...@@ -102,14 +111,12 @@ class AutomatedCleanup
     stop_threshold = threshold_time(days: days_for_stop)
     delete_threshold = threshold_time(days: days_for_delete)

-    max_delete_count = 1000
-    delete_count = 0
-
+    # Delete environments via deployments
     gitlab.deployments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc').auto_paginate do |deployment|
       environment = deployment.environment

       next unless environment
-      next unless environment.name.start_with?('review-docs/')
+      next unless environment.name.start_with?(ENVIRONMENT_PREFIX[:docs_review_app])
       next if checked_environments.include?(environment.slug)

       last_deploy = deployment.created_at
...@@ -120,15 +127,12 @@ class AutomatedCleanup
         stop_environment(environment, deployment) if environment_state && environment_state != 'stopped'
       end

-      if deployed_at < delete_threshold
-        delete_environment(environment, deployment)
-        delete_count += 1
-        break if delete_count > max_delete_count
-      end
+      delete_environment(environment, deployment) if deployed_at < delete_threshold

       checked_environments << environment.slug
     end

+    delete_stopped_environments(environment_type: :docs_review_app, checked_environments: checked_environments, last_updated_threshold: delete_threshold)
   end

   def perform_helm_releases_cleanup!(days:)
...@@ -171,8 +175,9 @@ class AutomatedCleanup
     nil
   end

-  def delete_environment(environment, deployment)
-    print_release_state(subject: 'Review app', release_name: environment.slug, release_date: deployment.created_at, action: 'deleting')
+  def delete_environment(environment, deployment = nil)
+    release_date = deployment ? deployment.created_at : environment.updated_at
+    print_release_state(subject: 'Review app', release_name: environment.slug, release_date: release_date, action: 'deleting')
     gitlab.delete_environment(project_path, environment.id)
   rescue Gitlab::Error::Forbidden
...@@ -187,6 +192,24 @@ class AutomatedCleanup
     puts "Review app '#{environment.name}' / '#{environment.slug}' (##{environment.id}) is forbidden: skipping it"
   end

+  def delete_stopped_environments(environment_type:, checked_environments:, last_updated_threshold:)
+    gitlab.environments(project_path, per_page: DEPLOYMENTS_PER_PAGE, sort: 'desc', states: 'stopped', search: ENVIRONMENT_PREFIX[environment_type]).auto_paginate do |environment|
+      next if skip_environment?(environment: environment, checked_environments: checked_environments, last_updated_threshold: last_updated_threshold, environment_type: environment_type)
+
+      yield environment if delete_environment(environment) && block_given?
+
+      checked_environments << environment.slug
+    end
+  end
+
+  def skip_environment?(environment:, checked_environments:, last_updated_threshold:, environment_type:)
+    return true unless environment.name.start_with?(ENVIRONMENT_PREFIX[environment_type])
+    return true if checked_environments.include?(environment.slug)
+    return true if Time.parse(environment.updated_at) > last_updated_threshold
+
+    false
+  end
+
   def helm_releases
     args = ['--all', '--date']
...
...@@ -296,12 +296,16 @@ RSpec.describe 'New project', :js do
        expect(git_import_instructions).to have_content 'Git repository URL'
      end

-     it 'reports error if repo URL does not end with .git' do
+     it 'reports error if repo URL is not a valid Git repository' do
+       stub_request(:get, "http://foo/bar/info/refs?service=git-upload-pack").to_return(status: 200, body: "not-a-git-repo")
+
        fill_in 'project_import_url', with: 'http://foo/bar'

        # simulate blur event
        find('body').click

-       expect(page).to have_text('A repository URL usually ends in a .git suffix')
+       wait_for_requests
+
+       expect(page).to have_text('There is not a valid Git repository at this URL')
      end

      it 'keeps "Import project" tab open after form validation error' do
...
...@@ -4,7 +4,7 @@ require 'spec_helper'
 RSpec.describe Banzai::CrossProjectReference do
   let(:including_class) { Class.new.include(described_class).new }
-  let(:reference_cache) { Banzai::Filter::References::ReferenceCache.new(including_class, {}) }
+  let(:reference_cache) { Banzai::Filter::References::ReferenceCache.new(including_class, {}, {}) }

   before do
     allow(including_class).to receive(:context).and_return({})
...
...@@ -12,15 +12,48 @@ RSpec.describe Banzai::Filter::References::ReferenceCache do
   let(:filter_class) { Banzai::Filter::References::IssueReferenceFilter }
   let(:filter) { filter_class.new(doc, project: project) }
-  let(:cache) { described_class.new(filter, { project: project }) }
+  let(:cache) { described_class.new(filter, { project: project }, result) }
+  let(:result) { {} }

   describe '#load_references_per_parent' do
+    subject { cache.load_references_per_parent(filter.nodes) }
+
     it 'loads references grouped per parent paths' do
-      cache.load_references_per_parent(filter.nodes)
+      expect(doc).to receive(:to_html).and_call_original
+
+      subject

       expect(cache.references_per_parent).to eq({ project.full_path => [issue1.iid, issue2.iid].to_set,
                                                   project2.full_path => [issue3.iid].to_set })
     end
+
+    context 'when rendered_html is memoized' do
+      let(:result) { { rendered_html: 'html' } }
+
+      it 'reuses memoized rendered HTML when available' do
+        expect(doc).not_to receive(:to_html)
+
+        subject
+      end
+
+      context 'when feature flag is disabled' do
+        before do
+          stub_feature_flags(reference_cache_memoization: false)
+        end
+
+        it 'ignores memoized rendered HTML' do
+          expect(doc).to receive(:to_html).and_call_original
+
+          subject
+        end
+      end
+    end
+
+    context 'when result is not available' do
+      let(:result) { nil }
+
+      it { expect { subject }.not_to raise_error }
+    end
   end

   describe '#load_parent_per_reference' do
...@@ -47,7 +80,7 @@ RSpec.describe Banzai::Filter::References::ReferenceCache do
     it 'does not have an N+1 query problem with cross projects' do
       doc_single = Nokogiri::HTML.fragment("#1")
       filter_single = filter_class.new(doc_single, project: project)
-      cache_single = described_class.new(filter_single, { project: project })
+      cache_single = described_class.new(filter_single, { project: project }, {})

       control_count = ActiveRecord::QueryRecorder.new do
         cache_single.load_references_per_parent(filter_single.nodes)
...
...@@ -1221,32 +1221,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
                                %w(test success),
                                %w(deploy running)])
      end
-
-     context 'when commit status is retried' do
-       let!(:old_commit_status) do
-         create(:commit_status, pipeline: pipeline,
-                                stage: 'build',
-                                name: 'mac',
-                                stage_idx: 0,
-                                status: 'success')
-       end
-
-       context 'when FF ci_remove_update_retried_from_process_pipeline is disabled' do
-         before do
-           stub_feature_flags(ci_remove_update_retried_from_process_pipeline: false)
-
-           Ci::ProcessPipelineService
-             .new(pipeline)
-             .execute
-         end
-
-         it 'ignores the previous state' do
-           expect(statuses).to eq([%w(build success),
-                                   %w(test success),
-                                   %w(deploy running)])
-         end
-       end
-     end
    end

    context 'when there is a stage with warnings' do
...
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe VulnerabilityFindingHelpers do
  let(:cls) do
    Class.new do
      include VulnerabilityFindingHelpers

      attr_accessor :report_type

      def initialize(report_type)
        @report_type = report_type
      end
    end
  end

  describe '#requires_manual_resolution?' do
    it 'returns false if the finding does not require manual resolution' do
      expect(cls.new('sast').requires_manual_resolution?).to eq(false)
    end

    it 'returns true when the finding requires manual resolution' do
      expect(cls.new('secret_detection').requires_manual_resolution?).to eq(true)
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Import::UrlController do
  let_it_be(:user) { create(:user) }

  before do
    login_as(user)
  end

  describe 'POST #validate' do
    it 'reports success when the service reports a success status' do
      allow_next_instance_of(Import::ValidateRemoteGitEndpointService) do |validate_endpoint_service|
        allow(validate_endpoint_service).to receive(:execute).and_return(ServiceResponse.success)
      end

      post import_url_validate_path, params: { url: 'https://fake.repo' }

      expect(json_response).to eq({ 'success' => true })
    end

    it 'exposes the error message when the service reports an error' do
      expect_next_instance_of(Import::ValidateRemoteGitEndpointService) do |validate_endpoint_service|
        expect(validate_endpoint_service).to receive(:execute).and_return(ServiceResponse.error(message: 'foobar'))
      end

      post import_url_validate_path, params: { url: 'https://fake.repo' }

      expect(json_response).to eq({ 'success' => false, 'message' => 'foobar' })
    end

    context 'with an anonymous user' do
      before do
        sign_out(user)
      end

      it 'redirects to the sign-in page' do
        post import_url_validate_path

        expect(response).to redirect_to(new_user_session_path)
      end
    end
  end
end
...@@ -10,11 +10,9 @@ RSpec.describe Ci::ProcessPipelineService do
   end

   let(:pipeline_processing_events_counter) { double(increment: true) }
-  let(:legacy_update_jobs_counter) { double(increment: true) }

   let(:metrics) do
-    double(pipeline_processing_events_counter: pipeline_processing_events_counter,
-           legacy_update_jobs_counter: legacy_update_jobs_counter)
+    double(pipeline_processing_events_counter: pipeline_processing_events_counter)
   end

   subject { described_class.new(pipeline) }
...@@ -33,68 +31,4 @@ RSpec.describe Ci::ProcessPipelineService do
       subject.execute
     end
   end
-
-  describe 'updating a list of retried builds' do
-    let!(:build_retried) { create_build('build') }
-    let!(:build) { create_build('build') }
-    let!(:test) { create_build('test') }
-
-    context 'when FF ci_remove_update_retried_from_process_pipeline is enabled' do
-      it 'does not update older builds as retried' do
-        subject.execute
-
-        expect(all_builds.latest).to contain_exactly(build, build_retried, test)
-        expect(all_builds.retried).to be_empty
-      end
-    end
-
-    context 'when FF ci_remove_update_retried_from_process_pipeline is disabled' do
-      before do
-        stub_feature_flags(ci_remove_update_retried_from_process_pipeline: false)
-      end
-
-      it 'returns unique statuses' do
-        subject.execute
-
-        expect(all_builds.latest).to contain_exactly(build, test)
-        expect(all_builds.retried).to contain_exactly(build_retried)
-      end
-
-      it 'increments the counter' do
-        expect(legacy_update_jobs_counter).to receive(:increment)
-
-        subject.execute
-      end
-
-      it 'logs the project and pipeline id' do
-        expect(Gitlab::AppJsonLogger).to receive(:info).with(event: 'update_retried_is_used',
-                                                             project_id: project.id,
-                                                             pipeline_id: pipeline.id)
-
-        subject.execute
-      end
-
-      context 'when the previous build has already retried column true' do
-        before do
-          build_retried.update_columns(retried: true)
-        end
-
-        it 'does not increment the counter' do
-          expect(legacy_update_jobs_counter).not_to receive(:increment)
-
-          subject.execute
-        end
-      end
-    end
-
-    private
-
-    def create_build(name, **opts)
-      create(:ci_build, :created, pipeline: pipeline, name: name, **opts)
-    end
-
-    def all_builds
-      pipeline.builds.order(:stage_idx, :id)
-    end
-  end
 end
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Import::ValidateRemoteGitEndpointService do
  include StubRequests

  let_it_be(:base_url) { 'http://demo.host/path' }
  let_it_be(:endpoint_url) { "#{base_url}/info/refs?service=git-upload-pack" }
  let_it_be(:error_message) { "#{base_url} is not a valid HTTP Git repository" }

  describe '#execute' do
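    # A Git smart-HTTP server answers GET /info/refs?service=git-upload-pack with
    # this content type and a pkt-line body; '001e' is the hex pkt-line length
    # prefix for the '# service=git-upload-pack' announcement.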
    let(:valid_response) do
      { status: 200,
        body: '001e# service=git-upload-pack',
        headers: { 'Content-Type': 'application/x-git-upload-pack-advertisement' } }
    end

    it 'correctly handles URLs with a fragment' do
      allow(Gitlab::HTTP).to receive(:get)

      described_class.new(url: "#{base_url}#somehash").execute

      expect(Gitlab::HTTP).to have_received(:get).with(endpoint_url, basic_auth: nil, stream_body: true, follow_redirects: false)
    end

    context 'when receiving an HTTP response' do
      subject { described_class.new(url: base_url) }

      it 'returns success when the response is valid and contains the correct payload' do
        stub_full_request(endpoint_url, method: :get).to_return(valid_response)

        result = subject.execute

        expect(result).to be_a(ServiceResponse)
        expect(result.success?).to be(true)
      end

      it 'reports an error when the status code is not 200' do
        stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ status: 301 }))

        result = subject.execute

        expect(result).to be_a(ServiceResponse)
        expect(result.error?).to be(true)
        expect(result.message).to eq(error_message)
      end

      it 'reports an error when the required header is missing' do
        stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ headers: nil }))

        result = subject.execute

        expect(result).to be_a(ServiceResponse)
        expect(result.error?).to be(true)
        expect(result.message).to eq(error_message)
      end

      it 'reports an error when the body is in an invalid format' do
        stub_full_request(endpoint_url, method: :get).to_return(valid_response.merge({ body: 'invalid content' }))

        result = subject.execute

        expect(result).to be_a(ServiceResponse)
        expect(result.error?).to be(true)
        expect(result.message).to eq(error_message)
      end

      it 'reports an error when an exception is raised' do
        stub_full_request(endpoint_url, method: :get).to_raise(SocketError.new('dummy message'))

        result = subject.execute

        expect(result).to be_a(ServiceResponse)
        expect(result.error?).to be(true)
        expect(result.message).to eq(error_message)
      end
    end

    it 'passes basic auth when credentials are provided' do
      allow(Gitlab::HTTP).to receive(:get)

      described_class.new(url: "#{base_url}#somehash", user: 'user', password: 'password').execute

      expect(Gitlab::HTTP).to have_received(:get).with(endpoint_url, basic_auth: { username: 'user', password: 'password' }, stream_body: true, follow_redirects: false)
    end
  end
end
...@@ -101,6 +101,27 @@ module FilteredSearchHelpers
     end
   end

+  # Same as `expect_tokens` but works with GlFilteredSearch
+  def expect_vue_tokens(tokens)
+    page.within '.gl-search-box-by-click .gl-filtered-search-scrollable' do
+      token_elements = page.all(:css, '.gl-filtered-search-token')
+
+      tokens.each_with_index do |token, index|
+        el = token_elements[index]
+
+        expect(el.find('.gl-filtered-search-token-type')).to have_content(token[:name])
+        expect(el.find('.gl-filtered-search-token-operator')).to have_content(token[:operator]) if token[:operator].present?
+        expect(el.find('.gl-filtered-search-token-data')).to have_content(token[:value]) if token[:value].present?
+
+        # gl-emoji content is blank when the emoji unicode is not supported
+        if token[:emoji_name].present?
+          selector = %(gl-emoji[data-name="#{token[:emoji_name]}"])
+          expect(el.find('.gl-filtered-search-token-data-content')).to have_css(selector)
+        end
+      end
+    end
+  end
+
   def create_token(token_name, token_value = nil, symbol = nil, token_operator = '=')
     { name: token_name, operator: token_operator, value: "#{symbol}#{token_value}" }
   end
...
...@@ -19,7 +19,7 @@ module Tooling
      end

      def last_update
-       @last_update ||= Time.parse(self[:last_update])
+       @last_update ||= self[:last_update] ? Time.parse(self[:last_update]) : nil
      end
    end
...