Commit f155cc90 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent c920712f
...@@ -356,6 +356,7 @@ linters: ...@@ -356,6 +356,7 @@ linters:
- 'app/views/shared/_auto_devops_implicitly_enabled_banner.html.haml' - 'app/views/shared/_auto_devops_implicitly_enabled_banner.html.haml'
- 'app/views/shared/_commit_message_container.html.haml' - 'app/views/shared/_commit_message_container.html.haml'
- 'app/views/shared/_confirm_modal.html.haml' - 'app/views/shared/_confirm_modal.html.haml'
- 'app/views/shared/_confirm_fork_modal.html.haml'
- 'app/views/shared/_delete_label_modal.html.haml' - 'app/views/shared/_delete_label_modal.html.haml'
- 'app/views/shared/_group_form.html.haml' - 'app/views/shared/_group_form.html.haml'
- 'app/views/shared/_group_tips.html.haml' - 'app/views/shared/_group_tips.html.haml'
......
...@@ -178,6 +178,11 @@ Gitlab/ModuleWithInstanceVariables: ...@@ -178,6 +178,11 @@ Gitlab/ModuleWithInstanceVariables:
- spec/support/**/*.rb - spec/support/**/*.rb
- features/steps/**/*.rb - features/steps/**/*.rb
Gitlab/ConstGetInheritFalse:
Enabled: true
Exclude:
- 'qa/bin/*'
Gitlab/HTTParty: Gitlab/HTTParty:
Enabled: true Enabled: true
Exclude: Exclude:
......
...@@ -32,6 +32,14 @@ module BlobHelper ...@@ -32,6 +32,14 @@ module BlobHelper
File.join(segments) File.join(segments)
end end
def ide_fork_and_edit_path(project = @project, ref = @ref, path = @path, options = {})
if current_user
project_forks_path(project,
namespace_key: current_user&.namespace&.id,
continue: edit_blob_fork_params(ide_edit_path(project, ref, path)))
end
end
def encode_ide_path(path) def encode_ide_path(path)
url_encode(path).gsub('%2F', '/') url_encode(path).gsub('%2F', '/')
end end
......
...@@ -8,13 +8,13 @@ module Clusters ...@@ -8,13 +8,13 @@ module Clusters
included do included do
state_machine :status do state_machine :status do
before_transition any => [:installed, :updated] do |application| before_transition any => [:installed, :updated] do |application|
application.version = application.class.const_get(:VERSION) application.version = application.class.const_get(:VERSION, false)
end end
end end
end end
def update_available? def update_available?
version != self.class.const_get(:VERSION) version != self.class.const_get(:VERSION, false)
end end
end end
end end
......
...@@ -44,7 +44,7 @@ module PrometheusAdapter ...@@ -44,7 +44,7 @@ module PrometheusAdapter
end end
def query_klass_for(query_name) def query_klass_for(query_name)
Gitlab::Prometheus::Queries.const_get("#{query_name.to_s.classify}Query") Gitlab::Prometheus::Queries.const_get("#{query_name.to_s.classify}Query", false)
end end
def build_query_args(*args) def build_query_args(*args)
......
...@@ -24,7 +24,7 @@ class Note < ApplicationRecord ...@@ -24,7 +24,7 @@ class Note < ApplicationRecord
class << self class << self
def values def values
constants.map {|const| self.const_get(const)} constants.map {|const| self.const_get(const, false)}
end end
def value?(val) def value?(val)
......
...@@ -138,7 +138,7 @@ class Upload < ApplicationRecord ...@@ -138,7 +138,7 @@ class Upload < ApplicationRecord
end end
def uploader_class def uploader_class
Object.const_get(uploader) Object.const_get(uploader, false)
end end
def identifier def identifier
......
...@@ -41,7 +41,7 @@ ...@@ -41,7 +41,7 @@
%li %li
= link_to '#modal-create-new-dir', { 'data-target' => '#modal-create-new-dir', 'data-toggle' => 'modal' } do = link_to '#modal-create-new-dir', { 'data-target' => '#modal-create-new-dir', 'data-toggle' => 'modal' } do
#{ _('New directory') } #{ _('New directory') }
- elsif can?(current_user, :fork_project, @project) && can?(current_user, :create_merge_request_in, @project) - elsif can_create_mr_from_fork
%li %li
- continue_params = { to: project_new_blob_path(@project, @id), - continue_params = { to: project_new_blob_path(@project, @id),
notice: edit_in_new_fork_notice, notice: edit_in_new_fork_notice,
...@@ -81,10 +81,15 @@ ...@@ -81,10 +81,15 @@
= render 'projects/find_file_link' = render 'projects/find_file_link'
- if can_collaborate - if can_create_mr_from_fork
= succeed " " do = succeed " " do
= link_to ide_edit_path(@project, @ref, @path), class: 'btn btn-default qa-web-ide-button' do - if can_collaborate || current_user&.already_forked?(@project)
= _('Web IDE') = link_to ide_edit_path(@project, @ref, @path), class: 'btn btn-default qa-web-ide-button' do
= _('Web IDE')
- else
= link_to '#modal-confirm-fork', class: 'btn btn-default qa-web-ide-button', data: { target: '#modal-confirm-fork', toggle: 'modal'} do
= _('Web IDE')
= render 'shared/confirm_fork_modal', fork_path: ide_fork_and_edit_path(@project, @ref, @path)
- if show_xcode_link?(@project) - if show_xcode_link?(@project)
.project-action-button.project-xcode.inline .project-action-button.project-xcode.inline
......
#modal-confirm-fork.modal.qa-confirm-fork-modal
.modal-dialog
.modal-content
.modal-header
%h3.page-title= _('Fork project?')
%button.close{ type: "button", "data-dismiss": "modal", "aria-label" => _('Close') }
%span{ "aria-hidden": true } &times;
.modal-body.p-3
%p= _("You're not allowed to %{tag_start}edit%{tag_end} files in this project directly. Please fork this project, make your changes there, and submit a merge request.") % { tag_start: '', tag_end: ''}
.modal-footer
= link_to _('Cancel'), '#', class: "btn btn-cancel", "data-dismiss" => "modal"
= link_to _('Fork project'), fork_path, class: 'btn btn-success', method: :post
---
title: Web IDE button should fork and open forked project when selected from read-only
project
merge_request: 17672
author:
type: added
...@@ -34,6 +34,7 @@ module Fog ...@@ -34,6 +34,7 @@ module Fog
# Gems that have not yet updated with the new fog-core namespace # Gems that have not yet updated with the new fog-core namespace
LEGACY_FOG_PROVIDERS = %w(google rackspace aliyun).freeze LEGACY_FOG_PROVIDERS = %w(google rackspace aliyun).freeze
# rubocop:disable Gitlab/ConstGetInheritFalse
def service_provider_constant(service_name, provider_name) def service_provider_constant(service_name, provider_name)
args = service_provider_search_args(service_name, provider_name) args = service_provider_search_args(service_name, provider_name)
Fog.const_get(args.first).const_get(*const_get_args(args.second)) Fog.const_get(args.first).const_get(*const_get_args(args.second))
...@@ -48,5 +49,6 @@ module Fog ...@@ -48,5 +49,6 @@ module Fog
[provider_name, service_name] [provider_name, service_name]
end end
end end
# rubocop:enable Gitlab/ConstGetInheritFalse
end end
end end
...@@ -13,7 +13,7 @@ def instrument_classes(instrumentation) ...@@ -13,7 +13,7 @@ def instrument_classes(instrumentation)
instrumentation.instrument_methods(Gitlab::Git) instrumentation.instrument_methods(Gitlab::Git)
Gitlab::Git.constants.each do |name| Gitlab::Git.constants.each do |name|
const = Gitlab::Git.const_get(name) const = Gitlab::Git.const_get(name, false)
next unless const.is_a?(Module) next unless const.is_a?(Module)
...@@ -75,7 +75,7 @@ def instrument_classes(instrumentation) ...@@ -75,7 +75,7 @@ def instrument_classes(instrumentation)
instrumentation.instrument_instance_methods(Rouge::Formatters::HTMLGitlab) instrumentation.instrument_instance_methods(Rouge::Formatters::HTMLGitlab)
[:XML, :HTML].each do |namespace| [:XML, :HTML].each do |namespace|
namespace_mod = Nokogiri.const_get(namespace) namespace_mod = Nokogiri.const_get(namespace, false)
instrumentation.instrument_methods(namespace_mod) instrumentation.instrument_methods(namespace_mod)
instrumentation.instrument_methods(namespace_mod::Document) instrumentation.instrument_methods(namespace_mod::Document)
......
...@@ -104,10 +104,10 @@ class Settings < Settingslogic ...@@ -104,10 +104,10 @@ class Settings < Settingslogic
# check that `current` (string or integer) is a contant in `modul`. # check that `current` (string or integer) is a contant in `modul`.
def verify_constant(modul, current, default) def verify_constant(modul, current, default)
constant = modul.constants.find { |name| modul.const_get(name) == current } constant = modul.constants.find { |name| modul.const_get(name, false) == current }
value = constant.nil? ? default : modul.const_get(constant) value = constant.nil? ? default : modul.const_get(constant, false)
if current.is_a? String if current.is_a? String
value = modul.const_get(current.upcase) rescue default value = modul.const_get(current.upcase, false) rescue default
end end
value value
......
...@@ -43,23 +43,14 @@ will go smoothly. ...@@ -43,23 +43,14 @@ will go smoothly.
### Object storage ### Object storage
Some classes of non-repository data can use object storage in preference to
file storage. Geo [does not replicate data in object storage](../replication/object_storage.md),
leaving that task up to the object store itself. For a planned failover, this
means you can decouple the replication of this data from the failover of the
GitLab service.
If you're already using object storage, simply verify that your **secondary**
node has access to the same data as the **primary** node - they must either they share the
same object storage configuration, or the **secondary** node should be configured to
access a [geographically-replicated][os-repl] copy provided by the object store
itself.
If you have a large GitLab installation or cannot tolerate downtime, consider If you have a large GitLab installation or cannot tolerate downtime, consider
[migrating to Object Storage][os-conf] **before** scheduling a planned failover. [migrating to Object Storage][os-conf] **before** scheduling a planned failover.
Doing so reduces both the length of the maintenance window, and the risk of data Doing so reduces both the length of the maintenance window, and the risk of data
loss as a result of a poorly executed planned failover. loss as a result of a poorly executed planned failover.
In GitLab 12.4, you can optionally allow GitLab to manage replication of Object Storage for
**secondary** nodes. For more information, see [Object Storage replication][os-conf].
### Review the configuration of each **secondary** node ### Review the configuration of each **secondary** node
Database settings are automatically replicated to the **secondary** node, but the Database settings are automatically replicated to the **secondary** node, but the
...@@ -224,5 +215,4 @@ Don't forget to remove the broadcast message after failover is complete. ...@@ -224,5 +215,4 @@ Don't forget to remove the broadcast message after failover is complete.
[background-verification]: background_verification.md [background-verification]: background_verification.md
[limitations]: ../replication/index.md#current-limitations [limitations]: ../replication/index.md#current-limitations
[moving-repositories]: ../../operations/moving_repositories.md [moving-repositories]: ../../operations/moving_repositories.md
[os-conf]: ../replication/object_storage.md#configuration [os-conf]: ../replication/object_storage.md
[os-repl]: ../replication/object_storage.md#replication
...@@ -283,7 +283,6 @@ You can keep track of the progress to include the missing items in: ...@@ -283,7 +283,6 @@ You can keep track of the progress to include the missing items in:
| [Maven Packages](../../../user/packages/maven_repository/index.md) | No | No | | [Maven Packages](../../../user/packages/maven_repository/index.md) | No | No |
| [Conan Packages](../../../user/packages/conan_repository/index.md) | No | No | | [Conan Packages](../../../user/packages/conan_repository/index.md) | No | No |
| [External merge request diffs](../../merge_request_diffs.md) | No, if they are on-disk | No | | [External merge request diffs](../../merge_request_diffs.md) | No, if they are on-disk | No |
| Content in object storage ([track progress](https://gitlab.com/groups/gitlab-org/-/epics/1526)) | No | No |
1. The integrity can be verified manually using [Integrity Check Rake Task](../../raketasks/check.md) on both nodes and comparing the output between them. 1. The integrity can be verified manually using [Integrity Check Rake Task](../../raketasks/check.md) on both nodes and comparing the output between them.
......
# Geo with Object storage **(PREMIUM ONLY)** # Geo with Object storage **(PREMIUM ONLY)**
Geo can be used in combination with Object Storage (AWS S3, or Geo can be used in combination with Object Storage (AWS S3, or other compatible object storage).
other compatible object storage).
## Configuration Currently, **secondary** nodes can use either:
At this time it is required that if object storage is enabled on the - The same storage bucket as the **primary** node.
**primary** node, it must also be enabled on each **secondary** node. - A replicated storage bucket.
**Secondary** nodes can use the same storage bucket as the **primary** node, or To have:
they can use a replicated storage bucket. At this time GitLab does not
take care of content replication in object storage. - GitLab manage replication, follow [Enabling GitLab replication](#enabling-gitlab-managed-object-storage-replication).
- Third-party services manage replication, follow [Third-party replication services](#third-party-replication-services).
## Enabling GitLab managed object storage replication
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/10586) in GitLab 12.4.
**Secondary** nodes can replicate files stored on the **primary** node regardless of
whether they are stored on the local filesystem or in object storage.
To enable GitLab replication, you must:
1. Go to **Admin Area > Geo**.
1. Press **Edit** on the **secondary** node.
1. Enable the **Allow this secondary node to replicate content on Object Storage**
checkbox.
For LFS, follow the documentation to For LFS, follow the documentation to
[set up LFS object storage](../../../workflow/lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage). [set up LFS object storage](../../../workflow/lfs/lfs_administration.md#storing-lfs-objects-in-remote-object-storage).
...@@ -20,12 +34,21 @@ For CI job artifacts, there is similar documentation to configure ...@@ -20,12 +34,21 @@ For CI job artifacts, there is similar documentation to configure
For user uploads, there is similar documentation to configure [upload object storage](../../uploads.md#using-object-storage-core-only) For user uploads, there is similar documentation to configure [upload object storage](../../uploads.md#using-object-storage-core-only)
You should enable and configure object storage on both **primary** and **secondary** If you want to migrate the **primary** node's files to object storage, you can
nodes. Migrating existing data to object storage should be performed on the configure the **secondary** in a few ways:
**primary** node only. **Secondary** nodes will automatically notice that the migrated
files are now in object storage. - Use the exact same object storage.
- Use a separate object store but leverage your object storage solution's built-in
replication.
- Use a separate object store and enable the **Allow this secondary node to replicate
content on Object Storage** setting.
GitLab does not currently support the case where both:
- The **primary** node uses local storage.
- A **secondary** node uses object storage.
## Replication ## Third-party replication services
When using Amazon S3, you can use When using Amazon S3, you can use
[CRR](https://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html) to [CRR](https://docs.aws.amazon.com/AmazonS3/latest/dev/crr.html) to
......
...@@ -31,30 +31,30 @@ This section is for links to information elsewhere in the GitLab documentation. ...@@ -31,30 +31,30 @@ This section is for links to information elsewhere in the GitLab documentation.
- Destructively reseeding the GitLab database. - Destructively reseeding the GitLab database.
- Guidance around updating packaged PostgreSQL, including how to stop it happening automatically. - Guidance around updating packaged PostgreSQL, including how to stop it happening automatically.
- [More about external PostgreSQL](/ee/administration/external_database.html) - [More about external PostgreSQL](../external_database.md)
- [Running GEO with external PostgreSQL](/ee/administration/geo/replication/external_database.html) - [Running GEO with external PostgreSQL](../geo/replication/external_database.md)
- [Upgrades when running PostgreSQL configured for HA.](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-gitlab-ha-cluster) - [Upgrades when running PostgreSQL configured for HA.](https://docs.gitlab.com/omnibus/settings/database.html#upgrading-a-gitlab-ha-cluster)
- Consuming PostgreSQL from [within CI runners](/ee/ci/services/postgres.html) - Consuming PostgreSQL from [within CI runners](../../ci/services/postgres.md)
- [Using Slony to update PostgreSQL](/ee/update/upgrading_postgresql_using_slony.html) - [Using Slony to update PostgreSQL](../../update/upgrading_postgresql_using_slony.md)
- Uses replication to handle PostgreSQL upgrades - providing the schemas are the same. - Uses replication to handle PostgreSQL upgrades - providing the schemas are the same.
- Reduces downtime to a short window for swinging over to the newer vewrsion. - Reduces downtime to a short window for swinging over to the newer vewrsion.
- Managing Omnibus PostgreSQL versions [from the development docs](https://docs.gitlab.com/omnibus/development/managing-postgresql-versions.html) - Managing Omnibus PostgreSQL versions [from the development docs](https://docs.gitlab.com/omnibus/development/managing-postgresql-versions.html)
- [PostgreSQL scaling and HA](/ee/administration/high_availability/database.html) - [PostgreSQL scaling and HA](../high_availability/database.md)
- including [troubleshooting](/ee/administration/high_availability/database.html#troubleshooting) gitlab-ctl repmgr-check-master and pgbouncer errors - including [troubleshooting](../high_availability/database.md#troubleshooting) gitlab-ctl repmgr-check-master and pgbouncer errors
- [Developer database documentation](/ee/development/README.html#database-guides) - some of which is absolutely not for production use. Including: - [Developer database documentation](../../development/README.md#database-guides) - some of which is absolutely not for production use. Including:
- understanding EXPLAIN plans - understanding EXPLAIN plans
### Troubleshooting/Fixes ### Troubleshooting/Fixes
- [GitLab database requirements](/ee/install/requirements.html#database) including - [GitLab database requirements](../../install/requirements.md#database) including
- Support for MySQL was removed in GitLab 12.1; [migrate to PostgreSQL](/ee/update/mysql_to_postgresql.html) - Support for MySQL was removed in GitLab 12.1; [migrate to PostgreSQL](../../update/mysql_to_postgresql.md)
- required extension pg_trgm - required extension pg_trgm
- required extension postgres_fdw for Geo - required extension postgres_fdw for Geo
...@@ -71,7 +71,7 @@ pg_basebackup: could not create temporary replication slot "pg_basebackup_12345" ...@@ -71,7 +71,7 @@ pg_basebackup: could not create temporary replication slot "pg_basebackup_12345"
HINT: Free one or increase max_replication_slots. HINT: Free one or increase max_replication_slots.
``` ```
- GEO [replication errors](/ee/administration/geo/replication/troubleshooting.html#fixing-replication-errors) including: - GEO [replication errors](../geo/replication/troubleshooting.md#fixing-replication-errors) including:
``` ```
ERROR: replication slots can only be used if max_replication_slots > 0 ERROR: replication slots can only be used if max_replication_slots > 0
...@@ -83,11 +83,11 @@ Command exceeded allowed execution time ...@@ -83,11 +83,11 @@ Command exceeded allowed execution time
PANIC: could not write to file ‘pg_xlog/xlogtemp.123’: No space left on device PANIC: could not write to file ‘pg_xlog/xlogtemp.123’: No space left on device
``` ```
- [Checking GEO configuration](/ee/administration/geo/replication/troubleshooting.html#checking-configuration) including - [Checking GEO configuration](../geo/replication/troubleshooting.md#checking-configuration) including
- reconfiguring hosts/ports - reconfiguring hosts/ports
- checking and fixing user/password mappings - checking and fixing user/password mappings
- [Common GEO errors](/ee/administration/geo/replication/troubleshooting.html#fixing-common-errors) - [Common GEO errors](../geo/replication/troubleshooting.md#fixing-common-errors)
## Support topics ## Support topics
......
...@@ -1087,7 +1087,7 @@ Manual actions are considered to be write actions, so permissions for ...@@ -1087,7 +1087,7 @@ Manual actions are considered to be write actions, so permissions for
a user wants to trigger an action. In other words, in order to trigger a manual a user wants to trigger an action. In other words, in order to trigger a manual
action assigned to a branch that the pipeline is running for, the user needs to action assigned to a branch that the pipeline is running for, the user needs to
have the ability to merge to this branch. It is possible to use protected environments have the ability to merge to this branch. It is possible to use protected environments
to more strictly [protect manual deployments](#protecting-manual-jobs) from being to more strictly [protect manual deployments](#protecting-manual-jobs-premium) from being
run by unauthorized users. run by unauthorized users.
NOTE: **Note:** NOTE: **Note:**
...@@ -1095,36 +1095,38 @@ Using `when:manual` and `trigger` together results in the error `jobs:#{job-name ...@@ -1095,36 +1095,38 @@ Using `when:manual` and `trigger` together results in the error `jobs:#{job-name
should be on_success, on_failure or always`, because `when:manual` prevents triggers should be on_success, on_failure or always`, because `when:manual` prevents triggers
being used. being used.
##### Protecting manual jobs ##### Protecting manual jobs **(PREMIUM)**
It's possible to use [protected environments](../environments/protected_environments.md) It's possible to use [protected environments](../environments/protected_environments.md)
to define a precise list of users authorized to run a manual job. By allowing only to define a precise list of users authorized to run a manual job. By allowing only
users associated with a protected environment to trigger manual jobs, it is possible users associated with a protected environment to trigger manual jobs, it is possible
to implement some special use cases, such as: to implement some special use cases, such as:
- more precisely limiting who can deploy to an environment. - More precisely limiting who can deploy to an environment.
- enabling a pipeline to be blocked until an approved user "approves" it. - Enabling a pipeline to be blocked until an approved user "approves" it.
To do this, you must add an environment to the job. For example: To do this, you must:
```yaml 1. Add an `environment` to the job. For example:
deploy_prod:
stage: deploy ```yaml
script: deploy_prod:
- echo "Deploy to production server" stage: deploy
environment: script:
name: production - echo "Deploy to production server"
url: https://example.com environment:
when: manual name: production
only: url: https://example.com
- master when: manual
``` only:
- master
Then, in the [protected environments settings](../environments/protected_environments.md#protecting-environments), ```
select the environment (`production` in the example above) and add the users, roles or groups
that are authorized to trigger the manual job to the **Allowed to Deploy** list. Only those in 1. In the [protected environments settings](../environments/protected_environments.md#protecting-environments),
this list will be able to trigger this manual job, as well as GitLab admins who are always able select the environment (`production` in the example above) and add the users, roles or groups
to use protected environments. that are authorized to trigger the manual job to the **Allowed to Deploy** list. Only those in
this list will be able to trigger this manual job, as well as GitLab administrators
who are always able to use protected environments.
Additionally, if a manual job is defined as blocking by adding `allow_failure: false`, Additionally, if a manual job is defined as blocking by adding `allow_failure: false`,
the next stages of the pipeline will not run until the manual job is triggered. This the next stages of the pipeline will not run until the manual job is triggered. This
......
...@@ -21,6 +21,7 @@ Productivity Analytics allows GitLab users to: ...@@ -21,6 +21,7 @@ Productivity Analytics allows GitLab users to:
- Visualize typical merge request (MR) lifetime and statistics. Use a histogram that shows the distribution of the time elapsed between creating and merging merge requests. - Visualize typical merge request (MR) lifetime and statistics. Use a histogram that shows the distribution of the time elapsed between creating and merging merge requests.
- Drill down into the most time consuming merge requests, select a number of outliers, and filter down all subsequent charts to investigate potential causes. - Drill down into the most time consuming merge requests, select a number of outliers, and filter down all subsequent charts to investigate potential causes.
- Filter by group, project, author, label, milestone, or a specific date range. Filter down, for example, to the merge requests of a specific author in a group or project during a milestone or specific date range. - Filter by group, project, author, label, milestone, or a specific date range. Filter down, for example, to the merge requests of a specific author in a group or project during a milestone or specific date range.
- Measure velocity over time. Visualize the trends of each metric from the charts above over time in order to observe progress. Zoom in on a particular date range if you notice outliers.
## Accessing metrics and visualizations ## Accessing metrics and visualizations
...@@ -40,6 +41,8 @@ The following metrics and visualizations are available on a project or group lev ...@@ -40,6 +41,8 @@ The following metrics and visualizations are available on a project or group lev
- Number of commits per merge request. - Number of commits per merge request.
- Number of lines of code per commit. - Number of lines of code per commit.
- Number of files touched. - Number of files touched.
- Scatterplot showing all MRs merged on a certain date, together with the days it took to complete the action and a 30 day rolling median.
- Users can zoom in and out on specific days of interest.
- Table showing the list of merge requests with their respective time duration metrics. - Table showing the list of merge requests with their respective time duration metrics.
- Users can sort by any of the above metrics. - Users can sort by any of the above metrics.
......
...@@ -80,7 +80,7 @@ module API ...@@ -80,7 +80,7 @@ module API
note = create_note(noteable, opts) note = create_note(noteable, opts)
if note.valid? if note.valid?
present note, with: Entities.const_get(note.class.name) present note, with: Entities.const_get(note.class.name, false)
else else
bad_request!("Note #{note.errors.messages}") bad_request!("Note #{note.errors.messages}")
end end
......
...@@ -49,7 +49,7 @@ module API ...@@ -49,7 +49,7 @@ module API
resource :todos do resource :todos do
helpers do helpers do
def issuable_and_awardable?(type) def issuable_and_awardable?(type)
obj_type = Object.const_get(type) obj_type = Object.const_get(type, false)
(obj_type < Issuable) && (obj_type < Awardable) (obj_type < Issuable) && (obj_type < Awardable)
rescue NameError rescue NameError
......
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
module Banzai module Banzai
module Filter module Filter
def self.[](name) def self.[](name)
const_get("#{name.to_s.camelize}Filter") const_get("#{name.to_s.camelize}Filter", false)
end end
end end
end end
...@@ -3,63 +3,15 @@ ...@@ -3,63 +3,15 @@
# Generated HTML is transformed back to GFM by app/assets/javascripts/behaviors/markdown/nodes/audio.js # Generated HTML is transformed back to GFM by app/assets/javascripts/behaviors/markdown/nodes/audio.js
module Banzai module Banzai
module Filter module Filter
# Find every image that isn't already wrapped in an `a` tag, and that has class AudioLinkFilter < PlayableLinkFilter
# a `src` attribute ending with an audio extension, add a new audio node and
# a "Download" link in the case the audio cannot be played.
class AudioLinkFilter < HTML::Pipeline::Filter
def call
doc.xpath('descendant-or-self::img[not(ancestor::a)]').each do |el|
el.replace(audio_node(doc, el)) if has_audio_extension?(el)
end
doc
end
private private
def has_audio_extension?(element) def media_type
src = element.attr('data-canonical-src').presence || element.attr('src') "audio"
return unless src.present?
src_ext = File.extname(src).sub('.', '').downcase
Gitlab::FileTypeDetection::SAFE_AUDIO_EXT.include?(src_ext)
end end
def audio_node(doc, element) def safe_media_ext
container = doc.document.create_element( Gitlab::FileTypeDetection::SAFE_AUDIO_EXT
'div',
class: 'audio-container'
)
audio = doc.document.create_element(
'audio',
src: element['src'],
controls: true,
'data-setup' => '{}',
'data-title' => element['title'] || element['alt'])
link = doc.document.create_element(
'a',
element['title'] || element['alt'],
href: element['src'],
target: '_blank',
rel: 'noopener noreferrer',
title: "Download '#{element['title'] || element['alt']}'")
# make sure the original non-proxied src carries over
if element['data-canonical-src']
audio['data-canonical-src'] = element['data-canonical-src']
link['data-canonical-src'] = element['data-canonical-src']
end
download_paragraph = doc.document.create_element('p')
download_paragraph.children = link
container.add_child(audio)
container.add_child(download_paragraph)
container
end end
end end
end end
......
# frozen_string_literal: true
module Banzai
module Filter
# Find every image that isn't already wrapped in an `a` tag, and that has
# a `src` attribute ending with an audio or video extension, add a new audio or video node and
# a "Download" link in the case the media cannot be played.
class PlayableLinkFilter < HTML::Pipeline::Filter
def call
doc.xpath('descendant-or-self::img[not(ancestor::a)]').each do |el|
el.replace(media_node(doc, el)) if has_media_extension?(el)
end
doc
end
private
def media_type
raise NotImplementedError
end
def safe_media_ext
raise NotImplementedError
end
def extra_element_attrs
{}
end
def has_media_extension?(element)
src = element.attr('data-canonical-src').presence || element.attr('src')
return unless src.present?
src_ext = File.extname(src).sub('.', '').downcase
safe_media_ext.include?(src_ext)
end
def media_element(doc, element)
media_element_attrs = {
src: element['src'],
controls: true,
'data-setup': '{}',
'data-title': element['title'] || element['alt']
}.merge!(extra_element_attrs)
if element['data-canonical-src']
media_element_attrs['data-canonical-src'] = element['data-canonical-src']
end
doc.document.create_element(media_type, media_element_attrs)
end
def download_paragraph(doc, element)
link_content = element['title'] || element['alt']
link_element_attrs = {
href: element['src'],
target: '_blank',
rel: 'noopener noreferrer',
title: "Download '#{link_content}'"
}
# make sure the original non-proxied src carries over
if element['data-canonical-src']
link_element_attrs['data-canonical-src'] = element['data-canonical-src']
end
link = doc.document.create_element('a', link_content, link_element_attrs)
doc.document.create_element('p').tap do |paragraph|
paragraph.children = link
end
end
def media_node(doc, element)
container_element_attrs = { class: "#{media_type}-container" }
doc.document.create_element( "div", container_element_attrs).tap do |container|
container.add_child(media_element(doc, element))
container.add_child(download_paragraph(doc, element))
end
end
end
end
end
...@@ -3,64 +3,19 @@ ...@@ -3,64 +3,19 @@
# Generated HTML is transformed back to GFM by app/assets/javascripts/behaviors/markdown/nodes/video.js # Generated HTML is transformed back to GFM by app/assets/javascripts/behaviors/markdown/nodes/video.js
module Banzai module Banzai
module Filter module Filter
# Find every image that isn't already wrapped in an `a` tag, and that has class VideoLinkFilter < PlayableLinkFilter
# a `src` attribute ending with a video extension, add a new video node and
# a "Download" link in the case the video cannot be played.
class VideoLinkFilter < HTML::Pipeline::Filter
def call
doc.xpath('descendant-or-self::img[not(ancestor::a)]').each do |el|
el.replace(video_node(doc, el)) if has_video_extension?(el)
end
doc
end
private private
def has_video_extension?(element) def media_type
src = element.attr('data-canonical-src').presence || element.attr('src') "video"
return unless src.present?
src_ext = File.extname(src).sub('.', '').downcase
Gitlab::FileTypeDetection::SAFE_VIDEO_EXT.include?(src_ext)
end end
def video_node(doc, element) def safe_media_ext
container = doc.document.create_element( Gitlab::FileTypeDetection::SAFE_VIDEO_EXT
'div', end
class: 'video-container'
)
video = doc.document.create_element(
'video',
src: element['src'],
width: '100%',
controls: true,
'data-setup' => '{}',
'data-title' => element['title'] || element['alt'])
link = doc.document.create_element(
'a',
element['title'] || element['alt'],
href: element['src'],
target: '_blank',
rel: 'noopener noreferrer',
title: "Download '#{element['title'] || element['alt']}'")
# make sure the original non-proxied src carries over
if element['data-canonical-src']
video['data-canonical-src'] = element['data-canonical-src']
link['data-canonical-src'] = element['data-canonical-src']
end
download_paragraph = doc.document.create_element('p')
download_paragraph.children = link
container.add_child(video)
container.add_child(download_paragraph)
container def extra_element_attrs
{ width: "100%" }
end end
end end
end end
......
...@@ -4,7 +4,7 @@ module Banzai ...@@ -4,7 +4,7 @@ module Banzai
module Pipeline module Pipeline
def self.[](name) def self.[](name)
name ||= :full name ||= :full
const_get("#{name.to_s.camelize}Pipeline") const_get("#{name.to_s.camelize}Pipeline", false)
end end
end end
end end
...@@ -10,7 +10,7 @@ module Banzai ...@@ -10,7 +10,7 @@ module Banzai
# #
# This would return the `Banzai::ReferenceParser::IssueParser` class. # This would return the `Banzai::ReferenceParser::IssueParser` class.
def self.[](name) def self.[](name)
const_get("#{name.to_s.camelize}Parser") const_get("#{name.to_s.camelize}Parser", false)
end end
end end
end end
...@@ -30,7 +30,7 @@ module Bitbucket ...@@ -30,7 +30,7 @@ module Bitbucket
end end
def representation_class(type) def representation_class(type)
Bitbucket::Representation.const_get(type.to_s.camelize) Bitbucket::Representation.const_get(type.to_s.camelize, false)
end end
end end
end end
...@@ -30,7 +30,7 @@ module BitbucketServer ...@@ -30,7 +30,7 @@ module BitbucketServer
end end
def representation_class(type) def representation_class(type)
BitbucketServer::Representation.const_get(type.to_s.camelize) BitbucketServer::Representation.const_get(type.to_s.camelize, false)
end end
end end
end end
...@@ -78,7 +78,7 @@ module Gitlab ...@@ -78,7 +78,7 @@ module Gitlab
end end
def self.migration_class_for(class_name) def self.migration_class_for(class_name)
const_get(class_name) const_get(class_name, false)
end end
def self.enqueued_job?(queues, migration_class) def self.enqueued_job?(queues, migration_class)
......
...@@ -171,7 +171,11 @@ module Gitlab ...@@ -171,7 +171,11 @@ module Gitlab
end end
def schedule_retry(project, retry_count) def schedule_retry(project, retry_count)
BackgroundMigrationWorker.perform_in(RETRY_DELAY, self.class::RetryOne.name, [project.id, retry_count]) # Constants provided to BackgroundMigrationWorker must be within the
# scope of Gitlab::BackgroundMigration
retry_class_name = self.class::RetryOne.name.sub('Gitlab::BackgroundMigration::', '')
BackgroundMigrationWorker.perform_in(RETRY_DELAY, retry_class_name, [project.id, retry_count])
end end
end end
......
...@@ -23,7 +23,7 @@ module Gitlab ...@@ -23,7 +23,7 @@ module Gitlab
end end
def request_cache(method_name, &method_key_block) def request_cache(method_name, &method_key_block)
const_get(:RequestCacheExtension).module_eval do const_get(:RequestCacheExtension, false).module_eval do
cache_key_method_name = "#{method_name}_cache_key" cache_key_method_name = "#{method_name}_cache_key"
define_method(method_name) do |*args| define_method(method_name) do |*args|
......
...@@ -6,7 +6,7 @@ module Gitlab ...@@ -6,7 +6,7 @@ module Gitlab
module Policy module Policy
def self.fabricate(specs) def self.fabricate(specs)
specifications = specs.to_h.map do |spec, value| specifications = specs.to_h.map do |spec, value|
self.const_get(spec.to_s.camelize).new(value) self.const_get(spec.to_s.camelize, false).new(value)
end end
specifications.compact specifications.compact
......
...@@ -20,7 +20,7 @@ module Gitlab ...@@ -20,7 +20,7 @@ module Gitlab
def core_status def core_status
Gitlab::Ci::Status Gitlab::Ci::Status
.const_get(@status.capitalize) .const_get(@status.capitalize, false)
.new(@subject, @user) .new(@subject, @user)
.extend(self.class.common_helpers) .extend(self.class.common_helpers)
end end
......
...@@ -37,7 +37,7 @@ module Gitlab ...@@ -37,7 +37,7 @@ module Gitlab
def self.entry_class(strategy) def self.entry_class(strategy)
if strategy.present? if strategy.present?
self.const_get(strategy.name) self.const_get(strategy.name, false)
else else
self::UnknownStrategy self::UnknownStrategy
end end
......
...@@ -4,7 +4,7 @@ module Gitlab ...@@ -4,7 +4,7 @@ module Gitlab
module CycleAnalytics module CycleAnalytics
module EventFetcher module EventFetcher
def self.[](stage_name) def self.[](stage_name)
CycleAnalytics.const_get("#{stage_name.to_s.camelize}EventFetcher") CycleAnalytics.const_get("#{stage_name.to_s.camelize}EventFetcher", false)
end end
end end
end end
......
...@@ -4,7 +4,7 @@ module Gitlab ...@@ -4,7 +4,7 @@ module Gitlab
module CycleAnalytics module CycleAnalytics
module Stage module Stage
def self.[](stage_name) def self.[](stage_name)
CycleAnalytics.const_get("#{stage_name.to_s.camelize}Stage") CycleAnalytics.const_get("#{stage_name.to_s.camelize}Stage", false)
end end
end end
end end
......
...@@ -58,13 +58,13 @@ module Gitlab ...@@ -58,13 +58,13 @@ module Gitlab
# Returns true if the given migration can be performed without downtime. # Returns true if the given migration can be performed without downtime.
def online?(migration) def online?(migration)
migration.const_get(DOWNTIME_CONST) == false migration.const_get(DOWNTIME_CONST, false) == false
end end
# Returns the downtime reason, or nil if none was defined. # Returns the downtime reason, or nil if none was defined.
def downtime_reason(migration) def downtime_reason(migration)
if migration.const_defined?(DOWNTIME_REASON_CONST) if migration.const_defined?(DOWNTIME_REASON_CONST)
migration.const_get(DOWNTIME_REASON_CONST) migration.const_get(DOWNTIME_REASON_CONST, false)
else else
nil nil
end end
......
...@@ -86,7 +86,7 @@ module Gitlab ...@@ -86,7 +86,7 @@ module Gitlab
if name == :health_check if name == :health_check
Grpc::Health::V1::Health::Stub Grpc::Health::V1::Health::Stub
else else
Gitaly.const_get(name.to_s.camelcase.to_sym).const_get(:Stub) Gitaly.const_get(name.to_s.camelcase.to_sym, false).const_get(:Stub, false)
end end
end end
......
...@@ -8,7 +8,7 @@ module Gitlab ...@@ -8,7 +8,7 @@ module Gitlab
extend ActiveSupport::Concern extend ActiveSupport::Concern
included do included do
attr_accessor(*const_get(:ATTRS)) attr_accessor(*const_get(:ATTRS, false))
end end
def initialize(params) def initialize(params)
...@@ -26,7 +26,7 @@ module Gitlab ...@@ -26,7 +26,7 @@ module Gitlab
end end
def attributes def attributes
self.class.const_get(:ATTRS) self.class.const_get(:ATTRS, false)
end end
end end
end end
......
...@@ -24,7 +24,7 @@ module Gitlab ...@@ -24,7 +24,7 @@ module Gitlab
super super
if const_defined?(:ClassMethods) if const_defined?(:ClassMethods)
klass_methods = const_get(:ClassMethods) klass_methods = const_get(:ClassMethods, false)
base.singleton_class.prepend klass_methods base.singleton_class.prepend klass_methods
base.instance_variable_set(:@_prepended_class_methods, klass_methods) base.instance_variable_set(:@_prepended_class_methods, klass_methods)
end end
...@@ -40,7 +40,7 @@ module Gitlab ...@@ -40,7 +40,7 @@ module Gitlab
super super
if instance_variable_defined?(:@_prepended_class_methods) if instance_variable_defined?(:@_prepended_class_methods)
const_get(:ClassMethods).prepend @_prepended_class_methods const_get(:ClassMethods, false).prepend @_prepended_class_methods
end end
end end
......
...@@ -7250,6 +7250,9 @@ msgstr "" ...@@ -7250,6 +7250,9 @@ msgstr ""
msgid "Fork project" msgid "Fork project"
msgstr "" msgstr ""
msgid "Fork project?"
msgstr ""
msgid "ForkedFromProjectPath|Forked from" msgid "ForkedFromProjectPath|Forked from"
msgstr "" msgstr ""
......
...@@ -17,7 +17,7 @@ module QA ...@@ -17,7 +17,7 @@ module QA
def constants def constants
@consts ||= @module.constants.map do |const| @consts ||= @module.constants.map do |const|
@module.const_get(const) @module.const_get(const, false)
end end
end end
......
...@@ -65,7 +65,7 @@ module QA ...@@ -65,7 +65,7 @@ module QA
# QA::Runtime::Env.browser.capitalize will work for every driver type except PhantomJS. # QA::Runtime::Env.browser.capitalize will work for every driver type except PhantomJS.
# We will have no use to use PhantomJS so this shouldn't be a problem. # We will have no use to use PhantomJS so this shouldn't be a problem.
options = Selenium::WebDriver.const_get(QA::Runtime::Env.browser.capitalize)::Options.new options = Selenium::WebDriver.const_get(QA::Runtime::Env.browser.capitalize, false)::Options.new
if QA::Runtime::Env.browser == :chrome if QA::Runtime::Env.browser == :chrome
options.add_argument("window-size=1480,2200") options.add_argument("window-size=1480,2200")
......
...@@ -19,7 +19,7 @@ module QA ...@@ -19,7 +19,7 @@ module QA
end end
def strategy def strategy
QA.const_get("QA::#{version}::Strategy") Object.const_get("QA::#{version}::Strategy", false)
end end
def self.method_missing(name, *args) def self.method_missing(name, *args)
......
# frozen_string_literal: true

module RuboCop
  module Cop
    module Gitlab
      # Cop that encourages usage of inherit=false for 2nd argument when using const_get.
      #
      # See https://gitlab.com/gitlab-org/gitlab/issues/27678
      class ConstGetInheritFalse < RuboCop::Cop::Cop
        MSG = 'Use inherit=false when using const_get.'

        def_node_matcher :const_get?, <<~PATTERN
          (send _ :const_get ...)
        PATTERN

        # Flag every `const_get` call whose second argument is not the
        # literal `false`.
        def on_send(node)
          return unless const_get?(node)

          inherit = inherit_argument(node)
          add_offense(node, location: :selector) unless inherit&.false_type?
        end

        # Rewrite the inherit argument to the literal `false`, appending
        # it when the call had only one argument.
        def autocorrect(node)
          lambda do |corrector|
            inherit = inherit_argument(node)

            if inherit
              corrector.replace(inherit.source_range, 'false')
            else
              corrector.insert_after(node.arguments.first.source_range, ', false')
            end
          end
        end

        private

        # Second positional argument of the `const_get` call, if present.
        def inherit_argument(node)
          node.arguments[1]
        end
      end
    end
  end
end
require_relative 'cop/gitlab/const_get_inherit_false'
require_relative 'cop/gitlab/module_with_instance_variables' require_relative 'cop/gitlab/module_with_instance_variables'
require_relative 'cop/gitlab/predicate_memoization' require_relative 'cop/gitlab/predicate_memoization'
require_relative 'cop/gitlab/httparty' require_relative 'cop/gitlab/httparty'
......
...@@ -15,7 +15,7 @@ FactoryBot.define do ...@@ -15,7 +15,7 @@ FactoryBot.define do
end end
path do path do
uploader_instance = Object.const_get(uploader.to_s).new(model, mount_point) uploader_instance = Object.const_get(uploader.to_s, false).new(model, mount_point)
File.join(uploader_instance.store_dir, filename) File.join(uploader_instance.store_dir, filename)
end end
......
...@@ -270,4 +270,32 @@ describe BlobHelper do ...@@ -270,4 +270,32 @@ describe BlobHelper do
end end
end end
end end
describe '#ide_fork_and_edit_path' do
let(:project) { create(:project) }
let(:current_user) { create(:user) }
let(:can_push_code) { true }
before do
allow(helper).to receive(:current_user).and_return(current_user)
allow(helper).to receive(:can?).and_return(can_push_code)
end
it 'returns path to fork the repo with a redirect param to the full IDE path' do
uri = URI(helper.ide_fork_and_edit_path(project, "master", ""))
params = CGI.unescape(uri.query)
expect(uri.path).to eq("/#{project.namespace.path}/#{project.path}/-/forks")
expect(params).to include("continue[to]=/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
expect(params).to include("namespace_key=#{current_user.namespace.id}")
end
context 'when user is not logged in' do
let(:current_user) { nil }
it 'returns nil' do
expect(helper.ide_fork_and_edit_path(project, "master", "")).to be_nil
end
end
end
end end
...@@ -32,6 +32,7 @@ describe Banzai::Filter::VideoLinkFilter do ...@@ -32,6 +32,7 @@ describe Banzai::Filter::VideoLinkFilter do
expect(video.name).to eq 'video' expect(video.name).to eq 'video'
expect(video['src']).to eq src expect(video['src']).to eq src
expect(video['width']).to eq "100%"
expect(paragraph.name).to eq 'p' expect(paragraph.name).to eq 'p'
......
...@@ -22,7 +22,7 @@ describe Gitlab::Ci::Status::External::Factory do ...@@ -22,7 +22,7 @@ describe Gitlab::Ci::Status::External::Factory do
end end
let(:expected_status) do let(:expected_status) do
Gitlab::Ci::Status.const_get(simple_status.capitalize) Gitlab::Ci::Status.const_get(simple_status.capitalize, false)
end end
it "fabricates a core status #{simple_status}" do it "fabricates a core status #{simple_status}" do
......
...@@ -13,7 +13,7 @@ describe Gitlab::Ci::Status::Factory do ...@@ -13,7 +13,7 @@ describe Gitlab::Ci::Status::Factory do
let(:resource) { double('resource', status: simple_status) } let(:resource) { double('resource', status: simple_status) }
let(:expected_status) do let(:expected_status) do
Gitlab::Ci::Status.const_get(simple_status.capitalize) Gitlab::Ci::Status.const_get(simple_status.capitalize, false)
end end
it "fabricates a core status #{simple_status}" do it "fabricates a core status #{simple_status}" do
......
...@@ -18,7 +18,7 @@ describe Gitlab::Ci::Status::Pipeline::Factory do ...@@ -18,7 +18,7 @@ describe Gitlab::Ci::Status::Pipeline::Factory do
let(:pipeline) { create(:ci_pipeline, status: simple_status) } let(:pipeline) { create(:ci_pipeline, status: simple_status) }
let(:expected_status) do let(:expected_status) do
Gitlab::Ci::Status.const_get(simple_status.capitalize) Gitlab::Ci::Status.const_get(simple_status.capitalize, false)
end end
it "matches correct core status for #{simple_status}" do it "matches correct core status for #{simple_status}" do
......
...@@ -34,7 +34,7 @@ describe Gitlab::Ci::Status::Stage::Factory do ...@@ -34,7 +34,7 @@ describe Gitlab::Ci::Status::Stage::Factory do
it "fabricates a core status #{core_status}" do it "fabricates a core status #{core_status}" do
expect(status).to be_a( expect(status).to be_a(
Gitlab::Ci::Status.const_get(core_status.capitalize)) Gitlab::Ci::Status.const_get(core_status.capitalize, false))
end end
it 'extends core status with common stage methods' do it 'extends core status with common stage methods' do
......
...@@ -24,9 +24,9 @@ describe Gitlab::Config::Entry::Simplifiable do ...@@ -24,9 +24,9 @@ describe Gitlab::Config::Entry::Simplifiable do
let(:unknown) { double('unknown strategy') } let(:unknown) { double('unknown strategy') }
before do before do
stub_const("#{described_class.name}::Something", first) entry::Something = first
stub_const("#{described_class.name}::DifferentOne", second) entry::DifferentOne = second
stub_const("#{described_class.name}::UnknownStrategy", unknown) entry::UnknownStrategy = unknown
end end
context 'when first strategy should be used' do context 'when first strategy should be used' do
......
...@@ -72,8 +72,8 @@ describe Gitlab::Patch::Prependable do ...@@ -72,8 +72,8 @@ describe Gitlab::Patch::Prependable do
expect(subject.ancestors.take(3)).to eq([subject, ee, ce]) expect(subject.ancestors.take(3)).to eq([subject, ee, ce])
expect(subject.singleton_class.ancestors.take(3)) expect(subject.singleton_class.ancestors.take(3))
.to eq([subject.singleton_class, .to eq([subject.singleton_class,
ee.const_get(:ClassMethods), ee.const_get(:ClassMethods, false),
ce.const_get(:ClassMethods)]) ce.const_get(:ClassMethods, false)])
end end
it 'prepends only once even if called twice' do it 'prepends only once even if called twice' do
...@@ -115,8 +115,8 @@ describe Gitlab::Patch::Prependable do ...@@ -115,8 +115,8 @@ describe Gitlab::Patch::Prependable do
it 'has the expected ancestors' do it 'has the expected ancestors' do
expect(subject.ancestors.take(3)).to eq([ee, ce, subject]) expect(subject.ancestors.take(3)).to eq([ee, ce, subject])
expect(subject.singleton_class.ancestors.take(3)) expect(subject.singleton_class.ancestors.take(3))
.to eq([ee.const_get(:ClassMethods), .to eq([ee.const_get(:ClassMethods, false),
ce.const_get(:ClassMethods), ce.const_get(:ClassMethods, false),
subject.singleton_class]) subject.singleton_class])
end end
...@@ -152,7 +152,7 @@ describe Gitlab::Patch::Prependable do ...@@ -152,7 +152,7 @@ describe Gitlab::Patch::Prependable do
it 'has the expected ancestors' do it 'has the expected ancestors' do
expect(subject.ancestors.take(2)).to eq([ee, subject]) expect(subject.ancestors.take(2)).to eq([ee, subject])
expect(subject.singleton_class.ancestors.take(2)) expect(subject.singleton_class.ancestors.take(2))
.to eq([ee.const_get(:ClassMethods), .to eq([ee.const_get(:ClassMethods, false),
subject.singleton_class]) subject.singleton_class])
end end
......
# frozen_string_literal: true
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../../rubocop/cop/gitlab/const_get_inherit_false'
describe RuboCop::Cop::Gitlab::ConstGetInheritFalse do
include CopHelper
subject(:cop) { described_class.new }
context 'Object.const_get' do
it 'registers an offense with no 2nd argument' do
expect_offense(<<~PATTERN.strip_indent)
Object.const_get(:CONSTANT)
^^^^^^^^^ Use inherit=false when using const_get.
PATTERN
end
it 'autocorrects' do
expect(autocorrect_source('Object.const_get(:CONSTANT)')).to eq('Object.const_get(:CONSTANT, false)')
end
context 'inherit=false' do
it 'does not register an offense' do
expect_no_offenses(<<~PATTERN.strip_indent)
Object.const_get(:CONSTANT, false)
PATTERN
end
end
context 'inherit=true' do
it 'registers an offense' do
expect_offense(<<~PATTERN.strip_indent)
Object.const_get(:CONSTANT, true)
^^^^^^^^^ Use inherit=false when using const_get.
PATTERN
end
it 'autocorrects' do
expect(autocorrect_source('Object.const_get(:CONSTANT, true)')).to eq('Object.const_get(:CONSTANT, false)')
end
end
end
context 'const_get for a nested class' do
it 'registers an offense on reload usage' do
expect_offense(<<~PATTERN.strip_indent)
Nested::Blog.const_get(:CONSTANT)
^^^^^^^^^ Use inherit=false when using const_get.
PATTERN
end
it 'autocorrects' do
expect(autocorrect_source('Nested::Blag.const_get(:CONSTANT)')).to eq('Nested::Blag.const_get(:CONSTANT, false)')
end
context 'inherit=false' do
it 'does not register an offense' do
expect_no_offenses(<<~PATTERN.strip_indent)
Nested::Blog.const_get(:CONSTANT, false)
PATTERN
end
end
context 'inherit=true' do
it 'registers an offense if inherit is true' do
expect_offense(<<~PATTERN.strip_indent)
Nested::Blog.const_get(:CONSTANT, true)
^^^^^^^^^ Use inherit=false when using const_get.
PATTERN
end
it 'autocorrects' do
expect(autocorrect_source('Nested::Blag.const_get(:CONSTANT, true)')).to eq('Nested::Blag.const_get(:CONSTANT, false)')
end
end
end
end
...@@ -75,7 +75,7 @@ shared_examples 'cluster application status specs' do |application_name| ...@@ -75,7 +75,7 @@ shared_examples 'cluster application status specs' do |application_name|
subject.reload subject.reload
expect(subject.version).to eq(subject.class.const_get(:VERSION)) expect(subject.version).to eq(subject.class.const_get(:VERSION, false))
end end
context 'application is updating' do context 'application is updating' do
...@@ -104,7 +104,7 @@ shared_examples 'cluster application status specs' do |application_name| ...@@ -104,7 +104,7 @@ shared_examples 'cluster application status specs' do |application_name|
subject.reload subject.reload
expect(subject.version).to eq(subject.class.const_get(:VERSION)) expect(subject.version).to eq(subject.class.const_get(:VERSION, false))
end end
end end
end end
......
...@@ -12,7 +12,7 @@ shared_examples 'cluster application version specs' do |application_name| ...@@ -12,7 +12,7 @@ shared_examples 'cluster application version specs' do |application_name|
context 'version is the same as VERSION' do context 'version is the same as VERSION' do
let(:application) { build(application_name) } let(:application) { build(application_name) }
let(:version) { application.class.const_get(:VERSION) } let(:version) { application.class.const_get(:VERSION, false) }
it { is_expected.to be_falsey } it { is_expected.to be_falsey }
end end
......
# frozen_string_literal: true

require 'spec_helper'

# View specs for the tree header partial: the Web IDE button should only
# appear when the user can fork / open an MR, and should point at the
# fork-confirmation modal when direct collaboration is not possible.
describe 'projects/tree/_tree_header' do
  let(:project) { create(:project, :repository) }
  let(:current_user) { create(:user) }
  let(:repository) { project.repository }

  before do
    assign(:project, project)
    assign(:repository, repository)
    assign(:id, File.join('master', ''))
    assign(:ref, 'master')

    allow(view).to receive(:current_user).and_return(current_user)
    allow(view).to receive(:can_collaborate_with_project?).and_return(true)
  end

  it 'does not render the WebIDE button when user cannot create fork or cannot open MR' do
    allow(view).to receive(:can?).and_return(false)

    render

    expect(rendered).not_to have_link('Web IDE')
  end

  it 'renders the WebIDE button when user can create fork and can open MR in project' do
    allow(view).to receive(:can?).and_return(true)

    render

    expect(rendered).to have_link('Web IDE')
  end

  it 'opens a popup confirming a fork if the user can create fork/MR but cannot collaborate with the project' do
    allow(view).to receive(:can?).and_return(true)
    allow(view).to receive(:can_collaborate_with_project?).and_return(false)

    render

    expect(rendered).to have_link('Web IDE', href: '#modal-confirm-fork')
  end
end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment