Commit b98fa9ef authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 7e300596
import { viewerInformationForPath } from '~/vue_shared/components/content_viewer/lib/viewer_utils';
import { escapeFileUrl } from '~/lib/utils/url_utility';
import { decorateData, sortTree } from '../stores/utils';
export const splitParent = path => {
......@@ -48,7 +47,7 @@ export const decorateFiles = ({
id: path,
name,
path,
url: `/${projectId}/tree/${branchId}/-/${escapeFileUrl(path)}/`,
url: `/${projectId}/tree/${branchId}/-/${path}/`,
type: 'tree',
parentTreeUrl: parentFolder ? parentFolder.url : `/${projectId}/tree/${branchId}/`,
tempFile,
......@@ -85,7 +84,7 @@ export const decorateFiles = ({
id: path,
name,
path,
url: `/${projectId}/blob/${branchId}/-/${escapeFileUrl(path)}`,
url: `/${projectId}/blob/${branchId}/-/${path}`,
type: 'blob',
parentTreeUrl: fileFolder ? fileFolder.url : `/${projectId}/blob/${branchId}`,
tempFile,
......
import { commitActionTypes, FILE_VIEW_MODE_EDITOR } from '../constants';
import { escapeFileUrl } from '~/lib/utils/url_utility';
export const dataStructure = () => ({
id: '',
......@@ -220,9 +219,7 @@ export const mergeTrees = (fromTree, toTree) => {
export const replaceFileUrl = (url, oldPath, newPath) => {
// Add `/-/` so that we don't accidentally replace project path
const result = url.replace(`/-/${escapeFileUrl(oldPath)}`, `/-/${escapeFileUrl(newPath)}`);
return result;
return url.replace(`/-/${oldPath}`, `/-/${newPath}`);
};
export const swapInStateArray = (state, arr, key, entryPath) =>
......
<script>
import { omit } from 'underscore';
import { omit } from 'lodash';
import { GlEmptyState, GlPagination, GlSkeletonLoading } from '@gitlab/ui';
import flash from '~/flash';
import axios from '~/lib/utils/axios_utils';
......
<script>
import FileHeader from '~/vue_shared/components/file_row_header.vue';
import FileIcon from '~/vue_shared/components/file_icon.vue';
import { escapeFileUrl } from '~/lib/utils/url_utility';
export default {
name: 'FileRow',
......@@ -94,7 +95,7 @@ export default {
hasUrlAtCurrentRoute() {
if (!this.$router || !this.$router.currentRoute) return true;
return this.$router.currentRoute.path === `/project${this.file.url}`;
return this.$router.currentRoute.path === `/project${escapeFileUrl(this.file.url)}`;
},
},
};
......
......@@ -6,20 +6,18 @@ module SnippetsActions
def edit
end
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def raw
disposition = params[:inline] == 'false' ? 'attachment' : 'inline'
workhorse_set_content_type!
send_data(
convert_line_endings(@snippet.content),
convert_line_endings(blob.data),
type: 'text/plain; charset=utf-8',
disposition: disposition,
filename: @snippet.sanitized_file_name
filename: Snippet.sanitized_file_name(blob.name)
)
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
def js_request?
request.format.js?
......
......@@ -66,7 +66,6 @@ class Projects::SnippetsController < Projects::ApplicationController
end
def show
blob = @snippet.blob
conditionally_expand_blob(blob)
respond_to do |format|
......@@ -115,6 +114,16 @@ class Projects::SnippetsController < Projects::ApplicationController
alias_method :awardable, :snippet
alias_method :spammable, :snippet
def blob
return unless snippet
@blob ||= if Feature.enabled?(:version_snippets, current_user) && !snippet.repository.empty?
snippet.blobs.first
else
snippet.blob
end
end
def spammable_path
project_snippet_path(@project, @snippet)
end
......
......@@ -68,17 +68,15 @@ class SnippetsController < ApplicationController
end
def show
blob = @snippet.blob
conditionally_expand_blob(blob)
@note = Note.new(noteable: @snippet)
@noteable = @snippet
@discussions = @snippet.discussions
@notes = prepare_notes_for_rendering(@discussions.flat_map(&:notes), @noteable)
respond_to do |format|
format.html do
@note = Note.new(noteable: @snippet)
@noteable = @snippet
@discussions = @snippet.discussions
@notes = prepare_notes_for_rendering(@discussions.flat_map(&:notes), @noteable)
render 'show'
end
......@@ -121,6 +119,16 @@ class SnippetsController < ApplicationController
alias_method :awardable, :snippet
alias_method :spammable, :snippet
def blob
return unless snippet
@blob ||= if Feature.enabled?(:version_snippets, current_user) && !snippet.repository.empty?
snippet.blobs.first
else
snippet.blob
end
end
def spammable_path
snippet_path(@snippet)
end
......
......@@ -62,6 +62,7 @@ module Types
field :blob, type: Types::Snippets::BlobType,
description: 'Snippet blob',
calls_gitaly: true,
null: false
markdown_field :description_html, null: true, method: :description
......
# frozen_string_literal: true
# Applicable for blob classes with project attribute
module BlobLanguageFromGitAttributes
extend ActiveSupport::Concern
def language_from_gitattributes
return unless project
return unless repository&.exists?
repository = project.repository
repository.gitattribute(path, 'gitlab-language')
end
end
......@@ -197,7 +197,11 @@ class Snippet < ApplicationRecord
end
def blob
@blob ||= Blob.decorate(SnippetBlob.new(self), nil)
@blob ||= Blob.decorate(SnippetBlob.new(self), self)
end
def blobs
repository.ls_files(repository.root_ref).map { |file| Blob.lazy(self, repository.root_ref, file) }
end
def hook_attrs
......@@ -208,7 +212,7 @@ class Snippet < ApplicationRecord
super.to_s
end
def sanitized_file_name
def self.sanitized_file_name(file_name)
file_name.gsub(/[^a-zA-Z0-9_\-\.]+/, '')
end
......
......@@ -32,7 +32,7 @@ class SnippetBlobPresenter < BlobPresenter
end
def snippet
blob.snippet
blob.container
end
def language
......
......@@ -27,6 +27,14 @@ class SnippetPresenter < Gitlab::View::Presenter::Delegated
snippet.submittable_as_spam_by?(current_user)
end
def blob
if Feature.enabled?(:version_snippets, current_user) && !snippet.repository.empty?
snippet.blobs.first
else
snippet.blob
end
end
private
def can_access_resource?(ability_prefix)
......
......@@ -3,14 +3,13 @@
class AuditEventService
# Instantiates a new service
#
# @param author [User] the user who authors the change
# @param entity [Object] an instance of either Project/Group/User type. This
# param is also used to determine at which level the audit events are
# shown.
# - Project: events are visible at Project level
# - Group: events are visible at Group level
# @param [User] author the user who authors the change
# @param [User, Project, Group] entity the scope which audit event belongs to
# This param is also used to determine the visibility of the audit event.
# - Project: events are visible at Project and Instance level
# - Group: events are visible at Group and Instance level
# - User: events are visible at Instance level
# @param details [Hash] details to be added to audit event
# @param [Hash] details extra data of audit event
#
# @return [AuditEventService]
def initialize(author, entity, details = {})
......@@ -21,7 +20,7 @@ class AuditEventService
# Builds the @details attribute for authentication
#
# This uses the @author as the target object being changed
# This uses the @author as the target object being audited
#
# @return [AuditEventService]
def for_authentication
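A minimal usage sketch based only on the documented signature above; `current_user`, `project`, and the `details` keys are illustrative placeholders, not a fixed schema:

```ruby
# The entity controls visibility: passing a Project makes the event visible
# at Project and Instance level.
service = AuditEventService.new(current_user, project, ip_address: request.remote_ip)

# Authentication events use the author as the audited target; per the @return
# tag above, the call returns the service itself, so it can be chained.
service.for_authentication
```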
......
......@@ -9,8 +9,7 @@
= render 'shared/snippets/header'
.project-snippets
%article.file-holder.snippet-file-content
= render 'shared/snippets/blob'
= render 'shared/snippets/blob', blob: @blob
.row-content-block.top-block.content-component-block
= render 'award_emoji/awards_block', awardable: @snippet, inline: true
......
- blob = @snippet.blob
.js-file-title.file-title-flex-parent
= render 'projects/blob/header_content', blob: blob
%article.file-holder.snippet-file-content
.js-file-title.file-title-flex-parent
= render 'projects/blob/header_content', blob: blob
.file-actions.d-none.d-sm-block
= render 'projects/blob/viewer_switcher', blob: blob
.file-actions.d-none.d-sm-block
= render 'projects/blob/viewer_switcher', blob: blob
.btn-group{ role: "group" }<
= copy_blob_source_button(blob)
= open_raw_blob_button(blob)
= download_raw_snippet_button(@snippet)
.btn-group{ role: "group" }<
= copy_blob_source_button(blob)
= open_raw_blob_button(blob)
= download_raw_snippet_button(@snippet)
= render 'projects/blob/content', blob: blob
= render 'projects/blob/content', blob: blob
- blob = @snippet.blob
.gitlab-embed-snippets
.js-file-title.file-title-flex-parent
.file-header-content
......@@ -6,10 +5,10 @@
%strong.file-title-name
%a.gitlab-embedded-snippets-title{ href: url_for(only_path: false, overwrite_params: nil) }
= blob.name
= @blob.name
%small
= number_to_human_size(blob.raw_size)
= number_to_human_size(@blob.raw_size)
%a.gitlab-logo-wrapper{ href: url_for(only_path: false, overwrite_params: nil), title: 'view on gitlab' }
%img.gitlab-logo{ src: image_url('ext_snippet_icons/logo.svg'), alt: "GitLab logo" }
......@@ -19,4 +18,4 @@
= embedded_snippet_download_button
%article.file-holder.snippet-file-content
= render 'projects/blob/viewer', viewer: @snippet.blob.simple_viewer, load_async: false, external_embed: true
= render 'projects/blob/viewer', viewer: @blob.simple_viewer, load_async: false, external_embed: true
......@@ -10,8 +10,7 @@
= render 'shared/snippets/header'
.personal-snippets
%article.file-holder.snippet-file-content
= render 'shared/snippets/blob'
= render 'shared/snippets/blob', blob: @blob
.row-content-block.top-block.content-component-block
= render 'award_emoji/awards_block', awardable: @snippet, inline: true
......
---
title: Fixed regression when URL was encoded in a loop
merge_request: 25849
author:
type: fixed
---
title: 'Fix: tableflip quick action is interpreted even if inside code block'
merge_request:
author: Pavlo Dudchenko
type: fixed
---
title: Render single snippet blob in repository
merge_request: 23848
author:
type: added
---
title: Add validation for updated_at parameter in update Issue API
merge_request: 25201
author: Filip Stybel
type: fixed
---
# Checks for the presence of absolute hyperlinks that should be relative.
#
# Requires --ignore-syntax CLI flag to find matches.
#
# For a list of all options, see https://errata-ai.github.io/vale/styles/
extends: existence
message: URL '%s' must be relative.
link: https://docs.gitlab.com/ee/development/documentation/styleguide.html#links-to-internal-documentation
level: error
raw:
- '\[.+\]\(https?:\/\/docs\.gitlab\.com\/ee.*\)'
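To make the rule concrete, here is roughly what the `raw` pattern flags, checked with Ruby's regexp engine as a stand-in for Vale's. The example links are the same ones used in the documentation style guide further down:

```ruby
pattern = %r{\[.+\]\(https?:\/\/docs\.gitlab\.com\/ee.*\)}

# Absolute link to the published docs site: flagged as an error.
pattern.match?('[Geo Troubleshooting](https://docs.gitlab.com/ee/administration/geo/replication/troubleshooting.html)')
# => true

# Relative link to the source file: passes.
pattern.match?('[Geo Troubleshooting](../../geo/replication/troubleshooting.md)')
# => false
```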
......@@ -51,14 +51,14 @@ Feature.enable('geo_repository_verification')
## Repository verification
Navigate to the **Admin Area > Geo** dashboard on the **primary** node and expand
Navigate to the **{admin}** **Admin Area >** **{location-dot}** **Geo** dashboard on the **primary** node and expand
the **Verification information** tab for that node to view automatic checksumming
status for repositories and wikis. Successes are shown in green, pending work
in grey, and failures in red.
![Verification status](img/verification-status-primary.png)
Navigate to the **Admin Area > Geo** dashboard on the **secondary** node and expand
Navigate to the **{admin}** **Admin Area >** **{location-dot}** **Geo** dashboard on the **secondary** node and expand
the **Verification information** tab for that node to view automatic verification
status for repositories and wikis. As with checksumming, successes are shown in
green, pending work in grey, and failures in red.
......@@ -85,7 +85,7 @@ data. The default and recommended re-verification interval is 7 days, though
an interval as short as 1 day can be set. Shorter intervals reduce risk but
increase load and vice versa.
Navigate to the **Admin Area > Geo** dashboard on the **primary** node, and
Navigate to the **{admin}** **Admin Area >** **{location-dot}** **Geo** dashboard on the **primary** node, and
click the **Edit** button for the **primary** node to customize the minimum
re-verification interval:
......@@ -134,7 +134,7 @@ sudo gitlab-rake geo:verification:wiki:reset
If the **primary** and **secondary** nodes have a checksum verification mismatch, the cause may not be apparent. To find the cause of a checksum mismatch:
1. Navigate to the **Admin Area > Projects** dashboard on the **primary** node, find the
1. Navigate to the **{admin}** **Admin Area >** **{overview}** **Overview > Projects** dashboard on the **primary** node, find the
project whose checksum differences you want to check, and click on the
**Edit** button:
![Projects dashboard](img/checksum-differences-admin-projects.png)
......
......@@ -205,20 +205,20 @@ secondary domain, like changing Git remotes and API URLs.
This command will use the changed `external_url` configuration defined
in `/etc/gitlab/gitlab.rb`.
1. For GitLab 11.11 through 12.7 only, you may need to update the primary
1. For GitLab 11.11 through 12.7 only, you may need to update the **primary**
node's name in the database. This bug has been fixed in GitLab 12.8.
To determine if you need to do this, search for the
`gitlab_rails["geo_node_name"]` setting in your `/etc/gitlab/gitlab.rb`
file. If it is commented out with `#` or not found at all, then you will
need to update the primary node's name in the database. You can search for it
need to update the **primary** node's name in the database. You can search for it
like so:
```shell
grep "geo_node_name" /etc/gitlab/gitlab.rb
```
To update the primary node's name in the database:
To update the **primary** node's name in the database:
```shell
gitlab-rails runner 'Gitlab::Geo.primary_node.update!(name: GeoNode.current_node_name)'
......
......@@ -92,7 +92,7 @@ The maintenance window won't end until Geo replication and verification is
completely finished. To keep the window as short as possible, you should
ensure these processes are as close to 100% as possible during active use.
Navigate to the **Admin Area > Geo** dashboard on the **secondary** node to
Navigate to the **{admin}** **Admin Area >** **{location-dot}** **Geo** dashboard on the **secondary** node to
review status. Replicated objects (shown in green) should be close to 100%,
and there should be no failures (shown in red). If a large proportion of
objects aren't yet replicated (shown in grey), consider giving the node more
......@@ -117,8 +117,8 @@ This [content was moved to another location][background-verification].
### Notify users of scheduled maintenance
On the **primary** node, navigate to **Admin Area > Messages**, add a broadcast
message. You can check under **Admin Area > Geo** to estimate how long it
On the **primary** node, navigate to **{admin}** **Admin Area >** **{bullhorn}** **Messages** and add a broadcast
message. You can check under **{admin}** **Admin Area >** **{location-dot}** **Geo** to estimate how long it
will take to finish syncing. An example message would be:
> A scheduled maintenance will take place at XX:XX UTC. We expect it to take
......@@ -162,8 +162,8 @@ access to the **primary** node during the maintenance window.
existing Git repository with an SSH remote URL. The server should refuse
connection.
1. Disable non-Geo periodic background jobs on the primary node by navigating
to **Admin Area > Monitoring > Background Jobs > Cron** , pressing `Disable All`,
1. Disable non-Geo periodic background jobs on the **primary** node by navigating
to **{admin}** **Admin Area >** **{monitor}** **Monitoring > Background Jobs > Cron**, pressing `Disable All`,
and then pressing `Enable` for the `geo_sidekiq_cron_config_worker` cron job.
This job will re-enable several other cron jobs that are essential for planned
failover to complete successfully.
......@@ -172,11 +172,11 @@ access to the **primary** node during the maintenance window.
1. If you are manually replicating any data not managed by Geo, trigger the
final replication process now.
1. On the **primary** node, navigate to **Admin Area > Monitoring > Background Jobs > Queues**
1. On the **primary** node, navigate to **{admin}** **Admin Area >** **{monitor}** **Monitoring > Background Jobs > Queues**
and wait for all queues except those with `geo` in the name to drop to 0.
These queues contain work that has been submitted by your users; failing over
before it is completed will cause the work to be lost.
1. On the **primary** node, navigate to **Admin Area > Geo** and wait for the
1. On the **primary** node, navigate to **{admin}** **Admin Area >** **{location-dot}** **Geo** and wait for the
following conditions to be true of the **secondary** node you are failing over to:
- All replication meters reach 100% replicated, 0% failures.
......@@ -184,7 +184,7 @@ access to the **primary** node during the maintenance window.
- Database replication lag is 0ms.
- The Geo log cursor is up to date (0 events behind).
1. On the **secondary** node, navigate to **Admin Area > Monitoring > Background Jobs > Queues**
1. On the **secondary** node, navigate to **{admin}** **Admin Area >** **{monitor}** **Monitoring > Background Jobs > Queues**
and wait for all the `geo` queues to drop to 0 queued and 0 running jobs.
1. On the **secondary** node, use [these instructions][foreground-verification]
to verify the integrity of CI artifacts, LFS objects, and uploads in file
......@@ -201,7 +201,7 @@ Finally, follow the [Disaster Recovery docs][disaster-recovery] to promote the
Once it is completed, the maintenance window is over! Your new **primary** node will now
begin to diverge from the old one. If problems do arise at this point, failing
back to the old **primary** node [is possible][bring-primary-back], but likely to result
in the loss of any data uploaded to the new primary in the meantime.
in the loss of any data uploaded to the new **primary** in the meantime.
Don't forget to remove the broadcast message after failover is complete.
......
......@@ -184,7 +184,7 @@ keys must be manually replicated to the **secondary** node.
gitlab-ctl reconfigure
```
1. Visit the **primary** node's **Admin Area > Geo**
1. Visit the **primary** node's **{admin}** **Admin Area >** **{location-dot}** **Geo**
(`/admin/geo/nodes`) in your browser.
1. Click the **New node** button.
![Add secondary node](img/adding_a_secondary_node.png)
......@@ -231,7 +231,7 @@ You can login to the **secondary** node with the same credentials as used for th
Using Hashed Storage significantly improves Geo replication. Project and group
renames no longer require synchronization between nodes.
1. Visit the **primary** node's **Admin Area > Settings > Repository**
1. Visit the **primary** node's **{admin}** **Admin Area >** **{settings}** **Settings > Repository**
(`/admin/application_settings/repository`) in your browser.
1. In the **Repository storage** section, check **Use hashed storage paths for newly created and renamed projects**.
......@@ -248,7 +248,7 @@ on the **secondary** node.
### Step 6. Enable Git access over HTTP/HTTPS
Geo synchronizes repositories over HTTP/HTTPS, and therefore requires this clone
method to be enabled. Navigate to **Admin Area > Settings**
method to be enabled. Navigate to **{admin}** **Admin Area >** **{settings}** **Settings**
(`/admin/application_settings/general`) on the **primary** node, and set
`Enabled Git access protocols` to `Both SSH and HTTP(S)` or `Only HTTP(S)`.
......@@ -257,13 +257,13 @@ method to be enabled. Navigate to **Admin Area > Settings**
Your **secondary** node is now configured!
You can log in to the **secondary** node with the same credentials you used for the
**primary** node. Visit the **secondary** node's **Admin Area > Geo**
**primary** node. Visit the **secondary** node's **{admin}** **Admin Area >** **{location-dot}** **Geo**
(`/admin/geo/nodes`) in your browser to check if it's correctly identified as a
**secondary** Geo node and if Geo is enabled.
The initial replication, or 'backfill', will probably still be in progress. You
can monitor the synchronization process on each Geo node from the **primary**
node's Geo Nodes dashboard in your browser.
node's **Geo Nodes** dashboard in your browser.
![Geo dashboard](img/geo_node_dashboard.png)
......
......@@ -97,7 +97,7 @@ as well as permissions and credentials.
PostgreSQL can also hold some level of cached data, like HTML-rendered Markdown and cached merge request diffs (this can
also be configured to be offloaded to object storage).
We use PostgreSQL's own replication functionality to replicate data from the primary to secondary nodes.
We use PostgreSQL's own replication functionality to replicate data from the **primary** to **secondary** nodes.
We use Redis both as a cache store and to hold persistent data for our background jobs system. Because both
use cases hold data that is exclusive to each Geo node, we don't replicate it between nodes.
......
......@@ -17,7 +17,7 @@ integrated [Container Registry](../../packages/container_registry.md#container-r
You can enable storage-agnostic replication so it
can be used with cloud or local storage. Whenever a new image is pushed to the
primary node, each **secondary** node will pull it to its own container
**primary** node, each **secondary** node will pull it to its own container
repository.
To configure Docker Registry replication:
......@@ -111,6 +111,7 @@ generate a short-lived JWT that is pull-only-capable to access the
### Verify replication
To verify Container Registry replication is working, go to **Admin Area > Geo** (`/admin/geo/nodes`) on the **secondary** node.
To verify Container Registry replication is working, go to **{admin}** **Admin Area >** **{location-dot}** **Geo**
(`/admin/geo/nodes`) on the **secondary** node.
The initial replication, or "backfill", will probably still be in progress.
You can monitor the synchronization process on each Geo node from the **primary** node's **Geo Nodes** dashboard in your browser.
......@@ -270,7 +270,7 @@ For answers to common questions, see the [Geo FAQ](faq.md).
Since GitLab 9.5, Geo stores structured log messages in a `geo.log` file. For Omnibus installations, this file is at `/var/log/gitlab/gitlab-rails/geo.log`.
This file contains information about when Geo attempts to sync repositories and files. Each line in the file contains a separate JSON entry that can be ingested into, for example, Elasticsearch or Splunk.
This file contains information about when Geo attempts to sync repositories and files. Each line in the file contains a separate JSON entry that can be ingested into a service such as Elasticsearch or Splunk.
For example:
......
......@@ -37,7 +37,7 @@ In any case, you require:
- A Route53 Hosted Zone managing your domain.
If you have not yet set up a Geo **primary** node and **secondary** node, please consult
[the Geo setup instructions](https://docs.gitlab.com/ee/administration/geo/replication/#setup-instructions).
[the Geo setup instructions](index.md#setup-instructions).
## Create a traffic policy
......
......@@ -24,7 +24,7 @@ whether they are stored on the local filesystem or in object storage.
To enable GitLab replication, you must:
1. Go to **Admin Area > Geo**.
1. Go to **{admin}** **Admin Area >** **{location-dot}** **Geo**.
1. Press **Edit** on the **secondary** node.
1. Enable the **Allow this secondary node to replicate content on Object Storage**
checkbox.
......
......@@ -2,7 +2,7 @@
**Secondary** nodes can be removed from the Geo cluster using the Geo admin page of the **primary** node. To remove a **secondary** node:
1. Navigate to **Admin Area > Geo** (`/admin/geo/nodes`).
1. Navigate to **{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`).
1. Click the **Remove** button for the **secondary** node you want to remove.
1. Confirm by clicking **Remove** when the prompt appears.
......
......@@ -19,7 +19,7 @@ Before attempting more advanced troubleshooting:
### Check the health of the **secondary** node
Visit the **primary** node's **Admin Area > Geo** (`/admin/geo/nodes`) in
Visit the **primary** node's **{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`) in
your browser. We perform the following health checks on each **secondary** node
to help identify if something is wrong:
......@@ -122,7 +122,7 @@ Geo finds the current machine's Geo node name in `/etc/gitlab/gitlab.rb` by:
- If that is not defined, using the `external_url` setting.
This name is used to look up the node with the same **Name** in
**Admin Area > Geo**.
**{admin}** **Admin Area >** **{location-dot}** **Geo**.
To check if the current machine has a node name that matches a node in the
database, run the check task:
......@@ -211,9 +211,9 @@ sudo gitlab-rake gitlab:geo:check
Checking Geo ... Finished
```
- Ensure that you have added the secondary node in the Admin Area of the primary node.
- Ensure that you entered the `external_url` or `gitlab_rails['geo_node_name']` when adding the secondary node in the Admin Area of the primary node.
- Prior to GitLab 12.4, edit the secondary node in the Admin Area of the primary node and ensure that there is a trailing `/` in the `Name` field.
- Ensure that you have added the secondary node in the Admin Area of the **primary** node.
- Ensure that you entered the `external_url` or `gitlab_rails['geo_node_name']` when adding the secondary node in the Admin Area of the **primary** node.
- Prior to GitLab 12.4, edit the secondary node in the Admin Area of the **primary** node and ensure that there is a trailing `/` in the `Name` field.
1. Check returns Exception: PG::UndefinedTable: ERROR: relation "geo_nodes" does not exist
......@@ -244,8 +244,8 @@ sudo gitlab-rake gitlab:geo:check
When performing a Postgres major version (9 to 10) update, this is expected. Follow:
- [initiate-the-replication-process](https://docs.gitlab.com/ee/administration/geo/replication/database.html#step-3-initiate-the-replication-process)
- [Geo database has an outdated FDW remote schema](https://docs.gitlab.com/ee/administration/geo/replication/troubleshooting.html#geo-database-has-an-outdated-fdw-remote-schema-error)
- [initiate-the-replication-process](database.md#step-3-initiate-the-replication-process)
- [Geo database has an outdated FDW remote schema](troubleshooting.md#geo-database-has-an-outdated-fdw-remote-schema-error)
## Fixing replication errors
......@@ -359,7 +359,7 @@ To help us resolve this problem, consider commenting on
GitLab places a timeout on all repository clones, including project imports
and Geo synchronization operations. If a fresh `git clone` of a repository
on the primary takes more than a few minutes, you may be affected by this.
on the **primary** takes more than a few minutes, you may be affected by this.
To increase the timeout, add the following line to `/etc/gitlab/gitlab.rb`
on the **secondary** node:
......@@ -494,7 +494,7 @@ If you encounter this message when running `gitlab-rake geo:set_secondary_as_pri
or `gitlab-ctl promote-to-primary-node`, either:
- Enter a Rails console and run:
```ruby
Rails.application.load_tasks; nil
Gitlab::Geo.expire_cache_keys!([:primary_node, :current_node])
......@@ -750,7 +750,7 @@ If you are able to log in to the **primary** node, but you receive this error
when attempting to log into a **secondary**, you should check that the Geo
node's URL matches its external URL.
1. On the primary, visit **Admin Area > Geo**.
1. On the primary, visit **{admin}** **Admin Area >** **{location-dot}** **Geo**.
1. Find the affected **secondary** and click **Edit**.
1. Ensure the **URL** field matches the value found in `/etc/gitlab/gitlab.rb`
in `external_url "https://gitlab.example.com"` on the frontend server(s) of
......@@ -833,4 +833,4 @@ To resolve this issue:
- Check `/var/log/gitlab/gitlab-rails/geo.log` to see if the **secondary** node is
using IPv6 to send its status to the **primary** node. If it is, add an entry to
the **primary** node using IPv4 in the `/etc/hosts` file. Alternatively, you should
[enable IPv6 on the primary node](https://docs.gitlab.com/omnibus/settings/nginx.html#setting-the-nginx-listen-address-or-addresses).
[enable IPv6 on the **primary** node](https://docs.gitlab.com/omnibus/settings/nginx.html#setting-the-nginx-listen-address-or-addresses).
......@@ -2,8 +2,8 @@
## Changing the sync capacity values
In the Geo admin page (`/admin/geo/nodes`), there are several variables that
can be tuned to improve performance of Geo:
In the Geo admin page at **{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`),
there are several variables that can be tuned to improve performance of Geo:
- Repository sync capacity.
- File sync capacity.
......
......@@ -186,7 +186,7 @@ Replicating over SSH has been deprecated, and support for this option will be
removed in a future release.
To switch to HTTP/HTTPS replication, log into the **primary** node as an admin and visit
**Admin Area > Geo** (`/admin/geo/nodes`). For each **secondary** node listed,
**{admin}** **Admin Area >** **{location-dot}** **Geo** (`/admin/geo/nodes`). For each **secondary** node listed,
press the "Edit" button, change the "Repository cloning" setting from
"SSH (deprecated)" to "HTTP/HTTPS", and press "Save changes". This should take
effect immediately.
......
......@@ -63,7 +63,7 @@ For source installations the following settings are nested under `uploads:` and
|---------|-------------|---------|
| `enabled` | Enable/disable object storage | `false` |
| `remote_directory` | The bucket name where Uploads will be stored | |
| `direct_upload` | Set to true to remove Unicorn from the Upload path. Workhorse handles the actual Artifact Upload to Object Storage while Unicorn does minimal processing to keep track of the upload. There is no need for local shared storage. The option may be removed if support for a single storage type for all files is introduced. Read more on [what the direct_upload setting means](https://docs.gitlab.com/ee/development/uploads.html#what-does-the-direct_upload-setting-mean). | `false` |
| `direct_upload` | Set to true to remove Unicorn from the Upload path. Workhorse handles the actual Artifact Upload to Object Storage while Unicorn does minimal processing to keep track of the upload. There is no need for local shared storage. The option may be removed if support for a single storage type for all files is introduced. Read more on [direct upload](../development/uploads.md#direct-upload). | `false` |
| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 (if `direct_upload` is set to `true` it will override `background_upload`) | `true` |
| `proxy_download` | Set to true to enable proxying all files served. This option allows you to reduce egress traffic, as it allows clients to download directly from remote storage instead of proxying all data | `false` |
| `connection` | Various connection options described below | |
......
......@@ -647,7 +647,7 @@ POST /projects/:id/issues
| `iid` | integer/string | no | The internal ID of the project's issue (requires admin or project owner rights) |
| `title` | string | yes | The title of an issue |
| `description` | string | no | The description of an issue. Limited to 1,048,576 characters. |
| `confidential` | boolean | no | Set an issue to be confidential. Default is `false`. |
| `confidential` | Boolean | no | Set an issue to be confidential. Default is `false`. |
| `assignee_ids` | integer array | no | The ID of a user to assign issue |
| `milestone_id` | integer | no | The global ID of a milestone to assign issue |
| `labels` | string | no | Comma-separated label names for an issue |
......@@ -755,15 +755,15 @@ PUT /projects/:id/issues/:issue_iid
| `issue_iid` | integer | yes | The internal ID of a project's issue |
| `title` | string | no | The title of an issue |
| `description` | string | no | The description of an issue. Limited to 1,048,576 characters. |
| `confidential` | boolean | no | Updates an issue to be confidential |
| `confidential` | Boolean | no | Updates an issue to be confidential |
| `assignee_ids` | integer array | no | The ID of the user(s) to assign the issue to. Set to `0` or provide an empty value to unassign all assignees. |
| `milestone_id` | integer | no | The global ID of a milestone to assign the issue to. Set to `0` or provide an empty value to unassign a milestone.|
| `labels` | string | no | Comma-separated label names for an issue. Set to an empty string to unassign all labels. |
| `state_event` | string | no | The state event of an issue. Set `close` to close the issue and `reopen` to reopen it |
| `updated_at` | string | no | Date time string, ISO 8601 formatted, e.g. `2016-03-11T03:45:40Z` (requires admin or project owner rights) |
| `updated_at` | string | no | Date time string, ISO 8601 formatted, e.g. `2016-03-11T03:45:40Z` (requires admin or project owner rights). Empty string or null values are not accepted.|
| `due_date` | string | no | Date time string in the format YEAR-MONTH-DAY, e.g. `2016-03-11` |
| `weight` **(STARTER)** | integer | no | The weight of the issue. Valid values are greater than or equal to 0. |
| `discussion_locked` | boolean | no | Flag indicating if the issue's discussion is locked. If the discussion is locked only project members can add or edit comments. |
| `discussion_locked` | Boolean | no | Flag indicating if the issue's discussion is locked. If the discussion is locked only project members can add or edit comments. |
| `epic_id` **(ULTIMATE)** | integer | no | ID of the epic to add the issue to. Valid values are greater than or equal to 0. |
| `epic_iid` **(ULTIMATE)** | integer | no | IID of the epic to add the issue to. Valid values are greater than or equal to 0. (deprecated, [will be removed in 13.0](https://gitlab.com/gitlab-org/gitlab/issues/35157)) |
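A hedged sketch of the `updated_at` note above, using only Ruby's standard library; the host, project ID, issue IID, and token are placeholders:

```ruby
require 'net/http'

# PUT /projects/:id/issues/:issue_iid — `updated_at` must be a valid ISO 8601
# string; an empty string or null value is rejected.
uri = URI('https://gitlab.example.com/api/v4/projects/42/issues/7')
request = Net::HTTP::Put.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_TOKEN')
request.set_form_data('state_event' => 'close', 'updated_at' => '2016-03-11T03:45:40Z')

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code
```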
......@@ -851,9 +851,14 @@ the `weight` parameter:
}
```
**Note**: `assignee` column is deprecated, now we show it as a single-sized array `assignees` to conform to the GitLab EE API.
NOTE: **Note:**
At least one of the following parameters is required for the request to be successful: `:assignee_id`, `:assignee_ids`, `:confidential`, `:created_at`, `:description`, `:discussion_locked`, `:due_date`, `:labels`, `:milestone_id`, `:state_event`, or `:title`.
**Note**: The `closed_by` attribute was [introduced in GitLab 10.6][ce-17042]. This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
NOTE: **Note**:
`assignee` column is deprecated. We now show it as a single-sized array `assignees` to conform to the GitLab EE API.
NOTE: **Note**:
The `closed_by` attribute was [introduced in GitLab 10.6][ce-17042]. This value will only be present for issues which were closed after GitLab 10.6 and when the user account that closed the issue still exists.
## Delete an issue
......
......@@ -367,6 +367,7 @@ Parameters:
- `email` (required) - Email
- `password` (optional) - Password
- `reset_password` (optional) - Send user password reset link - true or false (default)
- `force_random_password` (optional) - Set user password to a random value - true or false (default)
- `username` (required) - Username
- `name` (required) - Name
- `skype` (optional) - Skype ID
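For the new `force_random_password` option, a similarly hedged sketch (host, token, and user attributes are placeholders):

```ruby
require 'net/http'

# POST /users — `force_random_password` asks GitLab to set the password to a
# random value, and `reset_password` emails the user a reset link.
uri = URI('https://gitlab.example.com/api/v4/users')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = ENV.fetch('GITLAB_ADMIN_TOKEN')
request.set_form_data(
  'email' => 'jane@example.com',
  'username' => 'jane',
  'name' => 'Jane Doe',
  'reset_password' => 'true',
  'force_random_password' => 'true'
)

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code
```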
......
......@@ -580,21 +580,15 @@ do not use this option until further notice.
### Links to internal documentation
- To link to internal documentation, use relative links, not full URLs.
- To link to internal documentation, use relative links, not absolute URLs.
Use `../` to navigate to high-level directories. Links should not refer to root.
Don't:
```md
[Geo Troubleshooting](https://docs.gitlab.com/ee/administration/geo/replication/troubleshooting.html)
[Geo Troubleshooting](/ee/administration/geo/replication/troubleshooting.md)
```
Do:
- `https://docs.gitlab.com/ee/administration/geo/replication/troubleshooting.html`
- `/ee/administration/geo/replication/troubleshooting.md`
```md
[Geo Troubleshooting](../../geo/replication/troubleshooting.md)
```
Do: `../../geo/replication/troubleshooting.md`
- Always add the file name `file.md` at the end of the link with the `.md` extension, not `.html`.
......
# Issuable-like Rails models utilities
The GitLab Rails codebase contains several models that hold common functionality and behave similarly to
[Issues](https://docs.gitlab.com/ee/user/project/issues/). Other examples of "issuables"
are [Merge Requests](https://docs.gitlab.com/ee/user/project/merge_requests/) and
[Epics](https://docs.gitlab.com/ee/user/group/epics/).
[Issues](../user/project/issues/index.md). Other examples of "issuables"
are [Merge Requests](../user/project/merge_requests/index.md) and
[Epics](../user/group/epics/index.md).
This guide accumulates guidelines on working with such Rails models.
......
......@@ -115,7 +115,7 @@ data migration. Migrating millions of rows will always be troublesome and
can have a negative impact on the application.
To better understand how to get help with the query plan reviews
read this section on [how to prepare the merge request for a database review](https://docs.gitlab.com/ee/development/database_review.html#how-to-prepare-the-merge-request-for-a-database-review).
read this section on [how to prepare the merge request for a database review](database_review.md#how-to-prepare-the-merge-request-for-a-database-review).
## Query Counts
......@@ -199,7 +199,7 @@ This could result in Puma/Unicorn timeout and should be avoided at all cost.
You should set a reasonable timeout, gracefully handle exceptions, and surface the
errors in the UI or log them internally.
Using [`ReactiveCaching`](https://docs.gitlab.com/ee/development/utilities.html#reactivecaching) is one of the best solutions to fetch external data.
Using [`ReactiveCaching`](utilities.md#reactivecaching) is one of the best solutions to fetch external data.
## Keep database transaction minimal
......@@ -396,4 +396,4 @@ Performance deficiencies should be addressed right away after we merge initial
changes.
Read more about when and how feature flags should be used in
[Feature flags in GitLab development](https://docs.gitlab.com/ee/development/feature_flags/process.html#feature-flags-in-gitlab-development).
[Feature flags in GitLab development](feature_flags/process.md#feature-flags-in-gitlab-development).
......@@ -49,7 +49,7 @@ require Rails.root.join('db', 'post_migrate', '20170526185842_migrate_pipeline_s
#### `table`
Use the `table` helper to create a temporary `ActiveRecord::Base`-derived model
for a table. [FactoryBot](https://docs.gitlab.com/ee/development/testing_guide/best_practices.html#factories)
for a table. [FactoryBot](best_practices.md#factories)
**should not** be used to create data for migration specs. For example, to
create a record in the `projects` table:
......
......@@ -511,7 +511,7 @@ Here are some common pitfalls and how to overcome them:
If you see `Elasticsearch::Model::Response::Records`, you are using Elasticsearch.
NOTE: **Note**:
The above instructions are used to verify that GitLab is using Elasticsearch only when indexing all namespaces. This is not to be used for scenarios that only index a [subset of namespaces](https://docs.gitlab.com/ee/integration/elasticsearch.html#limiting-namespaces-and-projects).
The above instructions are used to verify that GitLab is using Elasticsearch only when indexing all namespaces. This is not to be used for scenarios that only index a [subset of namespaces](#limiting-namespaces-and-projects).
- **I updated GitLab and now I can't find anything**
......@@ -534,7 +534,7 @@ Here are some common pitfalls and how to overcome them:
```
NOTE: **Note**:
The above instructions are not to be used for scenarios that only index a [subset of namespaces](https://docs.gitlab.com/ee/integration/elasticsearch.html#limiting-namespaces-and-projects).
The above instructions are not to be used for scenarios that only index a [subset of namespaces](#limiting-namespaces-and-projects).
See [Elasticsearch Index Scopes](#elasticsearch-index-scopes) for more information on searching for specific types of data.
......@@ -597,7 +597,7 @@ Here are some common pitfalls and how to overcome them:
AWS has [fixed limits](https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/aes-limits.html)
for this setting ("Maximum Size of HTTP Request Payloads"), based on the size of
the underlying instance.
- **My single node Elasticsearch cluster status never goes from `yellow` to `green` even though everything seems to be running properly**
**For a single node Elasticsearch cluster, the functional cluster health status will be yellow** (it will never be green) because the primary shard is allocated but replicas cannot be, as there is no other node to which Elasticsearch can assign a replica. This also applies if you are using the
......@@ -614,7 +614,7 @@ Here are some common pitfalls and how to overcome them:
}
}'
```
- **I'm getting a `health check timeout: no Elasticsearch node available` error in Sidekiq during the indexing process**
```
......
......@@ -243,7 +243,7 @@ If you reach your limit, you can [purchase additional CI minutes](#extra-shared-
##### How pipeline quota usage is calculated
Pipeline quota usage is calculated as the sum of the duration of each individual job. This is slightly different to how pipeline _duration_ is [calculated](https://docs.gitlab.com/ee/ci/pipelines.html#how-pipeline-duration-is-calculated). Pipeline quota usage doesn't consider the intersection of jobs.
Pipeline quota usage is calculated as the sum of the duration of each individual job. This is slightly different to how pipeline _duration_ is [calculated](../ci/pipelines.md#how-pipeline-duration-is-calculated). Pipeline quota usage doesn't consider the intersection of jobs.
A simple example is:
......
......@@ -16,7 +16,7 @@ like:
- Working with secrets.
- Setting up CORS.
Alternatively, you can quickly [create a new project with a template](https://docs.gitlab.com/ee/gitlab-basics/create-project.html#project-templates). The [`Serverless Framework/JS` template](https://gitlab.com/gitlab-org/project-templates/serverless-framework/) already includes all parts described below.
Alternatively, you can quickly [create a new project with a template](../../../../gitlab-basics/create-project.md#project-templates). The [`Serverless Framework/JS` template](https://gitlab.com/gitlab-org/project-templates/serverless-framework/) already includes all parts described below.
## Example
......@@ -282,6 +282,6 @@ The example code is available:
- As a [cloneable repository](https://gitlab.com/gitlab-org/serverless/examples/serverless-framework-js).
- In a version with [tests and secret variables](https://gitlab.com/gitlab-org/project-templates/serverless-framework/).
You can also use a [template](https://docs.gitlab.com/ee/gitlab-basics/create-project.html#project-templates)
You can also use a [template](../../../../gitlab-basics/create-project.md#project-templates)
(based on the version with tests and secret variables) from within the GitLab UI (see
the `Serverless Framework/JS` template).
......@@ -7,7 +7,7 @@ type: reference
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/15643) in GitLab 11.7.
GitLab supports using client-side [Git push options](https://git-scm.com/docs/git-push#Documentation/git-push.txt--oltoptiongt)
to perform various actions at the same time as pushing changes. Additionally, [Push Rules](https://docs.gitlab.com/ee/push_rules/push_rules.html) offer server-side control and enforcement options.
to perform various actions at the same time as pushing changes. Additionally, [Push Rules](../../push_rules/push_rules.md) offer server-side control and enforcement options.
Currently, there are push options available for:
......
......@@ -247,6 +247,7 @@ module API
requires :issue_iid, type: Integer, desc: 'The internal ID of a project issue'
optional :title, type: String, desc: 'The title of an issue'
optional :updated_at, type: DateTime,
allow_blank: false,
desc: 'Date time when the issue was updated. Available only for admins and project owners.'
optional :state_event, type: String, values: %w[reopen close], desc: 'State of the issue'
use :issue_params
......
......@@ -13,6 +13,7 @@ module Gitlab
def initialize(command_definitions)
@command_definitions = command_definitions
@commands_regex = {}
end
# Extracts commands from content and return an array of commands.
......@@ -58,7 +59,8 @@ module Gitlab
content = content.dup
content.delete!("\r")
content.gsub!(commands_regex(only: only)) do
names = command_names(limit_to_commands: only).map(&:to_s)
content.gsub!(commands_regex(names: names)) do
command, output = process_commands($~, redact)
commands << command
output
......@@ -91,10 +93,8 @@ module Gitlab
# It looks something like:
#
# /^\/(?<cmd>close|reopen|...)(?:( |$))(?<arg>[^\/\n]*)(?:\n|$)/
def commands_regex(only:)
names = command_names(limit_to_commands: only).map(&:to_s)
@commands_regex ||= %r{
def commands_regex(names:)
@commands_regex[names] ||= %r{
(?<code>
# Code blocks:
# ```
......@@ -151,14 +151,18 @@ module Gitlab
end
substitution_definitions.each do |substitution|
match_data = substitution.match(content.downcase)
if match_data
command = [substitution.name.to_s]
command << match_data[1] unless match_data[1].empty?
commands << command
regex = commands_regex(names: substitution.all_names)
content = content.gsub(regex) do |text|
if $~[:cmd]
command = [substitution.name.to_s]
command << $~[:arg] if $~[:arg].present?
commands << command
substitution.perform_substitution(self, text)
else
text
end
end
content = substitution.perform_substitution(self, content)
end
[content, commands]
......
......@@ -17,7 +17,7 @@ module Gitlab
return unless content
all_names.each do |a_name|
content = content.gsub(%r{/#{a_name}(?![\S]) ?(.*)$}i, execute_block(action_block, context, '\1'))
content = content.sub(%r{/#{a_name}(?![\S]) ?(.*)$}i, execute_block(action_block, context, '\1'))
end
content
......
......@@ -155,7 +155,7 @@ module Gitlab
end
def repository
@repository ||= project.repository
@repository ||= project&.repository
end
end
end
......
......@@ -3,9 +3,11 @@
require 'spec_helper'
describe Projects::SnippetsController do
include Gitlab::Routing
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
let(:project) { create(:project_empty_repo, :public) }
let(:user) { create(:user) }
let(:user2) { create(:user) }
before do
project.add_maintainer(user)
......@@ -318,14 +320,45 @@ describe Projects::SnippetsController do
end
end
shared_examples 'successful response' do
it 'renders the snippet' do
subject
expect(assigns(:snippet)).to eq(project_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
it 'renders the blob from the repository' do
subject
expect(assigns(:blob)).to eq(project_snippet.blobs.first)
end
context 'when feature flag version_snippets is disabled' do
before do
stub_feature_flags(version_snippets: false)
end
it 'returns the snippet database content' do
subject
blob = assigns(:blob)
expect(blob.data).to eq(project_snippet.content)
end
end
end
%w[show raw].each do |action|
describe "GET ##{action}" do
context 'when the project snippet is private' do
let(:project_snippet) { create(:project_snippet, :private, project: project, author: user) }
let(:project_snippet) { create(:project_snippet, :private, :repository, project: project, author: user) }
subject { get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param } }
context 'when anonymous' do
it 'responds with status 404' do
get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
subject
expect(response).to have_gitlab_http_status(:not_found)
end
......@@ -336,12 +369,7 @@ describe Projects::SnippetsController do
sign_in(user)
end
it 'renders the snippet' do
get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
expect(assigns(:snippet)).to eq(project_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'successful response'
end
context 'when signed in as a project member' do
......@@ -349,19 +377,16 @@ describe Projects::SnippetsController do
sign_in(user2)
end
it 'renders the snippet' do
get action, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }
expect(assigns(:snippet)).to eq(project_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'successful response'
end
end
context 'when the project snippet does not exist' do
subject { get action, params: { namespace_id: project.namespace, project_id: project, id: 42 } }
context 'when anonymous' do
it 'responds with status 404' do
get action, params: { namespace_id: project.namespace, project_id: project, id: 42 }
subject
expect(response).to have_gitlab_http_status(:not_found)
end
......@@ -373,7 +398,7 @@ describe Projects::SnippetsController do
end
it 'responds with status 404' do
get action, params: { namespace_id: project.namespace, project_id: project, id: 42 }
subject
expect(response).to have_gitlab_http_status(:not_found)
end
......@@ -383,18 +408,20 @@ describe Projects::SnippetsController do
end
describe "GET #show for embeddable content" do
let(:project_snippet) { create(:project_snippet, snippet_permission, project: project, author: user) }
let(:project_snippet) { create(:project_snippet, :repository, snippet_permission, project: project, author: user) }
before do
sign_in(user)
get :show, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }, format: :js
end
subject { get :show, params: { namespace_id: project.namespace, project_id: project, id: project_snippet.to_param }, format: :js }
context 'when snippet is private' do
let(:snippet_permission) { :private }
it 'responds with status 404' do
subject
expect(response).to have_gitlab_http_status(:not_found)
end
end
......@@ -402,10 +429,7 @@ describe Projects::SnippetsController do
context 'when snippet is public' do
let(:snippet_permission) { :public }
it 'responds with status 200' do
expect(assigns(:snippet)).to eq(project_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'successful response'
end
context 'when the project is private' do
......@@ -415,6 +439,8 @@ describe Projects::SnippetsController do
let(:project_snippet) { create(:project_snippet, :public, project: project, author: user) }
it 'responds with status 404' do
subject
expect(assigns(:snippet)).to eq(project_snippet)
expect(response).to have_gitlab_http_status(:not_found)
end
......@@ -423,14 +449,17 @@ describe Projects::SnippetsController do
end
describe 'GET #raw' do
let(:content) { "first line\r\nsecond line\r\nthird line" }
let(:formatted_content) { content.gsub(/\r\n/, "\n") }
let(:project_snippet) do
create(
:project_snippet, :public,
:project_snippet, :public, :repository,
project: project,
author: user,
content: "first line\r\nsecond line\r\nthird line"
content: content
)
end
let(:blob) { project_snippet.blobs.first }
context 'CRLF line ending' do
let(:params) do
......@@ -441,16 +470,22 @@ describe Projects::SnippetsController do
}
end
before do
allow_next_instance_of(Blob) do |instance|
allow(instance).to receive(:data).and_return(content)
end
end
it 'returns LF line endings by default' do
get :raw, params: params
expect(response.body).to eq("first line\nsecond line\nthird line")
expect(response.body).to eq(formatted_content)
end
it 'does not convert line endings when parameter present' do
get :raw, params: params.merge(line_ending: :raw)
expect(response.body).to eq("first line\r\nsecond line\r\nthird line")
expect(response.body).to eq(content)
end
end
end
......
......@@ -3,11 +3,9 @@
require 'spec_helper'
describe SnippetsController do
let(:user) { create(:user) }
let_it_be(:user) { create(:user) }
describe 'GET #index' do
let(:user) { create(:user) }
context 'when username parameter is present' do
it_behaves_like 'paginated collection' do
let(:collection) { Snippet.all }
......@@ -75,8 +73,37 @@ describe SnippetsController do
end
describe 'GET #show' do
shared_examples 'successful response' do
it 'renders the snippet' do
subject
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
it 'renders the blob from the repository' do
subject
expect(assigns(:blob)).to eq(personal_snippet.blobs.first)
end
context 'when feature flag version_snippets is disabled' do
before do
stub_feature_flags(version_snippets: false)
end
it 'returns the snippet database content' do
subject
blob = assigns(:blob)
expect(blob.data).to eq(personal_snippet.content)
end
end
end
context 'when the personal snippet is private' do
let(:personal_snippet) { create(:personal_snippet, :private, author: user) }
let_it_be(:personal_snippet) { create(:personal_snippet, :private, :repository, author: user) }
context 'when signed in' do
before do
......@@ -95,11 +122,8 @@ describe SnippetsController do
end
context 'when signed in user is the author' do
it 'renders the snippet' do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(:ok)
it_behaves_like 'successful response' do
subject { get :show, params: { id: personal_snippet.to_param } }
end
it 'responds with status 404 when embeddable content is requested' do
......@@ -120,18 +144,15 @@ describe SnippetsController do
end
context 'when the personal snippet is internal' do
let(:personal_snippet) { create(:personal_snippet, :internal, author: user) }
let_it_be(:personal_snippet) { create(:personal_snippet, :internal, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it 'renders the snippet' do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(:ok)
it_behaves_like 'successful response' do
subject { get :show, params: { id: personal_snippet.to_param } }
end
it 'responds with status 404 when embeddable content is requested' do
......@@ -151,18 +172,15 @@ describe SnippetsController do
end
context 'when the personal snippet is public' do
let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it 'renders the snippet' do
get :show, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(:ok)
it_behaves_like 'successful response' do
subject { get :show, params: { id: personal_snippet.to_param } }
end
it 'responds with status 200 when embeddable content is requested' do
......@@ -481,8 +499,82 @@ describe SnippetsController do
end
describe "GET #raw" do
shared_examples '200 status' do
before do
subject
end
it 'responds with status 200' do
expect(assigns(:snippet)).to eq(snippet)
expect(response).to have_gitlab_http_status(:ok)
end
it 'has expected headers' do
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
expect(response.header['Content-Disposition']).to match(/inline/)
end
it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq 'true'
end
end
shared_examples 'CRLF line ending' do
let(:content) { "first line\r\nsecond line\r\nthird line" }
let(:formatted_content) { content.gsub(/\r\n/, "\n") }
let(:snippet) do
create(:personal_snippet, :public, :repository, author: user, content: content)
end
before do
allow_next_instance_of(Blob) do |instance|
allow(instance).to receive(:data).and_return(content)
end
subject
end
it 'returns LF line endings by default' do
expect(response.body).to eq(formatted_content)
end
context 'when parameter present' do
let(:params) { { id: snippet.to_param, line_ending: :raw } }
it 'does not convert line endings when parameter present' do
expect(response.body).to eq(content)
end
end
end
shared_examples 'successful response' do
it_behaves_like '200 status'
it_behaves_like 'CRLF line ending'
it 'returns snippet first blob data' do
subject
expect(response.body).to eq snippet.blobs.first.data
end
context 'when feature flag version_snippets is disabled' do
before do
stub_feature_flags(version_snippets: false)
end
it_behaves_like '200 status'
it_behaves_like 'CRLF line ending'
it 'returns snippet database content' do
subject
expect(response.body).to eq snippet.content
end
end
end
context 'when the personal snippet is private' do
let(:personal_snippet) { create(:personal_snippet, :private, author: user) }
let_it_be(:personal_snippet) { create(:personal_snippet, :private, :repository, author: user) }
context 'when signed in' do
before do
......@@ -501,24 +593,11 @@ describe SnippetsController do
end
context 'when signed in user is the author' do
before do
get :raw, params: { id: personal_snippet.to_param }
end
it_behaves_like 'successful response' do
let(:snippet) { personal_snippet }
let(:params) { { id: snippet.to_param } }
it 'responds with status 200' do
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
it 'has expected headers' do
expect(response.header['Content-Type']).to eq('text/plain; charset=utf-8')
expect(response.header['Content-Disposition']).to match(/inline/)
end
it "sets #{Gitlab::Workhorse::DETECT_HEADER} header" do
expect(response).to have_gitlab_http_status(:ok)
expect(response.header[Gitlab::Workhorse::DETECT_HEADER]).to eq "true"
subject { get :raw, params: params }
end
end
end
......@@ -533,18 +612,18 @@ describe SnippetsController do
end
context 'when the personal snippet is internal' do
let(:personal_snippet) { create(:personal_snippet, :internal, author: user) }
let_it_be(:personal_snippet) { create(:personal_snippet, :internal, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it 'responds with status 200' do
get :raw, params: { id: personal_snippet.to_param }
it_behaves_like 'successful response' do
let(:snippet) { personal_snippet }
let(:params) { { id: snippet.to_param } }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(:ok)
subject { get :raw, params: params }
end
end
......@@ -558,36 +637,18 @@ describe SnippetsController do
end
context 'when the personal snippet is public' do
let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
let_it_be(:personal_snippet) { create(:personal_snippet, :public, :repository, author: user) }
context 'when signed in' do
before do
sign_in(user)
end
it 'responds with status 200' do
get :raw, params: { id: personal_snippet.to_param }
expect(assigns(:snippet)).to eq(personal_snippet)
expect(response).to have_gitlab_http_status(:ok)
end
context 'CRLF line ending' do
let(:personal_snippet) do
create(:personal_snippet, :public, author: user, content: "first line\r\nsecond line\r\nthird line")
end
it_behaves_like 'successful response' do
let(:snippet) { personal_snippet }
let(:params) { { id: snippet.to_param } }
it 'returns LF line endings by default' do
get :raw, params: { id: personal_snippet.to_param }
expect(response.body).to eq("first line\nsecond line\nthird line")
end
it 'does not convert line endings when parameter present' do
get :raw, params: { id: personal_snippet.to_param, line_ending: :raw }
expect(response.body).to eq("first line\r\nsecond line\r\nthird line")
end
subject { get :raw, params: params }
end
end
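Note: the 'CRLF line ending' shared examples above exercise the controller's line-ending normalisation. A minimal sketch of that helper, assuming it is the simple gsub guarded by the line_ending parameter (the real method lives in the SnippetsActions concern and may differ):

# Sketch only: keep raw line endings when the client passes line_ending=raw,
# otherwise normalise CRLF to LF before sending the blob data.
def convert_line_endings(content)
  params[:line_ending] == 'raw' ? content : content.gsub(/\r\n/, "\n")
end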
......
......@@ -3,9 +3,9 @@
require 'spec_helper'
describe 'Thread Comments Snippet', :js do
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:snippet) { create(:project_snippet, :private, project: project, author: user) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:snippet) { create(:project_snippet, :private, :repository, project: project, author: user) }
before do
stub_feature_flags(snippets_vue: false)
......
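Many specs in this commit switch fixtures from let to let_it_be. A rough sketch of the difference, assuming the standard test-prof semantics (the names below are illustrative only):

# let:       builds a fresh record for every example (isolated, but slower).
# let_it_be: builds the record once per example group and reuses it across
#            examples, which is why it suits shared, read-only fixtures.
let(:fresh_user)        { create(:user) }  # created N times for N examples
let_it_be(:shared_user) { create(:user) }  # created once for the whole group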
......@@ -5,8 +5,8 @@ require 'spec_helper'
describe 'Projects > Snippets > Create Snippet', :js do
include DropzoneHelper
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
def description_field
find('.js-description-input input,textarea')
......@@ -102,7 +102,7 @@ describe 'Projects > Snippets > Create Snippet', :js do
end
it 'shows a public snippet on the index page but not the New snippet button' do
snippet = create(:project_snippet, :public, project: project)
snippet = create(:project_snippet, :public, :repository, project: project)
visit project_snippets_path(project)
......
......@@ -3,9 +3,9 @@
require 'spec_helper'
describe 'Projects > Snippets > User comments on a snippet', :js do
let(:project) { create(:project) }
let!(:snippet) { create(:project_snippet, project: project, author: user) }
let(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
before do
stub_feature_flags(snippets_vue: false)
......
......@@ -3,8 +3,8 @@
require 'spec_helper'
describe 'Reportable note on snippets', :js do
let(:user) { create(:user) }
let(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
before do
stub_feature_flags(snippets_vue: false)
......@@ -13,8 +13,8 @@ describe 'Reportable note on snippets', :js do
end
describe 'on project snippet' do
let(:snippet) { create(:project_snippet, :public, project: project, author: user) }
let!(:note) { create(:note_on_project_snippet, noteable: snippet, project: project) }
let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project, author: user) }
let_it_be(:note) { create(:note_on_project_snippet, noteable: snippet, project: project) }
before do
visit project_snippet_path(project, snippet)
......
......@@ -41,6 +41,23 @@ describe 'Labels (JavaScript fixtures)' do
end
end
describe API::Helpers::LabelHelpers, type: :request do
include JavaScriptFixturesHelpers
include ApiHelpers
let(:user) { create(:user) }
before do
group.add_owner(user)
end
it 'api/group_labels.json' do
get api("/groups/#{group.id}/labels", user)
expect(response).to be_successful
end
end
describe Projects::LabelsController, '(JavaScript fixtures)', type: :controller do
render_views
......
import { viewerInformationForPath } from '~/vue_shared/components/content_viewer/lib/viewer_utils';
import { decorateFiles, splitParent } from '~/ide/lib/files';
import { decorateData } from '~/ide/stores/utils';
import { escapeFileUrl } from '~/lib/utils/url_utility';
const TEST_BRANCH_ID = 'lorem-ipsum';
const TEST_PROJECT_ID = 10;
......@@ -22,7 +21,7 @@ const createEntries = paths => {
id: path,
name,
path,
url: createUrl(`/${TEST_PROJECT_ID}/${type}/${TEST_BRANCH_ID}/-/${escapeFileUrl(path)}`),
url: createUrl(`/${TEST_PROJECT_ID}/${type}/${TEST_BRANCH_ID}/-/${path}`),
type,
previewMode,
binary: (previewMode && previewMode.binary) || false,
......
......@@ -494,7 +494,7 @@ describe('Multi-file store mutations', () => {
it('properly handles files with spaces in name', () => {
const path = 'my fancy path';
const newPath = 'new path';
const oldEntry = { ...file(path, path, 'blob'), url: `project/-/${encodeURI(path)}` };
const oldEntry = { ...file(path, path, 'blob'), url: `project/-/${path}` };
localState.entries[path] = oldEntry;
......@@ -510,12 +510,12 @@ describe('Multi-file store mutations', () => {
id: newPath,
path: newPath,
name: newPath,
url: `project/-/new%20path`,
url: `project/-/new path`,
key: expect.stringMatching(newPath),
prevId: path,
prevName: path,
prevPath: path,
prevUrl: `project/-/my%20fancy%20path`,
prevUrl: `project/-/my fancy path`,
prevKey: oldEntry.key,
prevParentPath: oldEntry.parentPath,
});
......
import { file } from 'jest/ide/helpers';
import FileRow from '~/vue_shared/components/file_row.vue';
import { mount } from '@vue/test-utils';
import FileHeader from '~/vue_shared/components/file_row_header.vue';
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { escapeFileUrl } from '~/lib/utils/url_utility';
describe('File row component', () => {
let wrapper;
function createComponent(propsData) {
wrapper = mount(FileRow, {
function createComponent(propsData, $router = undefined) {
wrapper = shallowMount(FileRow, {
propsData,
mocks: {
$router,
},
});
}
......@@ -61,7 +67,7 @@ describe('File row component', () => {
}),
});
return wrapper.vm.$nextTick().then(() => {
return nextTick().then(() => {
expect(wrapper.vm.scrollIntoView).toHaveBeenCalled();
});
});
......@@ -85,6 +91,27 @@ describe('File row component', () => {
level: 0,
});
expect(wrapper.element.classList).toContain('js-file-row-header');
expect(wrapper.contains(FileHeader)).toBe(true);
});
it('matches the current route against encoded file URL', () => {
const fileName = 'with space';
const rowFile = Object.assign({}, file(fileName), {
url: `/${fileName}`,
});
const routerPath = `/project/${escapeFileUrl(fileName)}`;
createComponent(
{
file: rowFile,
level: 0,
},
{
currentRoute: {
path: routerPath,
},
},
);
expect(wrapper.vm.hasUrlAtCurrentRoute()).toBe(true);
});
});
......@@ -16,4 +16,47 @@ describe GitlabSchema.types['Snippet'] do
describe 'authorizations' do
it { expect(described_class).to require_graphql_authorizations(:read_snippet) }
end
describe '#blob' do
let_it_be(:user) { create(:user) }
let(:query_blob) { subject.dig('data', 'snippets', 'edges')[0]['node']['blob'] }
let(:query) do
%(
{
snippets {
edges {
node {
blob {
name
path
}
}
}
}
}
)
end
subject { GitlabSchema.execute(query, context: { current_user: user }).as_json }
context 'when snippet has repository' do
let!(:snippet) { create(:personal_snippet, :repository, :public, author: user) }
let(:blob) { snippet.blobs.first }
it 'returns blob from the repository' do
expect(query_blob['name']).to eq blob.name
expect(query_blob['path']).to eq blob.path
end
end
context 'when snippet does not have a repository' do
let!(:snippet) { create(:personal_snippet, :public, author: user) }
let(:blob) { snippet.blob }
it 'returns SnippetBlob type' do
expect(query_blob['name']).to eq blob.name
expect(query_blob['path']).to eq blob.path
end
end
end
end
......@@ -216,6 +216,22 @@ describe Gitlab::QuickActions::Extractor do
expect(msg).to eq "hello\nworld\nthis is great? SHRUG"
end
it 'extracts and performs multiple substitution commands' do
msg = %(hello\nworld\n/reopen\n/shrug this is great?\n/shrug meh)
msg, commands = extractor.extract_commands(msg)
expect(commands).to eq [['reopen'], ['shrug', 'this is great?'], %w(shrug meh)]
expect(msg).to eq "hello\nworld\nthis is great? SHRUG\nmeh SHRUG"
end
it 'does not extract substitution command in inline code' do
msg = %(hello\nworld\n/reopen\n`/tableflip this is great`?)
msg, commands = extractor.extract_commands(msg)
expect(commands).to eq [['reopen']]
expect(msg).to eq "hello\nworld\n`/tableflip this is great`?"
end
it 'extracts and performs substitution commands case-insensitively' do
msg = %(hello\nworld\n/reOpen\n/sHRuG this is great?)
msg, commands = extractor.extract_commands(msg)
......
......@@ -7,6 +7,7 @@ describe Gitlab::QuickActions::SubstitutionDefinition do
<<EOF
Hello! Let's do this!
/sub_name I like this stuff
/sub_name second substitution
EOF
end
......@@ -24,6 +25,7 @@ EOF
expect(subject.perform_substitution(self, content)).to eq <<EOF
Hello! Let's do this!
I like this stuff foo
/sub_name second substitution
EOF
end
end
......
......@@ -11,13 +11,20 @@ describe BlobLanguageFromGitAttributes do
subject(:blob) { fake_blob(path: 'file.md') }
it 'returns the value from the gitattribute' do
expect(blob.project.repository).to receive(:gitattribute).with(blob.path, 'gitlab-language').and_return('erb?parent=json')
allow(blob.repository).to receive(:exists?).and_return(true)
expect(blob.repository).to receive(:gitattribute).with(blob.path, 'gitlab-language').and_return('erb?parent=json')
expect(blob.language_from_gitattributes).to eq('erb?parent=json')
end
it 'returns nil if project is absent' do
allow(blob).to receive(:project).and_return(nil)
it 'returns nil if repository is absent' do
allow(blob).to receive(:repository).and_return(nil)
expect(blob.language_from_gitattributes).to eq(nil)
end
it 'returns nil if repository does not exist' do
allow(blob.repository).to receive(:exists?).and_return(false)
expect(blob.language_from_gitattributes).to eq(nil)
end
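A minimal sketch of the behaviour these new examples cover, assuming the concern simply guards on the repository before reading the gitattribute (the exact method body may differ):

# Sketch: return nil when the blob has no repository or the repository does
# not exist on disk; otherwise read the gitlab-language gitattribute.
def language_from_gitattributes
  return unless repository&.exists?

  repository.gitattribute(path, 'gitlab-language')
end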
......
......@@ -76,18 +76,18 @@ describe SnippetBlobPresenter do
context 'with ProjectSnippet' do
let!(:project) { create(:project) }
let(:snippet) { build(:project_snippet, project: project, id: 1) }
let(:snippet) { create(:project_snippet, project: project) }
it 'returns the raw path' do
expect(subject).to eq "/#{snippet.project.full_path}/snippets/1/raw"
expect(subject).to eq "/#{snippet.project.full_path}/snippets/#{snippet.id}/raw"
end
end
context 'with PersonalSnippet' do
let(:snippet) { build(:personal_snippet, id: 1) }
let(:snippet) { create(:personal_snippet) }
it 'returns the raw path' do
expect(subject).to eq "/snippets/1/raw"
expect(subject).to eq "/snippets/#{snippet.id}/raw"
end
end
end
......
......@@ -143,4 +143,24 @@ describe SnippetPresenter do
expect(subject).to be_truthy
end
end
describe '#blob' do
let(:snippet) { personal_snippet }
subject { presenter.blob }
context 'when snippet does not have a repository' do
it 'returns SnippetBlob' do
expect(subject).to eq snippet.blob
end
end
context 'when snippet has a repository' do
let(:snippet) { create(:snippet, :repository, author: user) }
it 'returns repository first blob' do
expect(subject).to eq snippet.blobs.first
end
end
end
end
......@@ -4,8 +4,9 @@ require 'spec_helper'
describe API::Issues do
let_it_be(:user) { create(:user) }
let_it_be(:owner) { create(:owner) }
let_it_be(:project, reload: true) do
create(:project, :public, creator_id: user.id, namespace: user.namespace)
create(:project, :public, creator_id: owner.id, namespace: owner.namespace)
end
let(:user2) { create(:user) }
......@@ -97,7 +98,7 @@ describe API::Issues do
labels: 'label, label?, label&foo, ?, &'
}
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label'
expect(json_response['labels']).to include 'label?'
expect(json_response['labels']).to include 'label&foo'
......@@ -112,7 +113,7 @@ describe API::Issues do
labels: ['label', 'label?', 'label&foo, ?, &']
}
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label'
expect(json_response['labels']).to include 'label?'
expect(json_response['labels']).to include 'label&foo'
......@@ -349,7 +350,7 @@ describe API::Issues do
it 'allows special label names' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: 'label:foo, label-bar,label_bar,label/bar,label?bar,label&bar,?,&' }
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label:foo'
expect(json_response['labels']).to include 'label-bar'
expect(json_response['labels']).to include 'label_bar'
......@@ -363,7 +364,7 @@ describe API::Issues do
it 'allows special label names with labels param as array' do
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: ['label:foo', 'label-bar', 'label_bar', 'label/bar,label?bar,label&bar,?,&'] }
expect(response.status).to eq(200)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label:foo'
expect(json_response['labels']).to include 'label-bar'
expect(json_response['labels']).to include 'label_bar'
......@@ -400,15 +401,49 @@ describe API::Issues do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['state']).to eq 'opened'
end
end
context 'when an admin or owner makes the request' do
describe 'PUT /projects/:id/issues/:issue_iid to update updated_at param' do
context 'when reporter makes request' do
it 'updates the issue but ignores the updated_at param' do
update_time = 2.weeks.ago
put api("/projects/#{project.id}/issues/#{issue.iid}", user),
params: { labels: 'label3', state_event: 'close', updated_at: update_time }
params: { title: 'some new title', updated_at: update_time }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['title']).to include 'some new title'
expect(Time.parse(json_response['updated_at'])).not_to be_like_time(update_time)
end
end
context 'when admin or owner makes the request' do
it 'does not allow updated_at to be set to null' do
put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: nil }
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not allow updated_at to be blank' do
put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: '' }
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'does not allow updated_at to be set to an invalid format' do
put api("/projects/#{project.id}/issues/#{issue.iid}", owner), params: { updated_at: 'invalid-format' }
expect(response).to have_gitlab_http_status(:bad_request)
end
it 'allows updated_at to be set' do
update_time = 2.weeks.ago
put api("/projects/#{project.id}/issues/#{issue.iid}", owner),
params: { title: 'some new title', updated_at: update_time }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['labels']).to include 'label3'
expect(json_response['title']).to include 'some new title'
expect(Time.parse(json_response['updated_at'])).to be_like_time(update_time)
end
end
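The null/blank/invalid-format rejections above point at a Grape-level guard on the parameter. A hypothetical declaration (assumption; the real one lives in lib/api/issues.rb and may differ):

# Hypothetical sketch: allow_blank: false rejects nil and empty values with a
# 400, and type: DateTime rejects strings Grape cannot coerce.
optional :updated_at, type: DateTime, allow_blank: false,
         desc: 'Only admins and project owners can set this'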
......
......@@ -30,7 +30,8 @@ RSpec.shared_examples 'restores project successfully' do |**results|
expect(project.issues.size).to eq(results.fetch(:issues, 0))
end
it 'does not set params that are excluded from import_export settings' do
# This test is quarantined because the use of magic number 999 causes failure on CI
it 'does not set params that are excluded from import_export settings', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/207932#note_293724442' do
expect(project.import_type).to be_nil
expect(project.creator_id).not_to eq 999
end
......