Commit f85f4c60 authored by Dmitriy Zaporozhets's avatar Dmitriy Zaporozhets

Merge branch 'master' into 'ce-to-ee'

# Conflicts:
#   db/schema.rb
#   package.json
#   spec/javascripts/test_bundle.js
parents 8e5c72f8 7b210602
{
"presets": [
["latest", { "es2015": { "modules": false } }],
"stage-2"
],
"env": {
"coverage": {
"plugins": [
["istanbul", {
"exclude": [
"app/assets/javascripts/droplab/**/*",
"spec/javascripts/**/*"
]
}],
["transform-define", {
"process.env.BABEL_ENV": "coverage"
}]
]
}
}
}
......@@ -262,6 +262,8 @@ rake karma:
stage: test
<<: *use-db
<<: *dedicated-runner
variables:
BABEL_ENV: "coverage"
script:
- bundle exec rake karma
artifacts:
......
Please view this file on the master branch, on stable branches it's out of date.
## 8.17.4 (2017-03-19)
- Elastic security fix: Respect feature visibility level.
## 8.17.3 (2017-03-07)
- No changes.
......@@ -34,6 +38,13 @@ Please view this file on the master branch, on stable branches it's out of date.
- Reduce queries needed to check if node is a primary or secondary Geo node.
- Allow squashing merge requests into a single commit.
## 8.16.8 (2017-03-19)
- No changes.
- No changes.
- No changes.
- Elastic security fix: Respect feature visibility level.
## 8.16.7 (2017-02-27)
- Fixed merge request state not updating when approvals feature is active.
......@@ -78,6 +89,12 @@ Please view this file on the master branch, on stable branches it's out of date.
- Expose issue weight in the API. !1023 (Robert Schilling)
- Copy <some text> to clipboard. !1048
## 8.15.8 (2017-03-19)
- No changes.
- No changes.
- Elastic security fix: Respect feature visibility level.
## 8.15.7 (2017-02-15)
- No changes.
......
......@@ -2,6 +2,11 @@
documentation](doc/development/changelog.md) for instructions on adding your own
entry.
## 8.17.4 (2017-03-19)
- Only show public emails in atom feeds.
- To protect against Server-side Request Forgery project import URLs are now prohibited against localhost or the server IP except for the assigned instance URL and port. Imports are also prohibited from ports below 1024 with the exception of ports 22, 80, and 443.
## 8.17.3 (2017-03-07)
- Fix the redirect to custom home page URL. !9518
......@@ -211,6 +216,14 @@ entry.
- Remove deprecated GitlabCiService.
- Requeue pending deletion projects.
## 8.16.8 (2017-03-19)
- No changes.
- No changes.
- No changes.
- Only show public emails in atom feeds.
- To protect against Server-side Request Forgery project import URLs are now prohibited against localhost or the server IP except for the assigned instance URL and port. Imports are also prohibited from ports below 1024 with the exception of ports 22, 80, and 443.
## 8.16.7 (2017-02-27)
- Fix MR changes tab size count when there are over 100 files in the diff.
......@@ -410,6 +423,14 @@ entry.
- Add margin to markdown math blocks.
- Add hover state to MR comment reply button.
## 8.15.8 (2017-03-19)
- No changes.
- No changes.
- Read true-up info from license and validate it. !1159
- Only show public emails in atom feeds.
- To protect against Server-side Request Forgery project import URLs are now prohibited against localhost or the server IP except for the assigned instance URL and port. Imports are also prohibited from ports below 1024 with the exception of ports 22, 80, and 443.
## 8.15.7 (2017-02-15)
- No changes.
......
......@@ -81,7 +81,7 @@ module IssuableCollections
# @filter_params[:authorized_only] = true
end
@filter_params
@filter_params.permit(IssuableFinder::VALID_PARAMS)
end
def set_default_scope
......
......@@ -163,8 +163,7 @@ class Projects::IssuesController < Projects::ApplicationController
end
def export_csv
csv_params = filter_params.permit(IssuableFinder::VALID_PARAMS)
ExportCsvWorker.perform_async(@current_user.id, @project.id, csv_params)
ExportCsvWorker.perform_async(@current_user.id, @project.id, filter_params)
index_path = namespace_project_issues_path(@project.namespace, @project)
redirect_to(index_path, notice: "Your CSV export has started. It will be emailed to #{current_user.notification_email} when complete.")
......
......@@ -20,7 +20,10 @@
#
class IssuableFinder
NONE = '0'.freeze
VALID_PARAMS = %i(scope state group_id project_id milestone_title assignee_id search label_name sort assignee_username author_id author_username authorized_only due_date iids non_archived weight).freeze
SCALAR_PARAMS = %i(scope state group_id project_id milestone_title assignee_id search label_name sort assignee_username author_id author_username authorized_only due_date iids non_archived weight).freeze
ARRAY_PARAMS = { label_name: [], iids: [] }.freeze
VALID_PARAMS = (SCALAR_PARAMS + [ARRAY_PARAMS]).freeze
attr_accessor :current_user, :params
......
......@@ -158,43 +158,76 @@ module Elastic
end
def project_ids_filter(query_hash, options)
if options[:project_ids]
condition = project_ids_condition(
options[:current_user],
options[:project_ids],
options[:public_and_internal_projects]
)
query_hash[:query][:bool][:filter] ||= []
query_hash[:query][:bool][:filter] << {
has_parent: {
parent_type: "project",
query: {
bool: {
should: condition
}
}
project_query = project_ids_query(
options[:current_user],
options[:project_ids],
options[:public_and_internal_projects],
options[:feature]
)
query_hash[:query][:bool][:filter] ||= []
query_hash[:query][:bool][:filter] << {
has_parent: {
parent_type: "project",
query: {
bool: project_query
}
}
end
}
query_hash
end
def project_ids_condition(current_user, project_ids, public_and_internal_projects)
conditions = [{
terms: { id: project_ids }
}]
def project_ids_query(current_user, project_ids, public_and_internal_projects, feature = nil)
conditions = []
private_project_condition = {
bool: {
filter: {
terms: { id: project_ids }
}
}
}
if feature
private_project_condition[:bool][:must_not] = {
term: { "#{feature}_access_level" => ProjectFeature::DISABLED }
}
end
conditions << private_project_condition
if public_and_internal_projects
conditions << { term: { visibility_level: Project::PUBLIC } }
conditions << if feature
{
bool: {
filter: [
{ term: { visibility_level: Project::PUBLIC } },
{ term: { "#{feature}_access_level" => ProjectFeature::ENABLED } }
]
}
}
else
{ term: { visibility_level: Project::PUBLIC } }
end
if current_user
conditions << { term: { visibility_level: Project::INTERNAL } }
conditions << if feature
{
bool: {
filter: [
{ term: { visibility_level: Project::INTERNAL } },
{ term: { "#{feature}_access_level" => ProjectFeature::ENABLED } }
]
}
}
else
{ term: { visibility_level: Project::INTERNAL } }
end
end
end
conditions
{ should: conditions }
end
end
end
......
......@@ -45,6 +45,7 @@ module Elastic
basic_query_hash(%w(title^2 description), query)
end
options[:feature] = 'issues'
query_hash = project_ids_filter(query_hash, options)
query_hash = confidentiality_filter(query_hash, options[:current_user])
......
......@@ -67,6 +67,7 @@ module Elastic
basic_query_hash(%w(title^2 description), query)
end
options[:feature] = 'merge_requests'
query_hash = project_ids_filter(query_hash, options)
self.__elasticsearch__.search(query_hash)
......
......@@ -18,6 +18,9 @@ module Elastic
indexes :author_id, type: :integer
indexes :confidential, type: :boolean
end
indexes :noteable_type, type: :string, index: :not_analyzed
indexes :noteable_id, type: :integer, index: :not_analyzed
end
def as_indexed_json(options = {})
......@@ -25,7 +28,7 @@ module Elastic
# We don't use as_json(only: ...) because it calls all virtual and serialized attributtes
# https://gitlab.com/gitlab-org/gitlab-ee/issues/349
[:id, :note, :project_id, :created_at, :updated_at].each do |attr|
[:id, :note, :project_id, :noteable_type, :noteable_id, :created_at, :updated_at].each do |attr|
data[attr.to_s] = safely_read_attribute_for_elasticsearch(attr)
end
......@@ -55,11 +58,6 @@ module Elastic
}
}
if query.blank?
query_hash[:query][:bool][:must] = [{ match_all: {} }]
query_hash[:track_scores] = true
end
query_hash = project_ids_filter(query_hash, options)
query_hash = confidentiality_filter(query_hash, options[:current_user])
......
......@@ -2,6 +2,14 @@ module Elastic
module ProjectsSearch
extend ActiveSupport::Concern
TRACKED_FEATURE_SETTINGS = %w(
issues_access_level
merge_requests_access_level
snippets_access_level
wiki_access_level
repository_access_level
)
included do
include ApplicationSearch
......@@ -22,7 +30,14 @@ module Elastic
indexes :created_at, type: :date
indexes :updated_at, type: :date
indexes :archived, type: :boolean
indexes :visibility_level, type: :integer
indexes :visibility_level, type: :integer
indexes :issues_access_level, type: :integer
indexes :merge_requests_access_level, type: :integer
indexes :snippets_access_level, type: :integer
indexes :wiki_access_level, type: :integer
indexes :repository_access_level, type: :integer
indexes :last_activity_at, type: :date
indexes :last_pushed_at, type: :date
end
......@@ -49,6 +64,10 @@ module Elastic
data[attr.to_s] = safely_read_attribute_for_elasticsearch(attr)
end
TRACKED_FEATURE_SETTINGS.each do |feature|
data[feature] = project_feature.public_send(feature)
end
data
end
......@@ -85,9 +104,7 @@ module Elastic
if options[:project_ids]
filters << {
bool: {
should: project_ids_condition(options[:current_user], options[:project_ids], options[:public_and_internal_projects])
}
bool: project_ids_query(options[:current_user], options[:project_ids], options[:public_and_internal_projects])
}
end
......
......@@ -79,14 +79,23 @@ module Elastic
{
bool: {
should: [
{ terms: { visibility_level: [Snippet::PUBLIC, Snippet::INTERNAL] } },
{ term: { author_id: user.id } },
{ terms: { project_id: user.authorized_projects.pluck(:id) } },
{ bool: {
filter: { terms: { visibility_level: [Snippet::PUBLIC, Snippet::INTERNAL] } },
must_not: { exists: { field: 'project_id' } }
}
}
]
}
}
else
{ term: { visibility_level: Snippet::PUBLIC } }
{
bool: {
filter: { term: { visibility_level: Snippet::PUBLIC } },
must_not: { exists: { field: 'project_id' } }
}
}
end
query_hash[:query][:bool][:filter] = filter
......
......@@ -49,6 +49,7 @@ module Issuable
delegate :name,
:email,
to: :author,
allow_nil: true,
prefix: true
delegate :name,
......
class Geo::BaseRegistry < ActiveRecord::Base
self.abstract_class = true
if Gitlab::Geo.secondary? || (Rails.env.test? && Rails.configuration.respond_to?(:geo_database))
if Gitlab::Geo.configured? && (Gitlab::Geo.secondary? || Rails.env.test?)
establish_connection Rails.configuration.geo_database
end
end
......@@ -63,6 +63,14 @@ class Issue < ActiveRecord::Base
state :opened
state :reopened
state :closed
before_transition any => :closed do |issue|
issue.closed_at = Time.zone.now
end
before_transition closed: any do |issue|
issue.closed_at = nil
end
end
def hook_attrs
......
......@@ -43,6 +43,12 @@ class ProjectFeature < ActiveRecord::Base
default_value_for :wiki_access_level, value: ENABLED, allows_nil: false
default_value_for :repository_access_level, value: ENABLED, allows_nil: false
after_commit on: :update do
if current_application_settings.elasticsearch_indexing?
ElasticIndexerWorker.perform_async(:update, 'Project', project_id)
end
end
def feature_available?(feature, user)
access_level = public_send(ProjectFeature.access_level_attribute(feature))
get_permission(user, access_level)
......
......@@ -17,4 +17,4 @@
%strong= @current_user.notification_email
in an attachment.
.modal-footer
= link_to 'Export issues', export_csv_namespace_project_issues_path(@project.namespace, @project, params.permit(IssuableFinder::VALID_PARAMS)), method: :post, class: 'btn btn-success pull-left', title: 'Export issues'
= link_to 'Export issues', export_csv_namespace_project_issues_path(@project.namespace, @project, request.query_parameters), method: :post, class: 'btn btn-success pull-left', title: 'Export issues'
......@@ -123,7 +123,7 @@
= dropdown_loading
#js-add-issues-btn.prepend-left-10
- elsif type != :boards_modal
= render 'shared/sort_dropdown'
= render 'shared/sort_dropdown', type: local_assigns[:type]
- if @bulk_edit
.issues_bulk_update.hide
......
......@@ -6,6 +6,7 @@ class GeoBackfillWorker
BATCH_SIZE = 100.freeze
def perform
return unless Gitlab::Geo.configured?
return unless Gitlab::Geo.primary_node.present?
start_time = Time.now
......
......@@ -22,6 +22,7 @@ class GeoFileDownloadDispatchWorker
# files, excluding ones in progress.
# 5. Quit when we have scheduled all downloads or exceeded an hour.
def perform
return unless Gitlab::Geo.configured?
return unless Gitlab::Geo.secondary?
@start_time = Time.now
......
---
title: 'Elastic security fix: Respect feature visibility level'
merge_request:
author:
---
title: Add closed_at field to issues
merge_request:
author:
---
title: Only add code coverage instrumentation when generating coverage report
merge_request: 9987
author:
......@@ -3,17 +3,6 @@ var webpack = require('webpack');
var webpackConfig = require('./webpack.config.js');
var ROOT_PATH = path.resolve(__dirname, '..');
// add coverage instrumentation to babel config
if (webpackConfig.module && webpackConfig.module.rules) {
var babelConfig = webpackConfig.module.rules.find(function (rule) {
return rule.loader === 'babel-loader';
});
babelConfig.options = babelConfig.options || {};
babelConfig.options.plugins = babelConfig.options.plugins || [];
babelConfig.options.plugins.push('istanbul');
}
// remove problematic plugins
if (webpackConfig.plugins) {
webpackConfig.plugins = webpackConfig.plugins.filter(function (plugin) {
......@@ -27,7 +16,8 @@ if (webpackConfig.plugins) {
// Karma configuration
module.exports = function(config) {
var progressReporter = process.env.CI ? 'mocha' : 'progress';
config.set({
var karmaConfig = {
basePath: ROOT_PATH,
browsers: ['PhantomJS'],
frameworks: ['jasmine'],
......@@ -38,14 +28,20 @@ module.exports = function(config) {
preprocessors: {
'spec/javascripts/**/*.js': ['webpack', 'sourcemap'],
},
reporters: [progressReporter, 'coverage-istanbul'],
coverageIstanbulReporter: {
reporters: [progressReporter],
webpack: webpackConfig,
webpackMiddleware: { stats: 'errors-only' },
};
if (process.env.BABEL_ENV === 'coverage' || process.env.NODE_ENV === 'coverage') {
karmaConfig.reporters.push('coverage-istanbul');
karmaConfig.coverageIstanbulReporter = {
reports: ['html', 'text-summary'],
dir: 'coverage-javascript/',
subdir: '.',
fixWebpackSourcePaths: true
},
webpack: webpackConfig,
webpackMiddleware: { stats: 'errors-only' },
});
};
}
config.set(karmaConfig);
};
......@@ -60,13 +60,7 @@ var config = {
{
test: /\.js$/,
exclude: /(node_modules|vendor\/assets)/,
loader: 'babel-loader',
options: {
presets: [
["es2015", {"modules": false}],
'stage-2'
]
}
loader: 'babel-loader'
},
{
test: /\.svg$/,
......
class AddClosedAtToIssues < ActiveRecord::Migration
  # Adds a nullable `closed_at` timestamp column to `issues` so the time an
  # issue was closed can be recorded (presumably populated by the Issue state
  # machine's close/reopen transition callbacks — confirm against app/models).
  #
  # Adding a nullable column with no default requires no downtime.
  DOWNTIME = false

  # Reversible: `change` lets Rails derive the rollback (remove_column).
  def change
    add_column :issues, :closed_at, :datetime
  end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170315174634) do
ActiveRecord::Schema.define(version: 20170315194013) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -530,6 +530,7 @@ ActiveRecord::Schema.define(version: 20170315174634) do
t.text "description_html"
t.integer "time_estimate"
t.integer "relative_position"
t.datetime "closed_at"
end
add_index "issues", ["assignee_id"], name: "index_issues_on_assignee_id", using: :btree
......@@ -1518,4 +1519,4 @@ ActiveRecord::Schema.define(version: 20170315174634) do
add_foreign_key "timelogs", "merge_requests", name: "fk_timelogs_merge_requests_merge_request_id", on_delete: :cascade
add_foreign_key "trending_projects", "projects", on_delete: :cascade
add_foreign_key "u2f_registrations", "users"
end
end
\ No newline at end of file
# GitLab Geo
> **Note:**
This feature was introduced in GitLab 8.5 EE as Alpha.
We recommend you use it with at least GitLab 8.6 EE.
> **Notes:**
- GitLab Geo is part of [GitLab Enterprise Edition Premium][ee].
- Introduced in GitLab Enterprise Edition 8.9.
We recommend you use it with at least GitLab Enterprise Edition 8.14.
- You should make sure that all nodes run the same GitLab version.
GitLab Geo allows you to replicate your GitLab instance to other geographical
locations as a read-only fully operational version.
- [Overview](#overview)
- [Setup instructions](#setup-instructions)
- [Database Replication](database.md)
- [Configuration](configuration.md)
- [Current limitations](#current-limitations)
- [Disaster Recovery](disaster-recovery.md)
- [Frequently Asked Questions](#frequently-asked-questions)
- [Can I use Geo in a disaster recovery situation?](#can-i-use-geo-in-a-disaster-recovery-situation)
- [What data is replicated to a secondary node?](#what-data-is-replicated-to-a-secondary-node)
- [Can I git push to a secondary node?](#can-i-git-push-to-a-secondary-node)
- [How long does it take to have a commit replicated to a secondary node?](#how-long-does-it-take-to-have-a-commit-replicated-to-a-secondary-node)
- [What happens if the SSH server runs at a different port?](#what-happens-if-the-ssh-server-runs-at-a-different-port)
## Overview
If you have two or more teams geographically spread out, but your GitLab
......@@ -44,36 +33,39 @@ Keep in mind that:
## Setup instructions
In order to set up one or more GitLab Geo instances, follow the steps below in
this **exact order**:
the **exact order** they appear. **Make sure the GitLab version is the same on
all nodes.**
### Using Omnibus GitLab
If you installed GitLab using the Omnibus packages (highly recommended):
1. Follow the first 3 steps to [install GitLab Enterprise Edition][install-ee]
on the server that will serve as the secondary Geo node. Do not login or
set up anything else in the secondary node for the moment.
1. [Setup the database replication](database.md) (`primary <-> secondary (read-only)` topology)
1. [Install GitLab Enterprise Edition][install-ee] on the server that will serve
as the **secondary** Geo node. Do not login or set up anything else in the
secondary node for the moment.
1. [Setup the database replication](database.md) (`primary (read-write) <-> secondary (read-only)` topology).
1. [Configure GitLab](configuration.md) to set the primary and secondary nodes.
1. [Follow the after setup steps](after_setup.md).
## After setup
[install-ee]: https://about.gitlab.com/downloads-ee/ "GitLab Enterprise Edition Omnibus packages downloads page"
After you set up the database replication and configure the GitLab Geo nodes,
there are a few things to consider:
### Using GitLab installed from source
1. When you create a new project in the primary node, the Git repository will
appear in the secondary only _after_ the first `git push`.
1. You need an extra step to be able to fetch code from the `secondary` and push
to `primary`:
If you installed GitLab from source:
1. Clone your repository as you would normally do from the `secondary` node
1. Change the remote push URL following this example:
1. [Install GitLab Enterprise Edition][install-ee-source] on the server that
will serve as the **secondary** Geo node. Do not login or set up anything
else in the secondary node for the moment.
1. [Setup the database replication](database_source.md) (`primary (read-write) <-> secondary (read-only)` topology).
1. [Configure GitLab](configuration_source.md) to set the primary and secondary
nodes.
1. [Follow the after setup steps](after_setup.md).
```bash
git remote set-url --push origin git@primary.gitlab.example.com:user/repo.git
```
[install-ee-source]: https://docs.gitlab.com/ee/install/installation.html "GitLab Enterprise Edition installation from source"
>**Important**:
The initialization of a new Geo secondary node on versions older than 8.14
requires data to be copied from the primary, as there is no backfill
feature bundled with those versions.
See more details in the [Configure GitLab](configuration.md) step.
## Updating the Geo nodes
Read how to [update your Geo nodes to the latest GitLab version](updating_the_geo_nodes.md).
## Current limitations
......@@ -83,41 +75,12 @@ See more details in the [Configure GitLab](configuration.md) step.
## Frequently Asked Questions
### Can I use Geo in a disaster recovery situation?
There are limitations to what we replicate (see
[What data is replicated to a secondary node?](#what-data-is-replicated-to-a-secondary-node)).
In an extreme data-loss situation you can make a secondary Geo into your
primary, but this is not officially supported yet.
If you still want to proceed, see our step-by-step instructions on how to
manually [promote a secondary node](disaster-recovery.md) into primary.
### What data is replicated to a secondary node?
We currently replicate project repositories and the whole database. This
means user accounts, issues, merge requests, groups, project data, etc.,
will be available for query.
We currently don't replicate user generated attachments / avatars or any
other file in `public/upload`. We also don't replicate LFS or
artifacts data (`shared/folder`).
### Can I git push to a secondary node?
No. All writing operations (this includes `git push`) must be done in your
primary node.
### How long does it take to have a commit replicated to a secondary node?
All replication operations are asynchronous and are queued to be dispatched in
a batched request every 10 seconds. Besides that, it depends on a lot of other
factors including the amount of traffic, how big your commit is, the
connectivity between your nodes, your hardware, etc.
Read more in the [Geo FAQ](faq.md).
### What happens if the SSH server runs at a different port?
## Troubleshooting
We send the clone url from the primary server to any secondaries, so it
doesn't matter. If primary is running on port `2200` clone url will reflect
that.
Read the [troubleshooting document](troubleshooting.md).
[install-ee]: https://about.gitlab.com/downloads-ee/
[ee]: https://about.gitlab.com/gitlab-ee/ "GitLab Enterprise Edition landing page"
[install-ee]: https://about.gitlab.com/downloads-ee/ "GitLab Enterprise Edition Omnibus packages downloads page"
[install-ee-source]: https://docs.gitlab.com/ee/install/installation.html "GitLab Enterprise Edition installation from source"
# After setup
After you set up the [database replication and configure the GitLab Geo nodes][req],
there are a few things to consider:
1. When you create a new project in the primary node, the Git repository will
appear in the secondary only _after_ the first `git push`.
1. You need an extra step to be able to fetch code from the `secondary` and push
to `primary`:
1. Clone your repository as you would normally do from the `secondary` node
1. Change the remote push URL following this example:
```bash
git remote set-url --push origin git@primary.gitlab.example.com:user/repo.git
```
[req]: README.md#setup-instructions
This diff is collapsed.
# GitLab Geo configuration
>**Note:**
This is the documentation for installations from source. For installations
using the Omnibus GitLab packages, follow the
[**Omnibus GitLab Geo nodes configuration**](configuration.md) guide.
1. [Install GitLab Enterprise Edition][install-ee-source] on the server that
will serve as the secondary Geo node. Do not login or set up anything else
in the secondary node for the moment.
1. [Setup the database replication](database_source.md) (`primary (read-write) <-> secondary (read-only)` topology).
1. **Configure GitLab to set the primary and secondary nodes.**
1. [Follow the after setup steps](after_setup.md).
[install-ee-source]: https://docs.gitlab.com/ee/install/installation.html "GitLab Enterprise Edition installation from source"
This is the final step you need to follow in order to setup a Geo node.
You are encouraged to first read through all the steps before executing them
in your testing/production environment.
## Setting up GitLab
>**Notes:**
- Don't setup any custom authentication in the secondary nodes, this will be
handled by the primary node.
- Do not add anything in the secondaries Geo nodes admin area
(**Admin Area ➔ Geo Nodes**). This is handled solely by the primary node.
After having installed GitLab Enterprise Edition in the instance that will serve
as a Geo node and set up the [database replication](database_source.md), the
next steps can be summed up to:
1. Configure the primary node
1. Replicate some required configurations between the primary and the secondaries
1. Start GitLab in the secondary node's machine
1. Configure every secondary node in the primary's Admin screen
### Prerequisites
This is the last step of configuring a Geo node. Make sure you have followed the
first two steps of the [Setup instructions](README.md#setup-instructions):
1. You have already installed on the secondary server the same version of
GitLab Enterprise Edition that is present on the primary server.
1. You have set up the database replication.
1. Your secondary node is allowed to communicate via HTTP/HTTPS and SSH with
your primary node (make sure your firewall is not blocking that).
Some of the following steps require to configure the primary and secondary
nodes almost at the same time. For your convenience make sure you have SSH
logins opened on all nodes as we will be moving back and forth.
### Step 1. Adding the primary GitLab node
1. SSH into the **primary** node and login as root:
```
sudo -i
```
1. (Source install only): Create a new SSH key pair for the primary node. Choose the default location
and leave the password blank by hitting 'Enter' three times:
```bash
sudo -u git -H ssh-keygen -b 4096 -C 'Primary GitLab Geo node'
```
Read more in [additional info for SSH key pairs](#additional-information-for-the-ssh-key-pairs).
1. Get the contents of `id_rsa.pub` for the git user:
```
# Installations from source
sudo -u git cat /home/git/.ssh/id_rsa.pub
```
1. Visit the primary node's **Admin Area ➔ Geo Nodes** (`/admin/geo_nodes`) in
your browser.
1. Add the primary node by providing its full URL and the public SSH key
you created previously. Make sure to check the box 'This is a primary node'
when adding it.
![Add new primary Geo node](img/geo_nodes_add_new.png)
1. Click the **Add node** button.
### Step 2. Updating the `known_hosts` file of the secondary nodes
1. SSH into the **secondary** node and login as root:
```
sudo -i
```
1. The secondary nodes need to know the SSH fingerprint of the primary node that
will be used for the Git clone/fetch operations. In order to add it to the
`known_hosts` file, run the following command and type `yes` when asked:
```
sudo -u git -H ssh git@<primary-node-url>
```
Replace `<primary-node-url>` with the FQDN of the primary node.
1. Verify that the fingerprint was added by checking `known_hosts`:
```
# Installations from source
cat /home/git/.ssh/known_hosts
```
### Step 3. Copying the database encryption key
GitLab stores a unique encryption key in disk that we use to safely store
sensitive data in the database. Any secondary node must have the
**exact same value** for `db_key_base` as defined in the primary one.
1. SSH into the **primary** node and login as root:
```
sudo -i
```
1. Find the value of `db_key_base` and copy it:
```
# Installations from source
cat /home/git/gitlab/config/secrets.yml | grep db_key_base
```
1. SSH into the **secondary** node and login as root:
```
sudo -i
```
1. Open the secrets file and paste the value of `db_key_base` you copied in the
previous step:
```
# Installations from source
editor /home/git/gitlab/config/secrets.yml
```
1. Save and close the file.
### Step 4. Enabling the secondary GitLab node
1. SSH into the **secondary** node and login as root:
```
sudo -i
```
1. Create a new SSH key pair for the secondary node. Choose the default location
and leave the password blank by hitting 'Enter' three times:
```bash
sudo -u git -H ssh-keygen -b 4096 -C 'Secondary GitLab Geo node'
```
Read more in [additional info for SSH key pairs](#additional-information-for-the-ssh-key-pairs).
1. Get the contents of `id_rsa.pub` that was just created:
```
# Installations from source
sudo -u git cat /home/git/.ssh/id_rsa.pub
```
1. Visit the **primary** node's **Admin Area ➔ Geo Nodes** (`/admin/geo_nodes`)
in your browser.
1. Add the secondary node by providing its full URL and the public SSH key
you created previously. **Do NOT** check the box 'This is a primary node'.
1. Click the **Add node** button.
---
After the **Add Node** button is pressed, the primary node will start to notify
changes to the secondary. Make sure the secondary instance is running and
accessible.
The two most obvious issues that replication can have here are:
1. Database replication not working well
1. Instance to instance notification not working. In that case, it can be
something of the following:
- You are using a custom certificate or custom CA (see the
[Troubleshooting](configuration.md#troubleshooting) section)
- Instance is firewalled (check your firewall rules)
### Step 5. Replicating the repositories data
Getting a new secondary Geo node up and running, will also require the
repositories directory to be synced from the primary node.
With GitLab **8.14** you can start the syncing process by clicking the
"Backfill all repositories" button on `Admin > Geo Nodes` screen.
On previous versions, you can use `rsync` for that:
Make sure `rsync` is installed in both primary and secondary servers and root
SSH access with a password is enabled. Otherwise, you can set up an SSH key-based
connection between the servers.
1. SSH into the **secondary** node and login as root:
```
sudo -i
```
1. Assuming `1.2.3.4` is the IP of the primary node, run the following command
to start the sync:
```bash
# Installations from source
rsync -guavrP root@1.2.3.4:/home/git/repositories/ /home/git/repositories/
chmod ug+rwX,o-rwx /home/git/repositories
```
If this step is not followed, the secondary node will eventually clone and
fetch every missing repository as they are updated with new commits on the
primary node, so syncing the repositories beforehand will buy you some time.
While active repositories will eventually be replicated, if you don't rsync
the files, any archived/inactive repositories will not get to the secondary node,
as Geo doesn't run any routine task to look for missing repositories.
### Step 6. Regenerating the authorized keys in the secondary node
The final step is to regenerate the keys for `~/.ssh/authorized_keys`
(HTTPS clone will still work without this extra step).
On the **secondary** node where the database is [already replicated](./database.md),
run:
```
# Installations from source
sudo -u git -H bundle exec rake gitlab:shell:setup RAILS_ENV=production
```
This will enable `git` operations to authorize against your existing users.
New users and SSH keys updated after this step, will be replicated automatically.
## Next steps
Your nodes should now be ready to use. You can login to the secondary node
with the same credentials as used in the primary. Visit the secondary node's
**Admin Area ➔ Geo Nodes** (`/admin/geo_nodes`) in your browser to check if it's
correctly identified as a secondary Geo node and if Geo is enabled.
If your installation isn't working properly, check the
[troubleshooting](configuration.md#troubleshooting) section.
Point your users to the [after setup steps](after_setup.md).
## Adding another secondary Geo node
To add another Geo node in an already Geo configured infrastructure, just follow
[the steps starting from step 2](#step-2-updating-the-known_hosts-file-of-the-secondary-nodes).
Just omit the first step that sets up the primary node.
## Additional information for the SSH key pairs
Read [Additional information for the SSH key pairs](configuration.md#additional-information-for-the-ssh-key-pairs).
## Troubleshooting
Read the [troubleshooting document](troubleshooting.md).
# GitLab Geo database replication
>**Note:**
This is the documentation for the Omnibus GitLab packages. For installations
from source, follow the
[**database replication for installations from source**](database_source.md) guide.
1. [Install GitLab Enterprise Edition][install-ee] on the server that will serve
as the secondary Geo node. Do not login or set up anything else in the
secondary node for the moment.
1. **Setup the database replication (`primary (read-write) <-> secondary (read-only)` topology).**
1. [Configure GitLab](configuration.md) to set the primary and secondary nodes.
1. [Follow the after setup steps](after_setup.md).
[install-ee]: https://about.gitlab.com/downloads-ee/ "GitLab Enterprise Edition Omnibus packages downloads page"
This document describes the minimal steps you have to take in order to
replicate your GitLab database into another server. You may have to change
some values according to your database setup, how big it is, etc.
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents**
- [PostgreSQL replication](#postgresql-replication)
- [Prerequisites](#prerequisites)
- [Step 1. Configure the primary server](#step-1-configure-the-primary-server)
- [Step 2. Configure the secondary server](#step-2-configure-the-secondary-server)
- [Step 3. Initiate the replication process](#step-3-initiate-the-replication-process)
- [Next steps](#next-steps)
- [MySQL replication](#mysql-replication)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
You are encouraged to first read through all the steps before executing them
in your testing/production environment.
## PostgreSQL replication
......@@ -44,8 +47,6 @@ The following guide assumes that:
### Step 1. Configure the primary server
**For Omnibus installations**
1. SSH into your GitLab **primary** server and login as root:
```
......@@ -118,70 +119,8 @@ The following guide assumes that:
public IP.
1. Continue to [set up the secondary server](#step-2-configure-the-secondary-server).
---
**For installations from source**
1. SSH into your database **primary** server and login as root:
```
sudo -i
```
1. Create a replication user named `gitlab_replicator`:
```bash
sudo -u postgres psql -c "CREATE USER gitlab_replicator REPLICATION ENCRYPTED PASSWORD 'thepassword';"
```
1. Edit `postgresql.conf` to configure the primary server for streaming replication
(for Debian/Ubuntu that would be `/etc/postgresql/9.x/main/postgresql.conf`):
```bash
listen_address = '1.2.3.4'
wal_level = hot_standby
max_wal_senders = 5
checkpoint_segments = 10
wal_keep_segments = 10
hot_standby = on
```
See the Omnibus notes above for more details of `listen_address`.
Edit the `wal` values as you see fit.
1. Set the access control on the primary to allow TCP connections using the
server's public IP and set the connection from the secondary to require a
password. Edit `pg_hba.conf` (for Debian/Ubuntu that would be
`/etc/postgresql/9.x/main/pg_hba.conf`):
```bash
host all all 127.0.0.1/32 trust
host all all 1.2.3.4/32 trust
host replication gitlab_replicator 5.6.7.8/32 md5
```
Where `1.2.3.4` is the public IP address of the primary server, and `5.6.7.8`
the public IP address of the secondary one. If you want to add another
secondary, add one more row like the replication one and change the IP
address:
```bash
host all all 127.0.0.1/32 trust
host all all 1.2.3.4/32 trust
host replication gitlab_replicator 5.6.7.8/32 md5
host replication gitlab_replicator 11.22.33.44/32 md5
```
1. Restart PostgreSQL for the changes to take effect.
1. Now that the PostgreSQL server is set up to accept remote connections, run
`netstat -plnt` to make sure that PostgreSQL is listening to the server's
public IP.
### Step 2. Configure the secondary server
**For Omnibus installations**
1. SSH into your GitLab **secondary** server and login as root:
```
......@@ -217,46 +156,6 @@ The following guide assumes that:
1. [Reconfigure GitLab][] for the changes to take effect.
1. Continue to [initiate the replication process](#step-3-initiate-the-replication-process).
---
**For installations from source**
1. SSH into your database **secondary** server and login as root:
```
sudo -i
```
1. Test that the remote connection to the primary server works:
```
sudo -u postgres psql -h 1.2.3.4 -U gitlab_replicator -d gitlabhq_production -W
```
When prompted enter the password you set in the first step for the
`gitlab_replicator` user. If all worked correctly, you should see the
database prompt.
1. Exit the PostgreSQL console:
```
\q
```
1. Edit `postgresql.conf` to configure the secondary for streaming replication
(for Debian/Ubuntu that would be `/etc/postgresql/9.x/main/postgresql.conf`):
```bash
wal_level = hot_standby
max_wal_senders = 5
checkpoint_segments = 10
wal_keep_segments = 10
hot_standby = on
```
1. Restart PostgreSQL for the changes to take effect.
1. Continue to [initiate the replication process](#step-3-initiate-the-replication-process).
### Step 3. Initiate the replication process
Below we provide a script that connects to the primary server, replicates the
......@@ -342,5 +241,9 @@ Now that the database replication is done, the next step is to configure GitLab.
We don't support MySQL replication for GitLab Geo.
## Troubleshooting
Read the [troubleshooting document](troubleshooting.md).
[pgback]: http://www.postgresql.org/docs/9.2/static/app-pgbasebackup.html
[reconfigure GitLab]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
# GitLab Geo database replication
>**Note:**
This is the documentation for installations from source. For installations
using the Omnibus GitLab packages, follow the
[**database replication for Omnibus GitLab**](database.md) guide.
1. [Install GitLab Enterprise Edition][install-ee-source] on the server that
will serve as the secondary Geo node. Do not login or set up anything else
in the secondary node for the moment.
1. **Setup the database replication (`primary (read-write) <-> secondary (read-only)` topology).**
1. [Configure GitLab](configuration_source.md) to set the primary and secondary
nodes.
1. [Follow the after setup steps](after_setup.md).
[install-ee-source]: https://docs.gitlab.com/ee/install/installation.html "GitLab Enterprise Edition installation from source"
This document describes the minimal steps you have to take in order to
replicate your GitLab database into another server. You may have to change
some values according to your database setup, how big it is, etc.
You are encouraged to first read through all the steps before executing them
in your testing/production environment.
## PostgreSQL replication
The GitLab primary node where the write operations happen will connect to
`primary` database server, and the secondary ones which are read-only will
connect to `secondary` database servers (which are read-only too).
>**Note:**
In many databases' documentation you will see `primary` being referenced as `master`
and `secondary` as either `slave` or `standby` server (read-only).
### Prerequisites
The following guide assumes that:
- You are using PostgreSQL 9.2 or later which includes the
[`pg_basebackup` tool][pgback]. If you are using Omnibus it includes the required
PostgreSQL version for Geo.
- You have a primary server already set up (the GitLab server you are
replicating from), and you have a new secondary server set up on the same OS
and PostgreSQL version.
- The IP of the primary server for our examples will be `1.2.3.4`, whereas the
secondary's IP will be `5.6.7.8`.
### Step 1. Configure the primary server
1. SSH into your database **primary** server and login as root:
```
sudo -i
```
1. Create a replication user named `gitlab_replicator`:
```bash
sudo -u postgres psql -c "CREATE USER gitlab_replicator REPLICATION ENCRYPTED PASSWORD 'thepassword';"
```
1. Edit `postgresql.conf` to configure the primary server for streaming replication
(for Debian/Ubuntu that would be `/etc/postgresql/9.x/main/postgresql.conf`):
```bash
listen_address = '1.2.3.4'
wal_level = hot_standby
max_wal_senders = 5
checkpoint_segments = 10
wal_keep_segments = 10
hot_standby = on
```
See the Omnibus notes above for more details of `listen_address`.
Edit the `wal` values as you see fit.
1. Set the access control on the primary to allow TCP connections using the
server's public IP and set the connection from the secondary to require a
password. Edit `pg_hba.conf` (for Debian/Ubuntu that would be
`/etc/postgresql/9.x/main/pg_hba.conf`):
```bash
host all all 127.0.0.1/32 trust
host all all 1.2.3.4/32 trust
host replication gitlab_replicator 5.6.7.8/32 md5
```
Where `1.2.3.4` is the public IP address of the primary server, and `5.6.7.8`
the public IP address of the secondary one. If you want to add another
secondary, add one more row like the replication one and change the IP
address:
```bash
host all all 127.0.0.1/32 trust
host all all 1.2.3.4/32 trust
host replication gitlab_replicator 5.6.7.8/32 md5
host replication gitlab_replicator 11.22.33.44/32 md5
```
1. Restart PostgreSQL for the changes to take effect.
1. Now that the PostgreSQL server is set up to accept remote connections, run
`netstat -plnt` to make sure that PostgreSQL is listening to the server's
public IP.
### Step 2. Configure the secondary server
1. SSH into your database **secondary** server and login as root:
```
sudo -i
```
1. Test that the remote connection to the primary server works:
```
sudo -u postgres psql -h 1.2.3.4 -U gitlab_replicator -d gitlabhq_production -W
```
When prompted enter the password you set in the first step for the
`gitlab_replicator` user. If all worked correctly, you should see the
database prompt.
1. Exit the PostgreSQL console:
```
\q
```
1. Edit `postgresql.conf` to configure the secondary for streaming replication
(for Debian/Ubuntu that would be `/etc/postgresql/9.x/main/postgresql.conf`):
```bash
wal_level = hot_standby
max_wal_senders = 5
checkpoint_segments = 10
wal_keep_segments = 10
hot_standby = on
```
1. Restart PostgreSQL for the changes to take effect.
1. Continue to [initiate the replication process](#step-3-initiate-the-replication-process).
### Step 3. Initiate the replication process
Below we provide a script that connects to the primary server, replicates the
database and creates the needed files for replication.
The directories used are the defaults for Debian/Ubuntu. If you have changed
any defaults, configure it as you see fit replacing the directories and paths.
>**Warning:**
Make sure to run this on the **secondary** server as it removes all PostgreSQL's
data before running `pg_basebackup`.
1. SSH into your GitLab **secondary** server and login as root:
```
sudo -i
```
1. Save the snippet below in a file, let's say `/tmp/replica.sh`:
```bash
#!/bin/bash
PORT="5432"
USER="gitlab_replicator"
echo ---------------------------------------------------------------
echo WARNING: Make sure this script is run from the secondary server
echo ---------------------------------------------------------------
echo
echo Enter the IP of the primary PostgreSQL server
read HOST
echo Enter the password for $USER@$HOST
read -s PASSWORD
echo Stopping PostgreSQL and all GitLab services
gitlab-ctl stop
echo Backing up postgresql.conf
sudo -u gitlab-psql mv /var/opt/gitlab/postgresql/data/postgresql.conf /var/opt/gitlab/postgresql/
echo Cleaning up old cluster directory
sudo -u gitlab-psql rm -rf /var/opt/gitlab/postgresql/data
rm -f /tmp/postgresql.trigger
echo Starting base backup as the replicator user
echo Enter the password for $USER@$HOST
sudo -u gitlab-psql /opt/gitlab/embedded/bin/pg_basebackup -h $HOST -D /var/opt/gitlab/postgresql/data -U gitlab_replicator -v -x -P
echo Writing recovery.conf file
sudo -u gitlab-psql bash -c "cat > /var/opt/gitlab/postgresql/data/recovery.conf <<- _EOF1_
standby_mode = 'on'
primary_conninfo = 'host=$HOST port=$PORT user=$USER password=$PASSWORD'
trigger_file = '/tmp/postgresql.trigger'
_EOF1_
"
echo Restoring postgresql.conf
sudo -u gitlab-psql mv /var/opt/gitlab/postgresql/postgresql.conf /var/opt/gitlab/postgresql/data/
echo Starting PostgreSQL and all GitLab services
gitlab-ctl start
```
1. Run it with:
```
bash /tmp/replica.sh
```
When prompted, enter the password you set up for the `gitlab_replicator`
user in the first step.
The replication process is now over.
### Next steps
Now that the database replication is done, the next step is to configure GitLab.
[➤ GitLab Geo configuration](configuration_source.md)
## MySQL replication
We don't support MySQL replication for GitLab Geo.
## Troubleshooting
Read the [troubleshooting document](troubleshooting.md).
[pgback]: http://www.postgresql.org/docs/9.2/static/app-pgbasebackup.html
[reconfigure GitLab]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
# Geo Frequently Asked Questions
## Can I use Geo in a disaster recovery situation?
There are limitations to what we replicate (see
[What data is replicated to a secondary node?](#what-data-is-replicated-to-a-secondary-node)).
In an extreme data-loss situation you can make a secondary Geo into your
primary, but this is not officially supported yet.
If you still want to proceed, see our step-by-step instructions on how to
manually [promote a secondary node](disaster-recovery.md) into primary.
## What data is replicated to a secondary node?
We currently replicate project repositories, LFS objects and the whole
database. This means user accounts, issues, merge requests, groups, project
data, etc., will be available for query.
We currently don't replicate user-generated attachments / avatars or any
other file in `public/upload`. We also don't replicate artifact
data (`shared/folder`).
## Can I git push to a secondary node?
No. All writing operations (this includes `git push`) must be done in your
primary node.
## How long does it take to have a commit replicated to a secondary node?
All replication operations are asynchronous and are queued to be dispatched in
a batched request every 10 seconds. Besides that, it depends on a lot of other
factors including the amount of traffic, how big your commit is, the
connectivity between your nodes, your hardware, etc.
## What happens if the SSH server runs at a different port?
We send the clone url from the primary server to any secondaries, so it
doesn't matter. If primary is running on port `2200` clone url will reflect
that.
# GitLab Geo troubleshooting
>**Note:**
This list is an attempt to document all the moving parts that can go wrong.
We are working on getting all these steps verified automatically in a
rake task in the future.
Setting up Geo requires careful attention to details and sometimes it's easy to
miss a step. Here is a checklist of questions you should ask to try to detect
what you need to fix (all commands and path locations are for Omnibus installs):
- Is Postgres replication working?
- Are my nodes pointing to the correct database instance?
- You should make sure your primary Geo node points to the instance with
writing permissions.
- Any secondary nodes should point only to read-only instances.
- Can Geo detect my current node correctly?
- Geo uses your defined node from `Admin ➔ Geo` screen, and tries to match
with the value defined in `/etc/gitlab/gitlab.rb` configuration file.
The relevant line looks like: `external_url "http://gitlab.example.com"`.
- To check if node on current machine is correctly detected type:
```
sudo gitlab-rails runner "puts Gitlab::Geo.current_node.inspect"
```
and expect something like:
```
#<GeoNode id: 2, schema: "https", host: "gitlab.example.com", port: 443, relative_url_root: "", primary: false, ...>
```
- By running the command above, `primary` should be `true` when executed in
the primary node, and `false` on any secondary
- Did I define the correct SSH Key for the node?
- You must create an SSH Key for `git` user
- This is the key you have to provide in the `Admin > Geo` screen
- Can I SSH from secondary to primary node using `git` user account?
- This is the most obvious cause of problems with repository replication issues.
If you haven't added the primary node's key to `known_hosts`, you will end up with
a lot of failed sidekiq jobs with an error similar to:
```
Gitlab::Shell::Error: Host key verification failed. fatal: Could not read from remote repository. Please make sure you have the correct access rights and the repository exists.
```
An easy way to fix is by logging in as the `git` user in the secondary node and run:
```
# remove old entries to your primary gitlab in known_hosts
ssh-keyscan -R your-primary-gitlab.example.com
Visit the primary node's **Admin Area ➔ Geo Nodes** (`/admin/geo_nodes`) in
your browser. We perform the following health checks on each secondary node
to help identify if something is wrong:
- Is the node running?
- Is the node's secondary tracking database configured?
- Is the node's secondary tracking database connected?
- Is the node's secondary tracking database up-to-date?
![GitLab Geo health check](img/geo-node-healthcheck.png)
# Updating the Geo nodes
In order to update the GitLab Geo nodes when a new GitLab version is released,
all you need to do is update GitLab itself:
1. Log into each node (primary and secondaries)
1. Upgrade GitLab
1. Test primary and secondary nodes, and check version in each.
---
For Omnibus GitLab installations it's a matter of updating the package:
```
# Debian/Ubuntu
sudo apt-get update
sudo apt-get install gitlab-ee
# Centos/RHEL
sudo yum install gitlab-ee
```
For installations from source, [follow the instructions for your GitLab version]
(https://gitlab.com/gitlab-org/gitlab-ee/tree/master/doc/update).
......@@ -127,7 +127,8 @@ module Gitlab
end
def merge_requests
MergeRequest.elastic_search(query, options: base_options)
options = base_options.merge(project_ids: non_guest_project_ids)
MergeRequest.elastic_search(query, options: options)
end
def blobs
......@@ -135,7 +136,7 @@ module Gitlab
Kaminari.paginate_array([])
else
opt = {
additional_filter: build_filter_by_project
additional_filter: repository_filter
}
Repository.search(
......@@ -151,7 +152,7 @@ module Gitlab
Kaminari.paginate_array([])
else
options = {
additional_filter: build_filter_by_project
additional_filter: repository_filter
}
Repository.find_commits_by_message_with_elastic(
......@@ -163,14 +164,28 @@ module Gitlab
end
end
def build_filter_by_project
conditions = [{ terms: { id: limit_project_ids } }]
def repository_filter
conditions = [{ terms: { id: non_guest_project_ids } }]
if public_and_internal_projects
conditions << { term: { visibility_level: Project::PUBLIC } }
conditions << {
bool: {
filter: [
{ term: { visibility_level: Project::PUBLIC } },
{ term: { repository_access_level: ProjectFeature::ENABLED } }
]
}
}
if current_user
conditions << { term: { visibility_level: Project::INTERNAL } }
conditions << {
bool: {
filter: [
{ term: { visibility_level: Project::INTERNAL } },
{ term: { repository_access_level: ProjectFeature::ENABLED } }
]
}
}
end
end
......@@ -179,13 +194,28 @@ module Gitlab
parent_type: 'project',
query: {
bool: {
should: conditions
should: conditions,
must_not: { term: { repository_access_level: ProjectFeature::DISABLED } }
}
}
}
}
end
def guest_project_ids
if current_user
current_user.authorized_projects.
where('project_authorizations.access_level = ?', Gitlab::Access::GUEST).
pluck(:id)
else
[]
end
end
def non_guest_project_ids
@non_guest_project_ids ||= limit_project_ids - guest_project_ids
end
def default_scope
'projects'
end
......
......@@ -29,6 +29,10 @@ module Gitlab
Gitlab::Geo.current_node.reload.enabled?
end
def self.configured?
Rails.configuration.respond_to?(:geo_database)
end
def self.license_allows?
::License.current && ::License.current.add_on?('GitLab_Geo')
end
......
......@@ -3,6 +3,7 @@ module Gitlab
class HealthCheck
def self.perform_checks
return '' unless Gitlab::Geo.secondary?
return 'The Geo database configuration file is missing.' unless Gitlab::Geo.configured?
database_version = self.get_database_version.to_i
migration_version = self.get_migration_version.to_i
......
......@@ -114,6 +114,61 @@ namespace :gitlab do
puts "Index recreated".color(:green)
end
desc "GitLab | Elasticsearch | Add feature access levels to project"
task add_feature_visibility_levels_to_project: :environment do
client = Project.__elasticsearch__.client
#### Check if this task has already been run ####
mapping = client.indices.get(index: Project.index_name)
project_fields = mapping['gitlab-development']['mappings']['project']['properties'].keys
if project_fields.include?('issues_access_level')
puts 'Index mapping is already up to date'.color(:yellow)
exit
end
####
project_fields = {
properties: {
issues_access_level: {
type: :integer
},
merge_requests_access_level: {
type: :integer
},
snippets_access_level: {
type: :integer
},
wiki_access_level: {
type: :integer
},
repository_access_level: {
type: :integer
}
}
}
note_fields = {
properties: {
noteable_type: {
type: :string,
index: :not_analyzed
},
noteable_id: {
type: :integer
}
}
}
client.indices.put_mapping(index: Project.index_name, type: :project, body: project_fields)
client.indices.put_mapping(index: Project.index_name, type: :note, body: note_fields)
Project.__elasticsearch__.import
Note.searchable.import_with_parent
puts "Done".color(:green)
end
def batch_size
ENV.fetch('BATCH', 300).to_i
end
......
......@@ -6,6 +6,7 @@
"eslint-fix": "eslint --max-warnings 0 --ext .js --fix .",
"eslint-report": "eslint --max-warnings 0 --ext .js --format html --output-file ./eslint-report.html .",
"karma": "karma start config/karma.config.js --single-run",
"karma-coverage": "BABEL_ENV=coverage karma start config/karma.config.js --single-run",
"karma-start": "karma start config/karma.config.js",
"webpack": "webpack --config config/webpack.config.js",
"webpack-prod": "NODE_ENV=production webpack --config config/webpack.config.js"
......@@ -13,7 +14,8 @@
"dependencies": {
"babel-core": "^6.22.1",
"babel-loader": "^6.2.10",
"babel-preset-es2015": "^6.22.0",
"babel-plugin-transform-define": "^1.2.0",
"babel-preset-latest": "^6.24.0",
"babel-preset-stage-2": "^6.22.0",
"bootstrap-sass": "^3.3.6",
"compression-webpack-plugin": "^0.3.2",
......@@ -57,12 +59,5 @@
"karma-sourcemap-loader": "^0.3.7",
"karma-webpack": "^2.0.2",
"webpack-dev-server": "^2.3.0"
},
"nyc": {
"exclude": [
"spec/javascripts/test_bundle.js",
"spec/javascripts/**/*_spec.js",
"app/assets/javascripts/droplab/**/*"
]
}
}
}
\ No newline at end of file
require 'spec_helper'
describe 'GlobalSearch' do
let(:features) { %i(issues merge_requests repository builds) }
let(:non_member) { create :user }
let(:member) { create :user }
let(:guest) { create :user }
before do
stub_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
Gitlab::Elastic::Helper.create_empty_index
project.team << [member, :developer]
project.team << [guest, :guest]
end
after do
Gitlab::Elastic::Helper.delete_index
stub_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
context "Respect feature visibility levels" do
context "Private projects" do
let(:project) { create(:project, :private) }
# The feature can be disabled but the data may actually exist
it "does not find items if features are disabled" do
create_items(project, feature_settings(:disabled))
expect_no_items_to_be_found(member)
expect_no_items_to_be_found(guest)
expect_no_items_to_be_found(non_member)
expect_no_items_to_be_found(nil)
end
it "shows items to member only if features are enabled" do
create_items(project, feature_settings(:enabled))
expect_items_to_be_found(member)
expect_non_code_items_to_be_found(guest)
expect_no_items_to_be_found(non_member)
expect_no_items_to_be_found(nil)
end
end
context "Internal projects" do
let(:project) { create(:project, :internal) }
# The feature can be disabled but the data may actually exist
it "does not find items if features are disabled" do
create_items(project, feature_settings(:disabled))
expect_no_items_to_be_found(member)
expect_no_items_to_be_found(guest)
expect_no_items_to_be_found(non_member)
expect_no_items_to_be_found(nil)
end
it "shows items to member only if features are enabled" do
create_items(project, feature_settings(:enabled))
expect_items_to_be_found(member)
expect_items_to_be_found(guest)
expect_items_to_be_found(non_member)
expect_no_items_to_be_found(nil)
end
it "shows items to member only if features are private" do
create_items(project, feature_settings(:private))
expect_items_to_be_found(member)
expect_non_code_items_to_be_found(guest)
expect_no_items_to_be_found(non_member)
expect_no_items_to_be_found(nil)
end
end
context "Public projects" do
let(:project) { create(:project, :public) }
# The feature can be disabled but the data may actually exist
it "does not find items if features are disabled" do
create_items(project, feature_settings(:disabled))
expect_no_items_to_be_found(member)
expect_no_items_to_be_found(guest)
expect_no_items_to_be_found(non_member)
expect_no_items_to_be_found(nil)
end
it "finds items if features are enabled" do
create_items(project, feature_settings(:enabled))
expect_items_to_be_found(member)
expect_items_to_be_found(guest)
expect_items_to_be_found(non_member)
expect_items_to_be_found(nil)
end
it "shows items to member only if features are private" do
create_items(project, feature_settings(:private))
expect_items_to_be_found(member)
expect_non_code_items_to_be_found(guest)
expect_no_items_to_be_found(non_member)
expect_no_items_to_be_found(nil)
end
end
end
def create_items(project, feature_settings = nil)
Sidekiq::Testing.inline! do
project.project_feature.update!(feature_settings) if feature_settings
create :issue, title: 'term', project: project
create :merge_request, title: 'term', target_project: project, source_project: project
project.repository.index_blobs
project.repository.index_commits
Gitlab::Elastic::Helper.refresh_index
end
end
# access_level can be :disabled, :enabled or :private
def feature_settings(access_level)
Hash[features.collect { |k| ["#{k}_access_level", ProjectFeature.const_get(access_level.to_s.upcase)] }]
end
def expect_no_items_to_be_found(user)
results = search(user, 'term')
expect(results.issues_count).to eq(0)
expect(results.merge_requests_count).to eq(0)
expect(search(user, 'def').blobs_count).to eq(0)
expect(search(user, 'add').commits_count).to eq(0)
end
def expect_items_to_be_found(user)
results = search(user, 'term')
expect(results.issues_count).not_to eq(0)
expect(results.merge_requests_count).not_to eq(0)
expect(search(user, 'def').blobs_count).not_to eq(0)
expect(search(user, 'add').commits_count).not_to eq(0)
end
def expect_non_code_items_to_be_found(user)
results = search(guest, 'term')
expect(results.issues_count).not_to eq(0)
expect(results.merge_requests_count).to eq(0)
expect(search(guest, 'def').blobs_count).to eq(0)
expect(search(guest, 'add').commits_count).to eq(0)
end
def search(user, search)
Search::GlobalService.new(user, search: search).execute
end
end
......@@ -63,6 +63,14 @@ describe 'Issues csv', feature: true do
expect(csv.count).to eq 0
end
it 'uses array filters, such as label_name' do
issue.update!(labels: [idea_label])
request_csv("label_name[]" => 'Bug')
expect(csv.count).to eq 0
end
it 'avoids excessive database calls' do
control_count = ActiveRecord::QueryRecorder.new{ request_csv }.count
create_list(:labeled_issue,
......
......@@ -32,10 +32,11 @@ testsContext.keys().forEach(function (path) {
}
});
// workaround: include all source files to find files with 0% coverage
// see also https://github.com/deepsweet/istanbul-instrumenter-loader/issues/15
describe('Uncovered files', function () {
// the following files throw errors because of undefined variables
// if we're generating coverage reports, make sure to include all files so
// that we can catch files with 0% coverage
// see: https://github.com/deepsweet/istanbul-instrumenter-loader/issues/15
if (process.env.BABEL_ENV === 'coverage') {
// exempt these files from the coverage report
const troubleMakers = [
'./blob_edit/blob_edit_bundle.js',
'./cycle_analytics/components/stage_plan_component.js',
......@@ -48,21 +49,23 @@ describe('Uncovered files', function () {
'./network/branch_graph.js',
];
const sourceFiles = require.context('~', true, /^\.\/(?!application\.js).*\.js$/);
sourceFiles.keys().forEach(function (path) {
// ignore if there is a matching spec file
if (testsContext.keys().indexOf(`${path.replace(/\.js$/, '')}_spec`) > -1) {
return;
}
describe('Uncovered files', function () {
const sourceFiles = require.context('~', true, /\.js$/);
sourceFiles.keys().forEach(function (path) {
// ignore if there is a matching spec file
if (testsContext.keys().indexOf(`${path.replace(/\.js$/, '')}_spec`) > -1) {
return;
}
it(`includes '${path}'`, function () {
try {
sourceFiles(path);
} catch (err) {
if (troubleMakers.indexOf(path) === -1) {
expect(err).toBeNull();
it(`includes '${path}'`, function () {
try {
sourceFiles(path);
} catch (err) {
if (troubleMakers.indexOf(path) === -1) {
expect(err).toBeNull();
}
}
}
});
});
});
});
}
\ No newline at end of file
require 'spec_helper'
describe Gitlab::Geo::HealthCheck do
let!(:secondary) { create(:geo_node, :current) }
subject { described_class }
describe '.perform_checks' do
it 'returns an empty string when not running on a secondary node' do
allow(Gitlab::Geo).to receive(:secondary?) { false }
expect(subject.perform_checks).to be_blank
end
it 'returns an error when configuration file is missing for tracking DB' do
allow(Rails.configuration).to receive(:respond_to?).with(:geo_database) { false }
expect(subject.perform_checks).not_to be_blank
end
it 'returns an error when Geo database version does not match the latest migration version' do
allow(subject).to receive(:get_database_version) { 1 }
expect(subject.perform_checks).not_to be_blank
end
it 'returns an error when latest migration version does not match the Geo database version' do
allow(subject).to receive(:get_migration_version) { 1 }
expect(subject.perform_checks).not_to be_blank
end
end
end
......@@ -15,6 +15,7 @@ Issue:
- updated_by_id
- confidential
- deleted_at
- closed_at
- due_date
- moved_to_id
- lock_version
......
......@@ -11,9 +11,9 @@ describe Issue, elastic: true do
stub_application_setting(elasticsearch_search: false, elasticsearch_indexing: false)
end
it "searches issues" do
project = create :empty_project
let(:project) { create :empty_project }
it "searches issues" do
Sidekiq::Testing.inline! do
create :issue, title: 'bla-bla term', project: project
create :issue, description: 'bla-bla term', project: project
......@@ -31,7 +31,6 @@ describe Issue, elastic: true do
end
it "returns json with all needed elements" do
project = create :empty_project
issue = create :issue, project: project
expected_hash = issue.attributes.extract!('id', 'iid', 'title', 'description', 'created_at',
......
......@@ -54,6 +54,8 @@ describe Note, elastic: true do
id
note
project_id
noteable_type
noteable_id
created_at
updated_at
issue
......
......@@ -59,6 +59,16 @@ describe Project, elastic: true do
'last_activity_at'
)
expected_hash.merge!(
project.project_feature.attributes.extract!(
'issues_access_level',
'merge_requests_access_level',
'snippets_access_level',
'wiki_access_level',
'repository_access_level'
)
)
expected_hash['name_with_namespace'] = project.name_with_namespace
expected_hash['path_with_namespace'] = project.path_with_namespace
......
......@@ -30,17 +30,17 @@ describe Snippet, elastic: true do
it 'returns only public snippets when user is blank' do
result = described_class.elastic_search_code('password', options: { user: nil })
expect(result.total_count).to eq(2)
expect(result.records).to match_array [public_snippet, project_public_snippet]
expect(result.total_count).to eq(1)
expect(result.records).to match_array [public_snippet]
end
it 'returns only public and internal snippets for regular users' do
regular_user = create(:user)
it 'returns only public and internal personal snippets for non-members' do
non_member = create(:user)
result = described_class.elastic_search_code('password', options: { user: regular_user })
result = described_class.elastic_search_code('password', options: { user: non_member })
expect(result.total_count).to eq(4)
expect(result.records).to match_array [public_snippet, internal_snippet, project_public_snippet, project_internal_snippet]
expect(result.total_count).to eq(2)
expect(result.records).to match_array [public_snippet, internal_snippet]
end
it 'returns public, internal snippets, and project private snippets for project members' do
......@@ -56,8 +56,8 @@ describe Snippet, elastic: true do
it 'returns private snippets where the user is the author' do
result = described_class.elastic_search_code('password', options: { user: author })
expect(result.total_count).to eq(5)
expect(result.records).to match_array [public_snippet, internal_snippet, private_snippet, project_public_snippet, project_internal_snippet]
expect(result.total_count).to eq(3)
expect(result.records).to match_array [public_snippet, internal_snippet, private_snippet]
end
it 'returns all snippets for admins' do
......
......@@ -44,6 +44,34 @@ describe Issue, "Issuable" do
it { expect(described_class).to respond_to(:assigned) }
end
describe 'author_name' do
  # Issuable exposes #author_name by delegating to the author association.
  it 'is delegated to author' do
    expect(issue.author_name).to eq issue.author.name
  end

  it 'returns nil when author is nil' do
    # author_id is normally required, so skip validations to simulate an
    # orphaned issue (e.g. the author record was removed).
    issue.author_id = nil
    issue.save(validate: false)

    # be_nil is the idiomatic RSpec matcher here (RuboCop: RSpec/BeNil);
    # it passes exactly when the value is nil, same as `eq nil`.
    expect(issue.author_name).to be_nil
  end
end
describe 'assignee_name' do
  # Issuable exposes #assignee_name by delegating to the assignee association.
  it 'is delegated to assignee' do
    issue.update!(assignee: create(:user))

    expect(issue.assignee_name).to eq issue.assignee.name
  end

  it 'returns nil when assignee is nil' do
    # Skip validations so the record saves without an assignee.
    issue.assignee_id = nil
    issue.save(validate: false)

    # be_nil is the idiomatic RSpec matcher here (RuboCop: RSpec/BeNil);
    # it passes exactly when the value is nil, same as `eq nil`.
    expect(issue.assignee_name).to be_nil
  end
end
describe "before_save" do
describe "#update_cache_counts" do
context "when previous assignee exists" do
......
......@@ -37,6 +37,30 @@ describe Issue, models: true do
end
end
describe '#closed_at' do
  # Unfreeze time after each example so the frozen clock does not leak
  # into other specs.
  after do
    Timecop.return
  end

  # let! runs before each example, freezing the clock so the close
  # timestamp can be compared with an exact eq.
  # NOTE(review): declared after the `after` block — works, but the
  # ordering is unconventional; confirm intent before reordering.
  let!(:now) { Timecop.freeze(Time.now) }

  it 'sets closed_at to Time.now when issue is closed' do
    issue = create(:issue, state: 'opened')

    issue.close

    expect(issue.closed_at).to eq(now)
  end

  it 'sets closed_at to nil when issue is reopened' do
    issue = create(:issue, state: 'closed')

    issue.reopen

    expect(issue.closed_at).to be_nil
  end
end
describe '#to_reference' do
let(:namespace) { build(:namespace, path: 'sample-namespace') }
let(:project) { build(:empty_project, name: 'sample-project', namespace: namespace) }
......
......@@ -108,4 +108,12 @@ describe Issues::ExportCsvService, services: true do
expect(csv[0]['Labels']).to eq nil
end
end
it 'succeeds when author is non-existent' do
  # Point the issue at a missing user id, bypassing validations, to mimic
  # an author deleted after the issue was created.
  issue.author_id = 99999999
  issue.save(validate: false)

  # The export should leave the author columns blank instead of raising.
  # be_nil is the idiomatic matcher (RuboCop: RSpec/BeNil) and is
  # equivalent to `eq nil` here.
  expect(csv[0]['Author']).to be_nil
  expect(csv[0]['Author Username']).to be_nil
end
end
......@@ -18,6 +18,22 @@ describe Geo::GeoBackfillWorker, services: true do
subject.perform
end
# Simulates a node without the Geo tracking database configured: when
# Rails.configuration does not respond to :geo_database, the worker must
# bail out before building the backfill service.
it 'does not perform Geo::RepositoryBackfillService when tracking DB is not available' do
  # NOTE(review): stubbing respond_to? only for :geo_database makes any
  # other respond_to? call on Rails.configuration raise a strict-stub
  # error — confirm nothing else queries it during #perform.
  allow(Rails.configuration).to receive(:respond_to?).with(:geo_database) { false }

  expect(Geo::RepositoryBackfillService).not_to receive(:new)

  subject.perform
end
# Fixed grammar in the example description ("does not exists" -> "does
# not exist"); the description is only a test label, not asserted on.
it 'does not perform Geo::RepositoryBackfillService when primary node does not exist' do
  # Without a primary node there is nothing to backfill from.
  allow(Gitlab::Geo).to receive(:primary_node) { nil }

  expect(Geo::RepositoryBackfillService).not_to receive(:new)

  subject.perform
end
it 'does not perform Geo::RepositoryBackfillService when node is disabled' do
allow_any_instance_of(GeoNode).to receive(:enabled?) { false }
......
......@@ -13,12 +13,24 @@ describe GeoFileDownloadDispatchWorker do
subject { described_class.new }
describe '#perform' do
it 'does not schedule anything when node is disabled' do
it 'does not schedule anything when tracking DB is not available' do
create(:lfs_object, :with_file)
allow(Rails.configuration).to receive(:respond_to?).with(:geo_database) { false }
expect(GeoFileDownloadWorker).not_to receive(:perform_async)
subject.perform
end
it 'does not schedule anything when node is disabled' do
  create(:lfs_object, :with_file)

  # Disable the secondary Geo node before running the dispatcher.
  # ActiveRecord#update assigns the attribute and saves in one call,
  # equivalent to setting enabled = false and calling save.
  @secondary.update(enabled: false)

  expect(GeoFileDownloadWorker).not_to receive(:perform_async)

  subject.perform
end
......
......@@ -473,6 +473,13 @@ babel-plugin-transform-decorators@^6.22.0:
babel-template "^6.22.0"
babel-types "^6.22.0"
babel-plugin-transform-define@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-define/-/babel-plugin-transform-define-1.2.0.tgz#f036bda05162f29a542e434f585da1ccf1e7ec6a"
dependencies:
lodash.get "4.4.2"
traverse "0.6.6"
babel-plugin-transform-es2015-arrow-functions@^6.22.0:
version "6.22.0"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz#452692cb711d5f79dc7f85e440ce41b9f244d221"
......@@ -549,17 +556,17 @@ babel-plugin-transform-es2015-literals@^6.22.0:
dependencies:
babel-runtime "^6.22.0"
babel-plugin-transform-es2015-modules-amd@^6.22.0:
version "6.22.0"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.22.0.tgz#bf69cd34889a41c33d90dfb740e0091ccff52f21"
babel-plugin-transform-es2015-modules-amd@^6.24.0:
version "6.24.0"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.0.tgz#a1911fb9b7ec7e05a43a63c5995007557bcf6a2e"
dependencies:
babel-plugin-transform-es2015-modules-commonjs "^6.22.0"
babel-plugin-transform-es2015-modules-commonjs "^6.24.0"
babel-runtime "^6.22.0"
babel-template "^6.22.0"
babel-plugin-transform-es2015-modules-commonjs@^6.22.0:
version "6.23.0"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.23.0.tgz#cba7aa6379fb7ec99250e6d46de2973aaffa7b92"
babel-plugin-transform-es2015-modules-commonjs@^6.24.0:
version "6.24.0"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.24.0.tgz#e921aefb72c2cc26cb03d107626156413222134f"
dependencies:
babel-plugin-transform-strict-mode "^6.22.0"
babel-runtime "^6.22.0"
......@@ -574,11 +581,11 @@ babel-plugin-transform-es2015-modules-systemjs@^6.22.0:
babel-runtime "^6.22.0"
babel-template "^6.23.0"
babel-plugin-transform-es2015-modules-umd@^6.22.0:
version "6.23.0"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.23.0.tgz#8d284ae2e19ed8fe21d2b1b26d6e7e0fcd94f0f1"
babel-plugin-transform-es2015-modules-umd@^6.24.0:
version "6.24.0"
resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.0.tgz#fd5fa63521cae8d273927c3958afd7c067733450"
dependencies:
babel-plugin-transform-es2015-modules-amd "^6.22.0"
babel-plugin-transform-es2015-modules-amd "^6.24.0"
babel-runtime "^6.22.0"
babel-template "^6.23.0"
......@@ -669,9 +676,9 @@ babel-plugin-transform-strict-mode@^6.22.0:
babel-runtime "^6.22.0"
babel-types "^6.22.0"
babel-preset-es2015@^6.22.0:
version "6.22.0"
resolved "https://registry.yarnpkg.com/babel-preset-es2015/-/babel-preset-es2015-6.22.0.tgz#af5a98ecb35eb8af764ad8a5a05eb36dc4386835"
babel-preset-es2015@^6.24.0:
version "6.24.0"
resolved "https://registry.yarnpkg.com/babel-preset-es2015/-/babel-preset-es2015-6.24.0.tgz#c162d68b1932696e036cd3110dc1ccd303d2673a"
dependencies:
babel-plugin-check-es2015-constants "^6.22.0"
babel-plugin-transform-es2015-arrow-functions "^6.22.0"
......@@ -684,10 +691,10 @@ babel-preset-es2015@^6.22.0:
babel-plugin-transform-es2015-for-of "^6.22.0"
babel-plugin-transform-es2015-function-name "^6.22.0"
babel-plugin-transform-es2015-literals "^6.22.0"
babel-plugin-transform-es2015-modules-amd "^6.22.0"
babel-plugin-transform-es2015-modules-commonjs "^6.22.0"
babel-plugin-transform-es2015-modules-amd "^6.24.0"
babel-plugin-transform-es2015-modules-commonjs "^6.24.0"
babel-plugin-transform-es2015-modules-systemjs "^6.22.0"
babel-plugin-transform-es2015-modules-umd "^6.22.0"
babel-plugin-transform-es2015-modules-umd "^6.24.0"
babel-plugin-transform-es2015-object-super "^6.22.0"
babel-plugin-transform-es2015-parameters "^6.22.0"
babel-plugin-transform-es2015-shorthand-properties "^6.22.0"
......@@ -698,6 +705,27 @@ babel-preset-es2015@^6.22.0:
babel-plugin-transform-es2015-unicode-regex "^6.22.0"
babel-plugin-transform-regenerator "^6.22.0"
babel-preset-es2016@^6.22.0:
version "6.22.0"
resolved "https://registry.yarnpkg.com/babel-preset-es2016/-/babel-preset-es2016-6.22.0.tgz#b061aaa3983d40c9fbacfa3743b5df37f336156c"
dependencies:
babel-plugin-transform-exponentiation-operator "^6.22.0"
babel-preset-es2017@^6.22.0:
version "6.22.0"
resolved "https://registry.yarnpkg.com/babel-preset-es2017/-/babel-preset-es2017-6.22.0.tgz#de2f9da5a30c50d293fb54a0ba15d6ddc573f0f2"
dependencies:
babel-plugin-syntax-trailing-function-commas "^6.22.0"
babel-plugin-transform-async-to-generator "^6.22.0"
babel-preset-latest@^6.24.0:
version "6.24.0"
resolved "https://registry.yarnpkg.com/babel-preset-latest/-/babel-preset-latest-6.24.0.tgz#a68d20f509edcc5d7433a48dfaebf7e4f2cd4cb7"
dependencies:
babel-preset-es2015 "^6.24.0"
babel-preset-es2016 "^6.22.0"
babel-preset-es2017 "^6.22.0"
babel-preset-stage-2@^6.22.0:
version "6.22.0"
resolved "https://registry.yarnpkg.com/babel-preset-stage-2/-/babel-preset-stage-2-6.22.0.tgz#ccd565f19c245cade394b21216df704a73b27c07"
......@@ -2900,6 +2928,10 @@ lodash.deburr@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/lodash.deburr/-/lodash.deburr-4.1.0.tgz#ddb1bbb3ef07458c0177ba07de14422cb033ff9b"
lodash.get@4.4.2:
version "4.4.2"
resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
lodash.get@^3.7.0:
version "3.7.0"
resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-3.7.0.tgz#3ce68ae2c91683b281cc5394128303cbf75e691f"
......@@ -4271,6 +4303,10 @@ tough-cookie@~2.3.0:
dependencies:
punycode "^1.4.1"
traverse@0.6.6:
version "0.6.6"
resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.6.6.tgz#cbdf560fd7b9af632502fed40f918c157ea97137"
trim-right@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003"
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment