Commit 20450649 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 3f0f13c6
@@ -24,6 +24,7 @@ When adding migrations:

- [ ] Added a `down` method so the migration can be reverted
- [ ] Added the output of the migration(s) to the MR body
- [ ] Added tests for the migration in `spec/migrations` if necessary (e.g. when migrating data)
- [ ] Added a rollback procedure: include either a rollback procedure or a description of how to roll back the changes (see the example below)
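For example, a migration with an explicit `down` method might look like the following minimal sketch; the class, table, and column names here are hypothetical and not taken from this commit:

```ruby
# frozen_string_literal: true

# Illustrative only: a migration that can be reverted with `rake db:migrate:down`
# because it defines an explicit `down` counterpart to `up`.
class AddExampleColumnToProjects < ActiveRecord::Migration[5.2]
  DOWNTIME = false

  def up
    add_column :projects, :example_setting, :boolean, default: false, null: false
  end

  def down
    remove_column :projects, :example_setting
  end
end
```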

When adding or modifying queries to improve performance:
...
@@ -5,102 +5,103 @@ import Api from './api';

import ProjectSelectComboButton from './project_select_combo_button';
import { s__ } from './locale';

const projectSelect = () => {
  $('.ajax-project-select').each(function(i, select) {
    var placeholder;
    const simpleFilter = $(select).data('simpleFilter') || false;
    this.groupId = $(select).data('groupId');
    this.includeGroups = $(select).data('includeGroups');
    this.allProjects = $(select).data('allProjects') || false;
    this.orderBy = $(select).data('orderBy') || 'id';
    this.withIssuesEnabled = $(select).data('withIssuesEnabled');
    this.withMergeRequestsEnabled = $(select).data('withMergeRequestsEnabled');
    this.withShared =
      $(select).data('withShared') === undefined ? true : $(select).data('withShared');
    this.includeProjectsInSubgroups = $(select).data('includeProjectsInSubgroups') || false;
    this.allowClear = $(select).data('allowClear') || false;

    placeholder = s__('ProjectSelect|Search for project');
    if (this.includeGroups) {
      placeholder += s__('ProjectSelect| or group');
    }

    $(select).select2({
      placeholder,
      minimumInputLength: 0,
      query: (function(_this) {
        return function(query) {
          var finalCallback, projectsCallback;
          finalCallback = function(projects) {
            var data;
            data = {
              results: projects,
            };
            return query.callback(data);
          };
          if (_this.includeGroups) {
            projectsCallback = function(projects) {
              var groupsCallback;
              groupsCallback = function(groups) {
                var data;
                data = groups.concat(projects);
                return finalCallback(data);
              };
              return Api.groups(query.term, {}, groupsCallback);
            };
          } else {
            projectsCallback = finalCallback;
          }
          if (_this.groupId) {
            return Api.groupProjects(
              _this.groupId,
              query.term,
              {
                with_issues_enabled: _this.withIssuesEnabled,
                with_merge_requests_enabled: _this.withMergeRequestsEnabled,
                with_shared: _this.withShared,
                include_subgroups: _this.includeProjectsInSubgroups,
              },
              projectsCallback,
            );
          } else {
            return Api.projects(
              query.term,
              {
                order_by: _this.orderBy,
                with_issues_enabled: _this.withIssuesEnabled,
                with_merge_requests_enabled: _this.withMergeRequestsEnabled,
                membership: !_this.allProjects,
              },
              projectsCallback,
            );
          }
        };
      })(this),
      id(project) {
        if (simpleFilter) return project.id;
        return JSON.stringify({
          name: project.name,
          url: project.web_url,
        });
      },
      text(project) {
        return project.name_with_namespace || project.name;
      },
      initSelection(el, callback) {
        return Api.project(el.val()).then(({ data }) => callback(data));
      },
      allowClear: this.allowClear,
      dropdownCssClass: 'ajax-project-dropdown',
    });

    if (simpleFilter) return select;
    return new ProjectSelectComboButton(select);
  });
};

export default () =>
  import(/* webpackChunkName: 'select2' */ 'select2/select2')
    .then(projectSelect)
    .catch(() => {});
# frozen_string_literal: true

module ExportHelper
  # An EE-overridable list of descriptions
  def project_export_descriptions
    [
      _('Project and wiki repositories'),
      _('Project uploads'),
      _('Project configuration, including services'),
      _('Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, and other project entities'),
      _('LFS objects'),
      _('Issue Boards')
    ]
  end
end

ExportHelper.prepend_if_ee('EE::ExportHelper')
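The `prepend_if_ee` call is what lets EE extend this list. A minimal sketch of what an EE-side override could look like; the module name comes from the argument above, but the extra entry is purely hypothetical:

```ruby
# Hypothetical sketch of an EE override; only the EE::ExportHelper name is taken
# from the prepend_if_ee call above, the added description is invented.
module EE
  module ExportHelper
    def project_export_descriptions
      super + [_('Design management files and data')]
    end
  end
end
```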
@@ -1098,6 +1098,8 @@ class Repository

    raw.create_repository
    after_create

    true
  end

  def blobs_metadata(paths, ref = 'HEAD')
...
@@ -12,6 +12,8 @@ module Projects

      private

      attr_accessor :shared

      def execute_after_export_action(after_export_strategy)
        return unless after_export_strategy
@@ -21,50 +23,54 @@ module Projects
      end

      def save_all!
        if save_exporters
          Gitlab::ImportExport::Saver.save(project: project, shared: shared)
          notify_success
        else
          cleanup_and_notify_error!
        end
      end

      def save_exporters
        exporters.all?(&:save)
      end

      def exporters
        [version_saver, avatar_saver, project_tree_saver, uploads_saver, repo_saver, wiki_repo_saver, lfs_saver]
      end

      def version_saver
        Gitlab::ImportExport::VersionSaver.new(shared: shared)
      end

      def avatar_saver
        Gitlab::ImportExport::AvatarSaver.new(project: project, shared: shared)
      end

      def project_tree_saver
        Gitlab::ImportExport::ProjectTreeSaver.new(project: project, current_user: current_user, shared: shared, params: params)
      end

      def uploads_saver
        Gitlab::ImportExport::UploadsSaver.new(project: project, shared: shared)
      end

      def repo_saver
        Gitlab::ImportExport::RepoSaver.new(project: project, shared: shared)
      end

      def wiki_repo_saver
        Gitlab::ImportExport::WikiRepoSaver.new(project: project, shared: shared)
      end

      def lfs_saver
        Gitlab::ImportExport::LfsSaver.new(project: project, shared: shared)
      end

      def cleanup_and_notify_error
        Rails.logger.error("Import/Export - Project #{project.name} with ID: #{project.id} export error - #{shared.errors.join(', ')}") # rubocop:disable Gitlab/RailsLogger

        FileUtils.rm_rf(shared.export_path)
        notify_error
      end
@@ -72,7 +78,7 @@ module Projects
      def cleanup_and_notify_error!
        cleanup_and_notify_error

        raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
      end

      def notify_success
@@ -80,8 +86,10 @@ module Projects
      end

      def notify_error
        notification_service.project_not_exported(project, current_user, shared.errors)
      end
    end
  end
end

Projects::ImportExport::ExportService.prepend_if_ee('EE::Projects::ImportExport::ExportService')
@@ -10,12 +10,8 @@

%p.append-bottom-0
%p= _('The following items will be exported:')
%ul
  - project_export_descriptions.each do |desc|
    %li= desc
%p= _('The following items will NOT be exported:')
%ul
  %li= _('Job traces and artifacts')
...
# frozen_string_literal: true

class DesignIssueIdNullable < ActiveRecord::Migration[5.2]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  def change
    change_column_null :design_management_designs, :issue_id, true
  end
end
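Because `change_column_null` is invertible, Rails can reverse this `change` method automatically; the rollback is equivalent to the hypothetical explicit `down` sketched below, and it only succeeds if no `NULL` values have been written to `issue_id` in the meantime:

```ruby
# Sketch of the implicit rollback for the migration above (not part of the commit).
# Restoring NOT NULL fails if any design rows now have a NULL issue_id.
def down
  change_column_null :design_management_designs, :issue_id, false
end
```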
@@ -1217,7 +1217,7 @@ ActiveRecord::Schema.define(version: 2019_09_26_041216) do

  create_table "design_management_designs", force: :cascade do |t|
    t.integer "project_id", null: false
    t.integer "issue_id"
    t.string "filename", null: false
    t.index ["issue_id", "filename"], name: "index_design_management_designs_on_issue_id_and_filename", unique: true
    t.index ["project_id"], name: "index_design_management_designs_on_project_id"
...
@@ -31,8 +31,8 @@ including (but not limited to):

- System statistics such as the process' memory usage and open file descriptors.
- Ruby garbage collection statistics.

Metrics data is written to [InfluxDB](https://www.influxdata.com/products/influxdb-overview/)
over [UDP][influxdb-udp]. Stored data can be visualized using [Grafana](https://grafana.com) or any other application that
supports reading data from InfluxDB. Alternatively data can be queried using the
InfluxDB CLI.
@@ -67,6 +67,4 @@ the actual interval can be anywhere between 7.5 and 22.5. The interval is
re-generated for every sampling run instead of being generated once and re-used
for the duration of the process' lifetime.
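As an illustration of that jittered interval, a minimal sketch assuming a 15-second base interval (this is not GitLab's actual sampler code, just the behaviour described above):

```ruby
# Sketch only: pick a fresh interval in [7.5, 22.5] seconds before every
# sampling run instead of reusing one value for the process' lifetime.
base_interval = 15.0

loop do
  sleep(rand((base_interval * 0.5)..(base_interval * 1.5)))
  # ... collect and flush one sample here ...
end
```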

[influxdb-udp]: https://docs.influxdata.com/influxdb/v0.9/write_protocols/udp/
@@ -38,8 +38,8 @@ InfluxDB needs to be restarted.

### Storage Engine

InfluxDB comes with different storage engines and as of InfluxDB 0.9.5 a new
storage engine is available, called [TSM Tree](https://www.influxdata.com/blog/new-storage-engine-time-structured-merge-tree/).
All users **must** use the new `tsm1` storage engine as this [will be the default engine][tsm1-commit] in
upcoming InfluxDB releases.

Make sure you have the following in your configuration file:
@@ -188,6 +188,5 @@ Read more on:

[influxdb cli]: https://docs.influxdata.com/influxdb/v0.9/tools/shell/
[udp]: https://docs.influxdata.com/influxdb/v0.9/write_protocols/udp/
[influxdb]: https://www.influxdata.com/products/influxdb-overview/
[tsm1-commit]: https://github.com/influxdata/influxdb/commit/15d723dc77651bac83e09e2b1c94be480966cb0d
[influx-admin]: https://docs.influxdata.com/influxdb/v0.9/administration/authentication_and_authorization/#create-a-new-admin-user
@@ -21,7 +21,7 @@ Prometheus works by periodically connecting to data sources and collecting their

performance metrics via the [various exporters](#bundled-software-metrics). To view
and work with the monitoring data, you can either
[connect directly to Prometheus](#viewing-performance-metrics) or utilize a
dashboard tool like [Grafana](https://grafana.com).

## Configuring Prometheus
@@ -199,8 +199,8 @@ having [NGINX proxy it][nginx-custom-config].

The performance data collected by Prometheus can be viewed directly in the
Prometheus console or through a compatible dashboard tool.

The Prometheus interface provides a [flexible query language](https://prometheus.io/docs/prometheus/latest/querying/basics/)
to work with the collected data where you can visualize their output.
For a more fully featured dashboard, Grafana can be used and has
[official support for Prometheus][prom-grafana].
@@ -274,7 +274,7 @@ The GitLab exporter allows you to measure various GitLab metrics, pulled from Re

> Introduced in GitLab 9.0.
> Pod monitoring introduced in GitLab 9.4.

If your GitLab server is running within Kubernetes, Prometheus will collect metrics from the Nodes and [annotated Pods](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#kubernetes_sd_config) in the cluster, including performance data on each container. This is particularly helpful if your CI/CD environments run in the same cluster, as you can use the [Prometheus project integration][prometheus integration] to monitor them.

To disable the monitoring of Kubernetes:
@@ -288,16 +288,11 @@ To disable the monitoring of Kubernetes:

1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
   take effect.

[hsts]: https://en.wikipedia.org/wiki/HTTP_Strict_Transport_Security
[multi-user-prometheus]: https://gitlab.com/gitlab-org/multi-user-prometheus
[nginx-custom-config]: https://docs.gitlab.com/omnibus/settings/nginx.html#inserting-custom-nginx-settings-into-the-gitlab-server-block
[prometheus]: https://prometheus.io
[prom-grafana]: https://prometheus.io/docs/visualization/grafana/
[reconfigure]: ../../restart_gitlab.md#omnibus-gitlab-reconfigure
[1261]: https://gitlab.com/gitlab-org/omnibus-gitlab/merge_requests/1261
[prometheus integration]: ../../../user/project/integrations/prometheus.md
@@ -877,6 +877,6 @@ The above image shows:

- The HEAD request to the AWS bucket reported a 403 Unauthorized.

What does this mean? This strongly suggests that the S3 user does not have the right
[permissions to perform a HEAD request](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html).
The solution: check the [IAM permissions again](https://docs.docker.com/registry/storage-drivers/s3/).
Once the right permissions are set, the error will go away.
@@ -94,17 +94,18 @@ and details for a database reviewer:

- Check queries timing (if any): queries executed in a migration
  need to fit comfortably within `15s` - preferably much less than that - on GitLab.com.
- Check [background migrations](background_migrations.md):
  - Establish a time estimate for execution
  - They should only be used when migrating data in larger tables.
    - If a single `update` is below `1s` the query can be placed
      directly in a regular migration (inside `db/migrate`).
  - Review queries (for example, make sure batch sizes are fine)
  - Because execution time can be longer than for a regular migration,
    it's suggested to treat background migrations as post migrations:
    place them in `db/post_migrate` instead of `db/migrate`. Keep in mind
    that post migrations are executed post-deployment in production.
  - Check [timing guidelines for migrations](#timing-guidelines-for-migrations)
- Check that migrations are reversible and implement a `#down` method (see the sketch after this list)
  - Data migrations should be reversible too, or come with a description of how to reverse them, when possible.
    This applies to all types of migrations (regular, post-deploy, background).
- Query performance
  - Check for any obviously complex queries and queries the author specifically
    points out for review (if any)
...
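For instance, a reversible data migration placed in `db/post_migrate` could look like the following sketch; the model, column, and values are hypothetical and not taken from this page:

```ruby
# frozen_string_literal: true

# Hypothetical post-deployment data migration with an explicit #down,
# so the data change can be reversed as well as reviewed.
class BackfillExampleStateOnIssues < ActiveRecord::Migration[5.2]
  DOWNTIME = false

  # Migration-local model so the migration does not depend on app code.
  class Issue < ActiveRecord::Base
    self.table_name = 'issues'
  end

  def up
    Issue.where(example_state: nil).in_batches(of: 1_000) do |batch|
      batch.update_all(example_state: 'unknown')
    end
  end

  def down
    Issue.where(example_state: 'unknown').in_batches(of: 1_000) do |batch|
      batch.update_all(example_state: nil)
    end
  end
end
```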
@@ -83,7 +83,7 @@ the above methods, provided the cloud provider supports it.

- [Install GitLab on Google Cloud Platform](google_cloud_platform/index.md): Install Omnibus GitLab on a VM in GCP.
- [Install GitLab on Azure](azure/index.md): Install Omnibus GitLab from Azure Marketplace.
- [Install GitLab on OpenShift](https://docs.gitlab.com/charts/installation/cloud/openshift.html): Install GitLab on OpenShift by using GitLab's Helm charts.
- [Install GitLab on DC/OS](https://d2iq.com/blog/gitlab-dcos): Install GitLab on Mesosphere DC/OS via the [GitLab-Mesosphere integration](https://about.gitlab.com/2016/09/16/announcing-gitlab-and-mesosphere/).
- [Install GitLab on DigitalOcean](https://about.gitlab.com/2016/04/27/getting-started-with-gitlab-and-digitalocean/): Install Omnibus GitLab on DigitalOcean.
- _Testing only!_ [DigitalOcean and Docker Machine](digitaloceandocker.md):
  Quickly test any version of GitLab on DigitalOcean using Docker Machine.
@@ -43,7 +43,7 @@ Below is a diagram of the recommended architecture.

Here's a list of the AWS services we will use, with links to pricing information:

- **EC2**: GitLab will be deployed on shared hardware which means
  [on-demand pricing](https://aws.amazon.com/ec2/pricing/on-demand/)
  will apply. If you want to run it on a dedicated or reserved instance,
  consult the [EC2 pricing page](https://aws.amazon.com/ec2/pricing/) for more
  information on the cost.
@@ -222,10 +222,10 @@ Now, it's time to create the database:

1. For the size, let's select a `t2.medium` instance.
1. Multi-AZ-deployment is recommended as redundancy, so choose "Create
   replica in different zone". Read more at
   [High Availability (Multi-AZ)](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
1. A Provisioned IOPS (SSD) storage type is best suited for HA (though you can
   choose a General Purpose (SSD) to reduce the costs). Read more about it at
   [Storage for Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html).
1. The rest of the settings on this page request a DB instance identifier, username
   and a master password. We've chosen to use `gitlab-db-ha`, `gitlab` and a
@@ -668,7 +668,7 @@ to request additional material:

  about administering your GitLab instance.
- [Upload a license](../../user/admin_area/license.md):
  Activate all GitLab Enterprise Edition functionality with a license.
- [Pricing](https://about.gitlab.com/pricing/): Pricing for the different tiers.

<!-- ## Troubleshooting
...
@@ -38,7 +38,7 @@ create SQL Databases, author websites, and perform lots of other cloud tasks.

## Create New VM

The [Azure Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/) is an online store for pre-configured applications and
services which have been optimized for the cloud by software vendors like GitLab,
available on the Azure Marketplace as pre-configured solutions. In this tutorial
we will install GitLab Community Edition, but for GitLab Enterprise Edition you
@@ -108,7 +108,7 @@ ahead and select this one, but please choose the size which best meets your own

> **Note:** be aware that whilst your VM is active (known as "allocated"), it will incur
"compute charges" which, ultimately, you will be billed for. So, even if you're using the
free trial credits, you'll likely want to learn
[how to properly shutdown an Azure VM to save money](https://buildazure.com/properly-shutdown-azure-vm-to-save-money/).

Go ahead and click your chosen size, then click **"Select"** when you're ready to proceed to the
next step.
@@ -329,7 +329,7 @@ To perform an update, we need to connect directly to our Azure VM instance and r

from the terminal. Our Azure VM is actually a server running Linux (Ubuntu), so we'll need to
connect to it using SSH ([Secure Shell][SSH]).

If you're running Windows, you'll need to connect using [PuTTY](https://www.putty.org) or an equivalent Windows SSH client.
If you're running Linux or macOS, then you already have an SSH client installed.

> **Note:**
@@ -337,7 +337,7 @@ If you're running Linux or macOS, then you already have an SSH client installed.

> - Remember that you will need to login with the username and password you specified
>   [when you created](#basics) your Azure VM
> - If you need to reset your VM password, read
>   [how to reset SSH credentials for a user on an Azure VM](https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshoot-ssh-connection).

#### SSH from the command-line
@@ -356,7 +356,7 @@ Provide your password at the prompt to authenticate.

#### SSH from Windows (PuTTY)

If you're using [PuTTY](https://www.putty.org) in Windows as your [SSH] client, then you might want to take a quick
read on [using PuTTY in Windows][Using-SSH-In-Putty].

### Updating GitLab
@@ -416,12 +416,12 @@ Check out our other [Technical Articles](../../articles/index.md) or browse the

- [GitLab Enterprise Edition][EE]
- [Microsoft Azure][Azure]
- [Azure - Free Account FAQ][Azure-Free-Account-FAQ]
- [Azure - Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/)
- [Azure Portal][Azure-Portal]
- [Azure - Pricing Calculator][Azure-Pricing-Calculator]
- [Azure - Troubleshoot SSH Connections to an Azure Linux VM](https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshoot-ssh-connection)
- [Azure - Properly Shutdown an Azure VM](https://buildazure.com/properly-shutdown-azure-vm-to-save-money/)
- [SSH], [PuTTY](https://www.putty.org) and [Using SSH in PuTTY][Using-SSH-In-Putty]

[Original-Blog-Post]: https://about.gitlab.com/2016/07/13/how-to-setup-a-gitlab-instance-on-microsoft-azure/ "How to Set up a GitLab Instance on Microsoft Azure"
[CE]: https://about.gitlab.com/features/
@@ -430,16 +430,12 @@ Check out our other [Technical Articles](../../articles/index.md) or browse the

[Azure-Troubleshoot-Linux-VM]: https://docs.microsoft.com/en-us/azure/virtual-machines/linux/troubleshoot-app-connection "Troubleshoot application connectivity issues on a Linux virtual machine in Azure"
[Azure-IP-Address-Types]: https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-ip-addresses-overview-arm "IP address types and allocation methods in Azure"
[Azure-How-To-Open-Ports]: https://docs.microsoft.com/en-us/azure/virtual-machines/windows/nsg-quickstart-portal "How to open ports to a virtual machine with the Azure portal"
[Azure]: https://azure.microsoft.com/en-us/
[Azure-Free-Account-FAQ]: https://azure.microsoft.com/en-us/free/free-account-faq/
[Azure-Portal]: https://portal.azure.com
[Azure-Pricing-Calculator]: https://azure.microsoft.com/en-us/pricing/calculator/
[SSH]: https://en.wikipedia.org/wiki/Secure_Shell
[Using-SSH-In-Putty]: https://mediatemple.net/community/products/dv/204404604/using-ssh-in-putty-

<!-- ## Troubleshooting
...
@@ -14,9 +14,9 @@ locally on either macOS or Linux.

### On macOS

#### Install Docker Desktop

- <https://www.docker.com/products/docker-desktop>

### On Linux
@@ -115,7 +115,7 @@ docker-machine ip gitlab-test-env-do

# example output: 192.168.151.134
```

Browse to: `http://192.168.151.134:8888/`.

#### Execute interactive shell/edit configuration
...
@@ -72,7 +72,7 @@ By default, Google assigns an ephemeral IP to your instance. It is strongly

recommended to assign a static IP if you are going to use GitLab in production
and use a domain name as we'll see below.

Read Google's documentation on how to [promote an ephemeral IP address](https://cloud.google.com/compute/docs/ip-addresses/reserve-static-external-ip-address#promote_ephemeral_ip).

### Using a domain name
@@ -133,9 +133,7 @@ Kerberos, etc. Here are some documents you might be interested in reading:

- [GitLab Container Registry configuration](../../administration/packages/container_registry.md)

[freetrial]: https://console.cloud.google.com/freetrial "GCP free trial"
[gcp]: https://cloud.google.com/ "Google Cloud Platform"
[req]: ../requirements.md "GitLab hardware and software requirements"
[ssh]: https://cloud.google.com/compute/docs/instances/connecting-to-instance "Connecting to Linux Instances"
[omni-smtp]: https://docs.gitlab.com/omnibus/settings/smtp.html#smtp-settings "Omnibus GitLab SMTP settings"
...
@@ -10,7 +10,7 @@ other installation options, see the [main installation page](README.md).

It was created for and tested on **Debian/Ubuntu** operating systems.
Read [requirements.md](requirements.md) for hardware and operating system requirements.
If you want to install on RHEL/CentOS, we recommend using the
[Omnibus packages](https://about.gitlab.com/install/).

This guide is long because it covers many cases and includes all commands you
need; this is [one of the few installation scripts that actually works out of the box](https://twitter.com/robinvdvleuten/status/424163226532986880).
@@ -25,7 +25,7 @@ following the

## Consider the Omnibus package installation

Since an installation from source is a lot of work and error prone we strongly recommend the fast and reliable [Omnibus package installation](https://about.gitlab.com/install/) (deb/rpm).

One reason the Omnibus package is more reliable is its use of Runit to restart any of the GitLab processes in case one crashes.
On heavily used GitLab instances the memory usage of the Sidekiq background worker will grow over time.
@@ -205,7 +205,7 @@ The Ruby interpreter is required to run GitLab.

**Note:** The current supported Ruby (MRI) version is 2.6.x. GitLab 12.2
dropped support for Ruby 2.5.x.

The use of Ruby version managers such as [RVM], [rbenv](https://github.com/rbenv/rbenv) or [chruby] with GitLab
in production frequently leads to hard-to-diagnose problems. For example,
GitLab Shell is called from OpenSSH, and having a version manager can prevent
pushing and pulling over SSH. Version managers are not supported and we strongly
@@ -532,7 +532,7 @@ sudo -u git -H chmod o-rwx config/database.yml

### Install Gems

NOTE: **Note:**
As of Bundler 1.5.2, you can invoke `bundle install -jN` (where `N` is the number of your processor cores) and enjoy parallel gems installation with measurable difference in completion time (~60% faster). Check the number of your cores with `nproc`. For more information, see this [post](https://thoughtbot.com/blog/parallel-gem-installing-using-bundler).

Make sure you have `bundle` (run `bundle -v`):
@@ -1025,5 +1025,4 @@ sudo yum groupinstall 'Development Tools'

```

[RVM]: https://rvm.io/ "RVM Homepage"
[chruby]: https://github.com/postmodern/chruby "chruby on GitHub"
@@ -38,7 +38,7 @@ test OpenShift easily:

- [VirtualBox]
- [Vagrant]
- [OpenShift Client](https://docs.okd.io/latest/cli_reference/get_started_cli.html) (`oc` for short)

It is also important to mention that for the purposes of this tutorial, the
latest Origin release is used:
@@ -92,7 +92,7 @@ Now that OpenShift is set up, let's see how the web console looks like.

Once Vagrant finishes its thing with the VM, you will be presented with a
message which has some important information. One of them is the IP address
of the deployed OpenShift platform and in particular `https://10.2.2.2:8443/console/`.

Open this link with your browser and accept the self-signed certificate in
order to proceed.
@@ -101,7 +101,7 @@ landing page looks like:

![openshift web console](img/web-console.png)

You can see that a number of [projects](https://docs.okd.io/latest/dev_guide/projects.html) are already created for testing purposes.
If you head over to the `openshift-infra` project, a number of services with their
respective pods are there to explore.
@@ -109,15 +109,15 @@ respective pods are there to explore.

![openshift web console](img/openshift-infra-project.png)

We are not going to explore the whole interface, but if you want to learn about
the key concepts of OpenShift, read the [core concepts reference](https://docs.okd.io/latest/architecture/core_concepts/index.html)
in the official documentation.

### Explore the OpenShift CLI

OpenShift Client (`oc`) is a powerful CLI tool that talks to the OpenShift API
and performs pretty much everything you can do from the web UI and much more.

Assuming you have [installed](https://docs.okd.io/latest/cli_reference/get_started_cli.html) it, let's explore some of its main
functionalities.

Let's first see the version of `oc`:
@@ -174,7 +174,7 @@ The last command should spit a bunch of information about the statuses of the

pods and the services, which if you look closely is what we encountered in the
second image when we explored the web console.

You can always read more about `oc` in the [OpenShift CLI documentation](https://docs.okd.io/latest/cli_reference/get_started_cli.html).

### Troubleshooting the all-in-one VM
@@ -250,7 +250,7 @@ The next step is to import the OpenShift template for GitLab.

### Import the template

The [template](https://docs.okd.io/latest/architecture/core_concepts/templates.html) is basically a JSON file which describes a set of
related object definitions to be created together, as well as a set of
parameters for those objects.
@@ -318,7 +318,7 @@ password for PostgreSQL, it will be created automatically.

The `gitlab.apps.10.2.2.2.nip.io` hostname that is used by default will
resolve to the host with IP `10.2.2.2` which is the IP our VM uses. It is a
trick to have distinct FQDNs pointing to services that are on our local network.
Read more on how this works at <https://nip.io>.

Now that we configured this, let's see how to manage and scale GitLab.
@@ -355,7 +355,7 @@ Navigate back to the **Overview** and hopefully all pods will be up and running.

![GitLab running](img/gitlab-running.png)

Congratulations! You can now navigate to your new shiny GitLab instance by
visiting `http://gitlab.apps.10.2.2.2.nip.io` where you will be asked to
change the root user password. Login using `root` as username and providing the
password you just set, and start using GitLab!
@@ -366,7 +366,7 @@ of resources, you'd be happy to know that you can scale up with the push of a

button.

In the **Overview** page just click the up arrow button in the pod where
GitLab is. The change is instant and you can see the number of [replicas](https://docs.okd.io/latest/architecture/core_concepts/deployments.html#replication-controllers) now
running scaled to 2.

![GitLab scale](img/gitlab-scale.png)
@@ -384,7 +384,7 @@ scale up. If a pod is in pending state for too long, you can navigate to

### Scale GitLab using the `oc` CLI

With `oc`, it is super easy to scale up the replicas of a pod. You may want to
skim through the [basic CLI operations](https://docs.okd.io/latest/cli_reference/basic_cli_operations.html) to get a taste of how the CLI
commands are used. Pay extra attention to the object types as we will use some
of them and their abbreviated versions below.
@@ -457,7 +457,7 @@ In case you were wondering whether there is an option to autoscale a pod based

on the resources of your server, the answer is yes, of course there is.

We will not expand on this matter, but feel free to read the documentation on
OpenShift's website about [autoscaling](https://docs.okd.io/latest/dev_guide/pod_autoscaling.html).

## Current limitations
@@ -472,7 +472,7 @@ bother us. In any case, it is something to keep in mind when deploying GitLab

on a production cluster.

In order to deploy GitLab on a production cluster, you will need to assign the
GitLab service account to the `anyuid` [Security Context Constraints](https://docs.okd.io/latest/admin_guide/manage_scc.html).

For OpenShift v3.0, you will need to do this manually:
@@ -505,25 +505,16 @@ application and you are done. You are ready to login to your new GitLab instance

And remember that in this tutorial we just scratched the surface of what Origin
is capable of. As always, you can refer to the detailed
[documentation](https://docs.okd.io) to learn more about deploying your own OpenShift
PaaS and managing your applications with the ease of containers.

[RedHat]: https://www.redhat.com/en "RedHat website"
[vm-new]: https://app.vagrantup.com/openshift/boxes/origin-all-in-one "Official OpenShift Vagrant box on Vagrant Cloud"
[template]: https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/docker/openshift-template.json "OpenShift template for GitLab"
[Docker]: https://www.docker.com "Docker website"
[VirtualBox]: https://www.virtualbox.org/wiki/Downloads "VirtualBox downloads"
[Vagrant]: https://www.vagrantup.com/downloads.html "Vagrant downloads"
[old-post]: https://blog.openshift.com/deploy-gitlab-openshift/ "Old post - Deploy GitLab on OpenShift"
[line]: https://gitlab.com/gitlab-org/omnibus-gitlab/blob/658c065c8d022ce858dd63eaeeadb0b2ddc8deea/docker/openshift-template.json#L239 "GitLab - OpenShift template"
[oc-gh]: https://github.com/openshift/origin/releases/tag/v1.3.0 "OpenShift Origin 1.3.0 release on GitHub"
[ha]: ../../administration/high_availability/gitlab.md "Documentation - GitLab High Availability"
...@@ -69,7 +69,7 @@ This is the recommended minimum hardware for a handful of example GitLab user ba ...@@ -69,7 +69,7 @@ This is the recommended minimum hardware for a handful of example GitLab user ba
- 4 cores supports up to 500 users - 4 cores supports up to 500 users
- 8 cores supports up to 1,000 users - 8 cores supports up to 1,000 users
- 32 cores supports up to 5,000 users - 32 cores supports up to 5,000 users
- More users? Run it high-availability on [multiple application servers](https://about.gitlab.com/high-availability/) - More users? Run it high-availability on [multiple application servers](https://about.gitlab.com/solutions/high-availability/)
### Memory ### Memory
...@@ -86,7 +86,7 @@ errors during usage. ...@@ -86,7 +86,7 @@ errors during usage.
- 16GB RAM supports up to 500 users - 16GB RAM supports up to 500 users
- 32GB RAM supports up to 1,000 users - 32GB RAM supports up to 1,000 users
- 128GB RAM supports up to 5,000 users - 128GB RAM supports up to 5,000 users
- More users? Run it high-availability on [multiple application servers](https://about.gitlab.com/high-availability/) - More users? Run it high-availability on [multiple application servers](https://about.gitlab.com/solutions/high-availability/)
We recommend having at least [2GB of swap on your server](https://askubuntu.com/a/505344/310789), even if you currently have We recommend having at least [2GB of swap on your server](https://askubuntu.com/a/505344/310789), even if you currently have
enough available RAM. Having swap will help reduce the chance of errors occurring enough available RAM. Having swap will help reduce the chance of errors occurring
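If you are unsure whether the server already has swap, a minimal sketch of checking for it and adding a 2GB swap file on a typical Linux host looks like this (paths and sizes are examples; the linked askubuntu answer covers the details):

```shell
# Check existing swap
sudo swapon --show
free -h

# Create and enable a 2GB swap file
sudo fallocate -l 2G /swapfile
sudo chmod 600 /swapfile
sudo mkswap /swapfile
sudo swapon /swapfile
# Add "/swapfile none swap sw 0 0" to /etc/fstab to keep it across reboots
```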
...@@ -139,7 +139,7 @@ If you are using [GitLab Geo](../development/geo.md): ...@@ -139,7 +139,7 @@ If you are using [GitLab Geo](../development/geo.md):
- The - The
[tracking database](../development/geo.md#using-the-tracking-database) [tracking database](../development/geo.md#using-the-tracking-database)
requires the requires the
[postgres_fdw](https://www.postgresql.org/docs/9.6/static/postgres-fdw.html) [postgres_fdw](https://www.postgresql.org/docs/9.6/postgres-fdw.html)
extension. extension.
``` ```
......
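As a hedged illustration, assuming a self-managed tracking database named `gitlabhq_geo_production` (the actual database name and connection details depend on your installation), enabling the extension typically amounts to:

```shell
# Run against the Geo tracking database; the database name is an assumption.
sudo -u postgres psql -d gitlabhq_geo_production -c "CREATE EXTENSION IF NOT EXISTS postgres_fdw;"
```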
...@@ -55,7 +55,7 @@ The following languages and dependency managers are supported. ...@@ -55,7 +55,7 @@ The following languages and dependency managers are supported.
|----------------------------- | --------- | ------------ | |----------------------------- | --------- | ------------ |
| Java ([Gradle](https://gradle.org/)) | not currently ([issue](https://gitlab.com/gitlab-org/gitlab/issues/13075 "Dependency Scanning for Gradle" )) | not available | | Java ([Gradle](https://gradle.org/)) | not currently ([issue](https://gitlab.com/gitlab-org/gitlab/issues/13075 "Dependency Scanning for Gradle" )) | not available |
| Java ([Maven](https://maven.apache.org/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) | | Java ([Maven](https://maven.apache.org/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) |
| JavaScript ([npm](https://www.npmjs.com/), [yarn](https://yarnpkg.com/en/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium), [Retire.js](https://retirejs.github.io/retire.js) | | JavaScript ([npm](https://www.npmjs.com/), [yarn](https://yarnpkg.com/en/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium), [Retire.js](https://retirejs.github.io/retire.js/) |
| Go ([Golang](https://golang.org/)) | not currently ([issue](https://gitlab.com/gitlab-org/gitlab/issues/7132 "Dependency Scanning for Go")) | not available | | Go ([Golang](https://golang.org/)) | not currently ([issue](https://gitlab.com/gitlab-org/gitlab/issues/7132 "Dependency Scanning for Go")) | not available |
| PHP ([Composer](https://getcomposer.org/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) | | PHP ([Composer](https://getcomposer.org/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) |
| Python ([pip](https://pip.pypa.io/en/stable/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) | | Python ([pip](https://pip.pypa.io/en/stable/)) | yes | [gemnasium](https://gitlab.com/gitlab-org/security-products/gemnasium) |
......
...@@ -57,7 +57,7 @@ This workflow comes with some drawbacks and there's a ...@@ -57,7 +57,7 @@ This workflow comes with some drawbacks and there's a
## Interacting with the vulnerabilities ## Interacting with the vulnerabilities
> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing) 10.8. > Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 10.8.
CAUTION: **Warning:** CAUTION: **Warning:**
This feature is currently [Alpha](https://about.gitlab.com/handbook/product/#alpha-beta-ga) and while you can start using it, it may receive important changes in the future. This feature is currently [Alpha](https://about.gitlab.com/handbook/product/#alpha-beta-ga) and while you can start using it, it may receive important changes in the future.
...@@ -84,7 +84,7 @@ If you wish to undo this dismissal, you can click the **Undo dismiss** button. ...@@ -84,7 +84,7 @@ If you wish to undo this dismissal, you can click the **Undo dismiss** button.
#### Adding a dismissal reason #### Adding a dismissal reason
> Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing) 12.0. > Introduced in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.0.
When dismissing a vulnerability, it's often helpful to provide a reason for doing so. When dismissing a vulnerability, it's often helpful to provide a reason for doing so.
If you press the comment button next to **Dismiss vulnerability** in the modal, a text box will appear, allowing you to add a comment with your dismissal. If you press the comment button next to **Dismiss vulnerability** in the modal, a text box will appear, allowing you to add a comment with your dismissal.
...@@ -110,7 +110,7 @@ the vulnerability will now have an associated issue next to the name. ...@@ -110,7 +110,7 @@ the vulnerability will now have an associated issue next to the name.
### Solutions for vulnerabilities (auto-remediation) ### Solutions for vulnerabilities (auto-remediation)
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5656) in [GitLab Ultimate](https://about.gitlab.com/pricing) 11.7. > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/5656) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.7.
Some vulnerabilities can be fixed by applying the solution that GitLab Some vulnerabilities can be fixed by applying the solution that GitLab
automatically generates. The following scanners are supported: automatically generates. The following scanners are supported:
...@@ -134,7 +134,7 @@ generated by GitLab. To apply the fix: ...@@ -134,7 +134,7 @@ generated by GitLab. To apply the fix:
#### Creating a merge request from a vulnerability #### Creating a merge request from a vulnerability
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9224) in [GitLab Ultimate](https://about.gitlab.com/pricing) 11.9. > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9224) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.9.
In certain cases, GitLab will allow you to create a merge request that will In certain cases, GitLab will allow you to create a merge request that will
automatically remediate the vulnerability. Any vulnerability that has a automatically remediate the vulnerability. Any vulnerability that has a
...@@ -148,7 +148,7 @@ Clicking on this button will create a merge request to apply the solution onto t ...@@ -148,7 +148,7 @@ Clicking on this button will create a merge request to apply the solution onto t
## Security approvals in merge requests **(ULTIMATE)** ## Security approvals in merge requests **(ULTIMATE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9928) in [GitLab Ultimate](https://about.gitlab.com/pricing) 12.2. > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/9928) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.2.
Merge Request Approvals can be configured to require approval from a member Merge Request Approvals can be configured to require approval from a member
of your security team when a vulnerability or a software license compliance violation would be introduced by a merge request. of your security team when a vulnerability or a software license compliance violation would be introduced by a merge request.
......
...@@ -60,7 +60,7 @@ The following languages and package managers are supported. ...@@ -60,7 +60,7 @@ The following languages and package managers are supported.
| Elixir | [mix](https://elixir-lang.org/getting-started/mix-otp/introduction-to-mix.html) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)| | Elixir | [mix](https://elixir-lang.org/getting-started/mix-otp/introduction-to-mix.html) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types)) |[License Finder](https://github.com/pivotal/LicenseFinder)|
| C++/C | [conan](https://conan.io/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)| | C++/C | [conan](https://conan.io/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
| Scala | [sbt](https://www.scala-sbt.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)| | Scala | [sbt](https://www.scala-sbt.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
| Rust | [cargo](https://crates.io/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)| | Rust | [cargo](https://crates.io) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
| PHP | [composer](https://getcomposer.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)| | PHP | [composer](https://getcomposer.org/) ([experimental support](https://github.com/pivotal/LicenseFinder#experimental-project-types))|[License Finder](https://github.com/pivotal/LicenseFinder)|
## Requirements ## Requirements
......
...@@ -67,7 +67,7 @@ The following table shows which languages, package managers and frameworks are s ...@@ -67,7 +67,7 @@ The following table shows which languages, package managers and frameworks are s
| .NET | [Security Code Scan](https://security-code-scan.github.io) | 11.0 | | .NET | [Security Code Scan](https://security-code-scan.github.io) | 11.0 |
| Any | [Gitleaks](https://github.com/zricethezav/gitleaks) and [TruffleHog](https://github.com/dxa4481/truffleHog) | 11.9 | | Any | [Gitleaks](https://github.com/zricethezav/gitleaks) and [TruffleHog](https://github.com/dxa4481/truffleHog) | 11.9 |
| Apex (Salesforce) | [pmd](https://pmd.github.io/pmd/index.html) | 12.1 | | Apex (Salesforce) | [pmd](https://pmd.github.io/pmd/index.html) | 12.1 |
| C/C++ | [Flawfinder](https://www.dwheeler.com/flawfinder/) | 10.7 | | C/C++ | [Flawfinder](https://dwheeler.com/flawfinder/) | 10.7 |
| Elixir (Phoenix) | [Sobelow](https://github.com/nccgroup/sobelow) | 11.10 | | Elixir (Phoenix) | [Sobelow](https://github.com/nccgroup/sobelow) | 11.10 |
| Go | [Gosec](https://github.com/securego/gosec) | 10.7 | | Go | [Gosec](https://github.com/securego/gosec) | 10.7 |
| Groovy ([Ant](https://ant.apache.org/), [Gradle](https://gradle.org/), [Maven](https://maven.apache.org/) and [SBT](https://www.scala-sbt.org/)) | [SpotBugs](https://spotbugs.github.io/) with the [find-sec-bugs](https://find-sec-bugs.github.io/) plugin | 11.3 (Gradle) & 11.9 (Ant, Maven, SBT) | | Groovy ([Ant](https://ant.apache.org/), [Gradle](https://gradle.org/), [Maven](https://maven.apache.org/) and [SBT](https://www.scala-sbt.org/)) | [SpotBugs](https://spotbugs.github.io/) with the [find-sec-bugs](https://find-sec-bugs.github.io/) plugin | 11.3 (Gradle) & 11.9 (Ant, Maven, SBT) |
......
...@@ -36,7 +36,7 @@ To use the group, project or pipeline security dashboard: ...@@ -36,7 +36,7 @@ To use the group, project or pipeline security dashboard:
## Pipeline Security Dashboard ## Pipeline Security Dashboard
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13496) in [GitLab Ultimate](https://about.gitlab.com/pricing) 12.3. > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/13496) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.3.
At the pipeline level, the Security Dashboard displays the vulnerabilities present in the branch of the project the pipeline was run against. At the pipeline level, the Security Dashboard displays the vulnerabilities present in the branch of the project the pipeline was run against.
...@@ -46,7 +46,7 @@ Visit the page for any pipeline which has run any of the [supported reports](#su ...@@ -46,7 +46,7 @@ Visit the page for any pipeline which has run any of the [supported reports](#su
## Project Security Dashboard ## Project Security Dashboard
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6165) in [GitLab Ultimate](https://about.gitlab.com/pricing) 11.1. > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6165) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.1.
At the project level, the Security Dashboard displays the latest security reports At the project level, the Security Dashboard displays the latest security reports
for your project. Use it to find and fix vulnerabilities affecting the for your project. Use it to find and fix vulnerabilities affecting the
...@@ -56,7 +56,7 @@ for your project. Use it to find and fix vulnerabilities affecting the ...@@ -56,7 +56,7 @@ for your project. Use it to find and fix vulnerabilities affecting the
## Group Security Dashboard ## Group Security Dashboard
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6709) in [GitLab Ultimate](https://about.gitlab.com/pricing) 11.5. > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6709) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.5.
The group Security Dashboard gives an overview of the vulnerabilities of all the The group Security Dashboard gives an overview of the vulnerabilities of all the
projects in a group and its subgroups. projects in a group and its subgroups.
......
...@@ -38,7 +38,6 @@ to be enabled: ...@@ -38,7 +38,6 @@ to be enabled:
- Files uploaded must have a file extension of either `png`, `jpg`, `jpeg`, `gif`, `bmp`, `tiff` or `ico`. - Files uploaded must have a file extension of either `png`, `jpg`, `jpeg`, `gif`, `bmp`, `tiff` or `ico`.
The [`svg` extension is not yet supported](https://gitlab.com/gitlab-org/gitlab/issues/12771). The [`svg` extension is not yet supported](https://gitlab.com/gitlab-org/gitlab/issues/12771).
- Design uploads are limited to 10 files at a time. - Design uploads are limited to 10 files at a time.
- [Designs cannot yet be deleted](https://gitlab.com/gitlab-org/gitlab/issues/11089).
- Design Management is - Design Management is
[not yet supported in the project export](https://gitlab.com/gitlab-org/gitlab/issues/11090). [not yet supported in the project export](https://gitlab.com/gitlab-org/gitlab/issues/11090).
- Design Management data - Design Management data
...@@ -64,13 +63,13 @@ of the design, and will replace the previous version. ...@@ -64,13 +63,13 @@ of the design, and will replace the previous version.
## Viewing designs ## Viewing designs
Images on the Design Management page can be enlarged by clicking on them. Images on the Design Management page can be enlarged by clicking on them.
The number of comments on a design — if any — is listed to the right The number of comments on a design — if any — is listed to the right
of the design filename. Clicking on this number enlarges the design of the design filename. Clicking on this number enlarges the design
just like clicking anywhere else on the design. just like clicking anywhere else on the design.
When a design is added or modified, an icon is displayed on the item When a design is added or modified, an icon is displayed on the item
to help summarize changes between versions. to help summarize changes between versions.
| Indicator | Example | | Indicator | Example |
| --------- | ------- | | --------- | ------- |
......
...@@ -65,6 +65,7 @@ The following items will be exported: ...@@ -65,6 +65,7 @@ The following items will be exported:
- Project configuration, including services - Project configuration, including services
- Issues with comments, merge requests with diffs and comments, labels, milestones, snippets, - Issues with comments, merge requests with diffs and comments, labels, milestones, snippets,
and other project entities and other project entities
- Design Management files and data **(PREMIUM)**
- LFS objects - LFS objects
- Issue boards - Issue boards
......
...@@ -38,6 +38,10 @@ module Gitlab ...@@ -38,6 +38,10 @@ module Gitlab
"lfs-objects" "lfs-objects"
end end
def wiki_repo_bundle_filename
"project.wiki.bundle"
end
def config_file def config_file
Rails.root.join('lib/gitlab/import_export/import_export.yml') Rails.root.join('lib/gitlab/import_export/import_export.yml')
end end
...@@ -61,3 +65,5 @@ module Gitlab ...@@ -61,3 +65,5 @@ module Gitlab
end end
end end
end end
Gitlab::ImportExport.prepend_if_ee('EE::Gitlab::ImportExport')
...@@ -26,30 +26,60 @@ module Gitlab ...@@ -26,30 +26,60 @@ module Gitlab
end end
def find def find
find_object || @klass.create(project_attributes) find_object || klass.create(project_attributes)
end end
private private
attr_reader :klass, :attributes, :group, :project
def find_object def find_object
@klass.where(where_clause).first klass.where(where_clause).first
end end
def where_clause def where_clause
@attributes.slice('title').map do |key, value| where_clauses.reduce(:and)
scope_clause = table[:project_id].eq(@project.id) end
scope_clause = scope_clause.or(table[:group_id].eq(@group.id)) if @group
def where_clauses
[
where_clause_base,
where_clause_for_title,
where_clause_for_klass
].compact
end
# Returns Arel clause `"{table_name}"."project_id" = {project.id}`
# or, if group is present:
# `"{table_name}"."project_id" = {project.id} OR "{table_name}"."group_id" = {group.id}`
def where_clause_base
clause = table[:project_id].eq(project.id)
clause = clause.or(table[:group_id].eq(group.id)) if group
clause
end
table[key].eq(value).and(scope_clause) # Returns Arel clause `"{table_name}"."title" = '{attributes['title']}'`
end.reduce(:or) # if attributes has the 'title' key, otherwise `nil`.
def where_clause_for_title
attrs_to_arel(attributes.slice('title'))
end
# Returns Arel clause:
# `"{table_name}"."{attrs.keys[0]}" = '{attrs.values[0]} AND {table_name}"."{attrs.keys[1]}" = '{attrs.values[1]}"`
# from the given Hash of attributes.
def attrs_to_arel(attrs)
attrs.map do |key, value|
table[key].eq(value)
end.reduce(:and)
end end
def table def table
@table ||= @klass.arel_table @table ||= klass.arel_table
end end
def project_attributes def project_attributes
@attributes.except('group').tap do |atts| attributes.except('group').tap do |atts|
if label? if label?
atts['type'] = 'ProjectLabel' # Always create project labels atts['type'] = 'ProjectLabel' # Always create project labels
elsif milestone? elsif milestone?
...@@ -60,15 +90,17 @@ module Gitlab ...@@ -60,15 +90,17 @@ module Gitlab
claim_iid claim_iid
end end
end end
atts['importing'] = true if klass.ancestors.include?(Importable)
end end
end end
def label? def label?
@klass == Label klass == Label
end end
def milestone? def milestone?
@klass == Milestone klass == Milestone
end end
# If an existing group milestone used the IID # If an existing group milestone used the IID
...@@ -79,7 +111,7 @@ module Gitlab ...@@ -79,7 +111,7 @@ module Gitlab
def claim_iid def claim_iid
# The milestone has to be a group milestone, as it's the only case where # The milestone has to be a group milestone, as it's the only case where
# we set the IID as the maximum. The rest of them are fixed. # we set the IID as the maximum. The rest of them are fixed.
milestone = @project.milestones.find_by(iid: @attributes['iid']) milestone = project.milestones.find_by(iid: attributes['iid'])
return unless milestone return unless milestone
...@@ -87,6 +119,15 @@ module Gitlab ...@@ -87,6 +119,15 @@ module Gitlab
milestone.ensure_project_iid! milestone.ensure_project_iid!
milestone.save! milestone.save!
end end
protected
# Returns Arel clause for a particular model or `nil`.
def where_clause_for_klass
# no-op
end
end end
end end
end end
Gitlab::ImportExport::GroupProjectObjectBuilder.prepend_if_ee('EE::Gitlab::ImportExport::GroupProjectObjectBuilder')
...@@ -248,7 +248,16 @@ preloads: ...@@ -248,7 +248,16 @@ preloads:
ee: ee:
tree: tree:
project: project:
protected_branches: - issues:
- designs:
- notes:
- :author
- events:
- :push_event_payload
- design_versions:
- actions:
- :design # Duplicate export of issues.designs in order to link the record to both Issue and DesignVersion
- protected_branches:
- :unprotect_access_levels - :unprotect_access_levels
protected_environments: - protected_environments:
- :deploy_access_levels - :deploy_access_levels
...@@ -21,7 +21,7 @@ module Gitlab ...@@ -21,7 +21,7 @@ module Gitlab
if import_file && check_version! && restorers.all?(&:restore) && overwrite_project if import_file && check_version! && restorers.all?(&:restore) && overwrite_project
project_tree.restored_project project_tree.restored_project
else else
raise Projects::ImportService::Error.new(@shared.errors.join(', ')) raise Projects::ImportService::Error.new(shared.errors.to_sentence)
end end
rescue => e rescue => e
raise Projects::ImportService::Error.new(e.message) raise Projects::ImportService::Error.new(e.message)
...@@ -31,70 +31,72 @@ module Gitlab ...@@ -31,70 +31,72 @@ module Gitlab
private private
attr_accessor :archive_file, :current_user, :project, :shared
def restorers def restorers
[repo_restorer, wiki_restorer, project_tree, avatar_restorer, [repo_restorer, wiki_restorer, project_tree, avatar_restorer,
uploads_restorer, lfs_restorer, statistics_restorer] uploads_restorer, lfs_restorer, statistics_restorer]
end end
def import_file def import_file
Gitlab::ImportExport::FileImporter.import(project: @project, Gitlab::ImportExport::FileImporter.import(project: project,
archive_file: @archive_file, archive_file: archive_file,
shared: @shared) shared: shared)
end end
def check_version! def check_version!
Gitlab::ImportExport::VersionChecker.check!(shared: @shared) Gitlab::ImportExport::VersionChecker.check!(shared: shared)
end end
def project_tree def project_tree
@project_tree ||= Gitlab::ImportExport::ProjectTreeRestorer.new(user: @current_user, @project_tree ||= Gitlab::ImportExport::ProjectTreeRestorer.new(user: current_user,
shared: @shared, shared: shared,
project: @project) project: project)
end end
def avatar_restorer def avatar_restorer
Gitlab::ImportExport::AvatarRestorer.new(project: project_tree.restored_project, shared: @shared) Gitlab::ImportExport::AvatarRestorer.new(project: project_tree.restored_project, shared: shared)
end end
def repo_restorer def repo_restorer
Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: repo_path, Gitlab::ImportExport::RepoRestorer.new(path_to_bundle: repo_path,
shared: @shared, shared: shared,
project: project_tree.restored_project) project: project_tree.restored_project)
end end
def wiki_restorer def wiki_restorer
Gitlab::ImportExport::WikiRestorer.new(path_to_bundle: wiki_repo_path, Gitlab::ImportExport::WikiRestorer.new(path_to_bundle: wiki_repo_path,
shared: @shared, shared: shared,
project: ProjectWiki.new(project_tree.restored_project), project: ProjectWiki.new(project_tree.restored_project),
wiki_enabled: @project.wiki_enabled?) wiki_enabled: project.wiki_enabled?)
end end
def uploads_restorer def uploads_restorer
Gitlab::ImportExport::UploadsRestorer.new(project: project_tree.restored_project, shared: @shared) Gitlab::ImportExport::UploadsRestorer.new(project: project_tree.restored_project, shared: shared)
end end
def lfs_restorer def lfs_restorer
Gitlab::ImportExport::LfsRestorer.new(project: project_tree.restored_project, shared: @shared) Gitlab::ImportExport::LfsRestorer.new(project: project_tree.restored_project, shared: shared)
end end
def statistics_restorer def statistics_restorer
Gitlab::ImportExport::StatisticsRestorer.new(project: project_tree.restored_project, shared: @shared) Gitlab::ImportExport::StatisticsRestorer.new(project: project_tree.restored_project, shared: shared)
end end
def path_with_namespace def path_with_namespace
File.join(@project.namespace.full_path, @project.path) File.join(project.namespace.full_path, project.path)
end end
def repo_path def repo_path
File.join(@shared.export_path, 'project.bundle') File.join(shared.export_path, Gitlab::ImportExport.project_bundle_filename)
end end
def wiki_repo_path def wiki_repo_path
File.join(@shared.export_path, 'project.wiki.bundle') File.join(shared.export_path, Gitlab::ImportExport.wiki_repo_bundle_filename)
end end
def remove_import_file def remove_import_file
upload = @project.import_export_upload upload = project.import_export_upload
return unless upload&.import_file&.file return unless upload&.import_file&.file
...@@ -105,10 +107,10 @@ module Gitlab ...@@ -105,10 +107,10 @@ module Gitlab
def overwrite_project def overwrite_project
project = project_tree.restored_project project = project_tree.restored_project
return unless can?(@current_user, :admin_namespace, project.namespace) return unless can?(current_user, :admin_namespace, project.namespace)
if overwrite_project? if overwrite_project?
::Projects::OverwriteProjectService.new(project, @current_user) ::Projects::OverwriteProjectService.new(project, current_user)
.execute(project_to_overwrite) .execute(project_to_overwrite)
end end
...@@ -116,7 +118,7 @@ module Gitlab ...@@ -116,7 +118,7 @@ module Gitlab
end end
def original_path def original_path
@project.import_data&.data&.fetch('original_path', nil) project.import_data&.data&.fetch('original_path', nil)
end end
def overwrite_project? def overwrite_project?
...@@ -125,9 +127,11 @@ module Gitlab ...@@ -125,9 +127,11 @@ module Gitlab
def project_to_overwrite def project_to_overwrite
strong_memoize(:project_to_overwrite) do strong_memoize(:project_to_overwrite) do
Project.find_by_full_path("#{@project.namespace.full_path}/#{original_path}") Project.find_by_full_path("#{project.namespace.full_path}/#{original_path}")
end end
end end
end end
end end
end end
Gitlab::ImportExport::Importer.prepend_if_ee('EE::Gitlab::ImportExport::Importer')
...@@ -93,6 +93,10 @@ module Gitlab ...@@ -93,6 +93,10 @@ module Gitlab
end end
end end
def remove_feature_dependent_sub_relations(_relation_item)
# no-op
end
def project_relations_without_project_members def project_relations_without_project_members
# We remove `project_members` as they are deserialized separately # We remove `project_members` as they are deserialized separately
project_relations.except(:project_members) project_relations.except(:project_members)
...@@ -171,6 +175,8 @@ module Gitlab ...@@ -171,6 +175,8 @@ module Gitlab
next next
end end
remove_feature_dependent_sub_relations(relation_item)
# The transaction at this level is less speedy than one single transaction # The transaction at this level is less speedy than one single transaction
# But we can't have it in the upper level or GC won't get rid of the AR objects # But we can't have it in the upper level or GC won't get rid of the AR objects
# after we save the batch. # after we save the batch.
...@@ -238,3 +244,5 @@ module Gitlab ...@@ -238,3 +244,5 @@ module Gitlab
end end
end end
end end
Gitlab::ImportExport::ProjectTreeRestorer.prepend_if_ee('::EE::Gitlab::ImportExport::ProjectTreeRestorer')
...@@ -34,13 +34,13 @@ module Gitlab ...@@ -34,13 +34,13 @@ module Gitlab
PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze PROJECT_REFERENCES = %w[project_id source_project_id target_project_id].freeze
BUILD_MODELS = %w[Ci::Build commit_status].freeze BUILD_MODELS = %i[Ci::Build commit_status].freeze
IMPORTED_OBJECT_MAX_RETRIES = 5.freeze IMPORTED_OBJECT_MAX_RETRIES = 5.freeze
EXISTING_OBJECT_CHECK = %i[milestone milestones label labels project_label project_labels group_label group_labels project_feature].freeze EXISTING_OBJECT_CHECK = %i[milestone milestones label labels project_label project_labels group_label group_labels project_feature].freeze
TOKEN_RESET_MODELS = %w[Project Namespace Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze TOKEN_RESET_MODELS = %i[Project Namespace Ci::Trigger Ci::Build Ci::Runner ProjectHook].freeze
def self.create(*args) def self.create(*args)
new(*args).create new(*args).create
...@@ -56,7 +56,7 @@ module Gitlab ...@@ -56,7 +56,7 @@ module Gitlab
end end
def initialize(relation_sym:, relation_hash:, members_mapper:, user:, project:, excluded_keys: []) def initialize(relation_sym:, relation_hash:, members_mapper:, user:, project:, excluded_keys: [])
@relation_name = self.class.overrides[relation_sym] || relation_sym @relation_name = self.class.overrides[relation_sym]&.to_sym || relation_sym
@relation_hash = relation_hash.except('noteable_id') @relation_hash = relation_hash.except('noteable_id')
@members_mapper = members_mapper @members_mapper = members_mapper
@user = user @user = user
...@@ -92,6 +92,10 @@ module Gitlab ...@@ -92,6 +92,10 @@ module Gitlab
OVERRIDES OVERRIDES
end end
def self.existing_object_check
EXISTING_OBJECT_CHECK
end
private private
def setup_models def setup_models
...@@ -105,7 +109,7 @@ module Gitlab ...@@ -105,7 +109,7 @@ module Gitlab
update_group_references update_group_references
remove_duplicate_assignees remove_duplicate_assignees
setup_pipeline if @relation_name == 'Ci::Pipeline' setup_pipeline if @relation_name == :'Ci::Pipeline'
reset_tokens! reset_tokens!
remove_encrypted_attributes! remove_encrypted_attributes!
...@@ -184,14 +188,14 @@ module Gitlab ...@@ -184,14 +188,14 @@ module Gitlab
end end
def update_group_references def update_group_references
return unless EXISTING_OBJECT_CHECK.include?(@relation_name) return unless self.class.existing_object_check.include?(@relation_name)
return unless @relation_hash['group_id'] return unless @relation_hash['group_id']
@relation_hash['group_id'] = @project.namespace_id @relation_hash['group_id'] = @project.namespace_id
end end
def reset_tokens! def reset_tokens!
return unless Gitlab::ImportExport.reset_tokens? && TOKEN_RESET_MODELS.include?(@relation_name.to_s) return unless Gitlab::ImportExport.reset_tokens? && TOKEN_RESET_MODELS.include?(@relation_name)
# If we import/export a project to the same instance, tokens will have to be reset. # If we import/export a project to the same instance, tokens will have to be reset.
# We also have to reset them to avoid issues when the gitlab secrets file cannot be copied across. # We also have to reset them to avoid issues when the gitlab secrets file cannot be copied across.
...@@ -255,7 +259,7 @@ module Gitlab ...@@ -255,7 +259,7 @@ module Gitlab
# Only find existing records to avoid mapping tables such as milestones # Only find existing records to avoid mapping tables such as milestones
# Otherwise always create the record, skipping the extra SELECT clause. # Otherwise always create the record, skipping the extra SELECT clause.
@existing_or_new_object ||= begin @existing_or_new_object ||= begin
if EXISTING_OBJECT_CHECK.include?(@relation_name) if self.class.existing_object_check.include?(@relation_name)
attribute_hash = attribute_hash_for(['events']) attribute_hash = attribute_hash_for(['events'])
existing_object.assign_attributes(attribute_hash) if attribute_hash.any? existing_object.assign_attributes(attribute_hash) if attribute_hash.any?
...@@ -284,7 +288,7 @@ module Gitlab ...@@ -284,7 +288,7 @@ module Gitlab
end end
def legacy_trigger? def legacy_trigger?
@relation_name == 'Ci::Trigger' && @relation_hash['owner_id'].nil? @relation_name == :'Ci::Trigger' && @relation_hash['owner_id'].nil?
end end
def find_or_create_object! def find_or_create_object!
...@@ -293,7 +297,7 @@ module Gitlab ...@@ -293,7 +297,7 @@ module Gitlab
# Can't use IDs as validation exists calling `group` or `project` attributes # Can't use IDs as validation exists calling `group` or `project` attributes
finder_hash = parsed_relation_hash.tap do |hash| finder_hash = parsed_relation_hash.tap do |hash|
hash['group'] = @project.group if relation_class.attribute_method?('group_id') hash['group'] = @project.group if relation_class.attribute_method?('group_id')
hash['project'] = @project hash['project'] = @project if relation_class.reflect_on_association(:project)
hash.delete('project_id') hash.delete('project_id')
end end
......
...@@ -6,19 +6,23 @@ module Gitlab ...@@ -6,19 +6,23 @@ module Gitlab
include Gitlab::ImportExport::CommandLineUtil include Gitlab::ImportExport::CommandLineUtil
def initialize(project:, shared:, path_to_bundle:) def initialize(project:, shared:, path_to_bundle:)
@project = project @repository = project.repository
@path_to_bundle = path_to_bundle @path_to_bundle = path_to_bundle
@shared = shared @shared = shared
end end
def restore def restore
return true unless File.exist?(@path_to_bundle) return true unless File.exist?(path_to_bundle)
@project.repository.create_from_bundle(@path_to_bundle) repository.create_from_bundle(path_to_bundle)
rescue => e rescue => e
@shared.error(e) shared.error(e)
false false
end end
private
attr_accessor :repository, :path_to_bundle, :shared
end end
end end
end end
...@@ -5,27 +5,35 @@ module Gitlab ...@@ -5,27 +5,35 @@ module Gitlab
class RepoSaver class RepoSaver
include Gitlab::ImportExport::CommandLineUtil include Gitlab::ImportExport::CommandLineUtil
attr_reader :full_path attr_reader :project, :repository, :shared
def initialize(project:, shared:) def initialize(project:, shared:)
@project = project @project = project
@shared = shared @shared = shared
@repository = @project.repository
end end
def save def save
return true if @project.empty_repo? # it's ok to have no repo return true unless repository_exists? # it's ok to have no repo
@full_path = File.join(@shared.export_path, ImportExport.project_bundle_filename)
bundle_to_disk bundle_to_disk
end end
private private
def repository_exists?
repository.exists? && !repository.empty?
end
def bundle_full_path
File.join(shared.export_path, ImportExport.project_bundle_filename)
end
def bundle_to_disk def bundle_to_disk
mkdir_p(@shared.export_path) mkdir_p(shared.export_path)
@project.repository.bundle_to_disk(@full_path) repository.bundle_to_disk(bundle_full_path)
rescue => e rescue => e
@shared.error(e) shared.error(e)
false false
end end
end end
......
...@@ -4,28 +4,16 @@ module Gitlab ...@@ -4,28 +4,16 @@ module Gitlab
module ImportExport module ImportExport
class WikiRepoSaver < RepoSaver class WikiRepoSaver < RepoSaver
def save def save
@wiki = ProjectWiki.new(@project) wiki = ProjectWiki.new(project)
return true unless wiki_repository_exists? # it's okay to have no Wiki @repository = wiki.repository
bundle_to_disk(File.join(@shared.export_path, project_filename)) super
end
def bundle_to_disk(full_path)
mkdir_p(@shared.export_path)
@wiki.repository.bundle_to_disk(full_path)
rescue => e
@shared.error(e)
false
end end
private private
def project_filename def bundle_full_path
"project.wiki.bundle" File.join(shared.export_path, ImportExport.wiki_repo_bundle_filename)
end
def wiki_repository_exists?
@wiki.repository.exists? && !@wiki.repository.empty?
end end
end end
end end
......
...@@ -6,19 +6,22 @@ module Gitlab ...@@ -6,19 +6,22 @@ module Gitlab
def initialize(project:, shared:, path_to_bundle:, wiki_enabled:) def initialize(project:, shared:, path_to_bundle:, wiki_enabled:)
super(project: project, shared: shared, path_to_bundle: path_to_bundle) super(project: project, shared: shared, path_to_bundle: path_to_bundle)
@project = project
@wiki_enabled = wiki_enabled @wiki_enabled = wiki_enabled
end end
def restore def restore
@project.wiki if create_empty_wiki? project.wiki if create_empty_wiki?
super super
end end
private private
attr_accessor :project, :wiki_enabled
def create_empty_wiki? def create_empty_wiki?
!File.exist?(@path_to_bundle) && @wiki_enabled !File.exist?(path_to_bundle) && wiki_enabled
end end
end end
end end
......
...@@ -5185,6 +5185,9 @@ msgstr "" ...@@ -5185,6 +5185,9 @@ msgstr ""
msgid "Deselect all" msgid "Deselect all"
msgstr "" msgstr ""
msgid "Design Management files and data"
msgstr ""
msgid "DesignManagement|%{current_design} of %{designs_count}" msgid "DesignManagement|%{current_design} of %{designs_count}"
msgstr "" msgstr ""
......
...@@ -502,3 +502,17 @@ lists: ...@@ -502,3 +502,17 @@ lists:
milestone_releases: milestone_releases:
- milestone - milestone
- release - release
design: &design
- issue
- actions
- versions
- notes
designs: *design
actions:
- design
- version
versions: &version
- issue
- designs
- actions
design_versions: *version
...@@ -2,6 +2,8 @@ require 'spec_helper' ...@@ -2,6 +2,8 @@ require 'spec_helper'
include ImportExport::CommonUtil include ImportExport::CommonUtil
describe Gitlab::ImportExport::ProjectTreeRestorer do describe Gitlab::ImportExport::ProjectTreeRestorer do
let(:shared) { project.import_export_shared }
describe 'restore project tree' do describe 'restore project tree' do
before(:context) do before(:context) do
# Using an admin for import, so we can check assignment of existing members # Using an admin for import, so we can check assignment of existing members
...@@ -14,7 +16,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -14,7 +16,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
RSpec::Mocks.with_temporary_scope do RSpec::Mocks.with_temporary_scope do
@project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') @project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project')
@shared = @project.import_export_shared @shared = @project.import_export_shared
allow(@shared).to receive(:export_path).and_return('spec/lib/gitlab/import_export/') allow(@shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true) allow_any_instance_of(Repository).to receive(:fetch_source_branch!).and_return(true)
allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false) allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false)
...@@ -274,36 +276,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -274,36 +276,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end end
end end
shared_examples 'restores project successfully' do
it 'correctly restores project' do
expect(shared.errors).to be_empty
expect(restored_project_json).to be_truthy
end
end
shared_examples 'restores project correctly' do |**results|
it 'has labels' do
expect(project.labels.size).to eq(results.fetch(:labels, 0))
end
it 'has label priorities' do
expect(project.labels.find_by(title: 'A project label').priorities).not_to be_empty
end
it 'has milestones' do
expect(project.milestones.size).to eq(results.fetch(:milestones, 0))
end
it 'has issues' do
expect(project.issues.size).to eq(results.fetch(:issues, 0))
end
it 'does not set params that are excluded from import_export settings' do
expect(project.import_type).to be_nil
expect(project.creator_id).not_to eq 123
end
end
shared_examples 'restores group correctly' do |**results| shared_examples 'restores group correctly' do |**results|
it 'has group label' do it 'has group label' do
expect(project.group.labels.size).to eq(results.fetch(:labels, 0)) expect(project.group.labels.size).to eq(results.fetch(:labels, 0))
...@@ -322,18 +294,17 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -322,18 +294,17 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
context 'Light JSON' do context 'Light JSON' do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:shared) { project.import_export_shared }
let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') } let!(:project) { create(:project, :builds_disabled, :issues_disabled, name: 'project', path: 'project') }
let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) } let(:project_tree_restorer) { described_class.new(user: user, shared: shared, project: project) }
let(:restored_project_json) { project_tree_restorer.restore } let(:restored_project_json) { project_tree_restorer.restore }
before do before do
allow(shared).to receive(:export_path).and_return('spec/lib/gitlab/import_export/') allow(shared).to receive(:export_path).and_return('spec/fixtures/lib/gitlab/import_export/')
end end
context 'with a simple project' do context 'with a simple project' do
before do before do
project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.light.json") project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.light.json")
restored_project_json restored_project_json
end end
...@@ -341,6 +312,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -341,6 +312,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
it_behaves_like 'restores project correctly', it_behaves_like 'restores project correctly',
issues: 1, issues: 1,
labels: 2, labels: 2,
label_with_priorities: 'A project label',
milestones: 1, milestones: 1,
first_issue_labels: 1, first_issue_labels: 1,
services: 1 services: 1
...@@ -363,7 +335,12 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -363,7 +335,12 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
create(:ci_build, token: 'abcd') create(:ci_build, token: 'abcd')
end end
it_behaves_like 'restores project successfully' it_behaves_like 'restores project correctly',
issues: 1,
labels: 2,
label_with_priorities: 'A project label',
milestones: 1,
first_issue_labels: 1
end end
end end
...@@ -430,15 +407,15 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -430,15 +407,15 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end end
before do before do
project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.group.json") project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.group.json")
restored_project_json restored_project_json
end end
it_behaves_like 'restores project successfully'
it_behaves_like 'restores project correctly', it_behaves_like 'restores project correctly',
issues: 2, issues: 2,
labels: 2, labels: 2,
label_with_priorities: 'A project label',
milestones: 2, milestones: 2,
first_issue_labels: 1 first_issue_labels: 1
...@@ -459,7 +436,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -459,7 +436,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end end
before do before do
project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.light.json") project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.light.json")
end end
it 'does not import any templated services' do it 'does not import any templated services' do
...@@ -501,7 +478,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -501,7 +478,7 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
end end
it 'preserves the project milestone IID' do it 'preserves the project milestone IID' do
project_tree_restorer.instance_variable_set(:@path, "spec/lib/gitlab/import_export/project.milestone-iid.json") project_tree_restorer.instance_variable_set(:@path, "spec/fixtures/lib/gitlab/import_export/project.milestone-iid.json")
expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error) expect_any_instance_of(Gitlab::ImportExport::Shared).not_to receive(:error)
...@@ -534,7 +511,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do ...@@ -534,7 +511,6 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
describe '#restored_project' do describe '#restored_project' do
let(:project) { create(:project) } let(:project) { create(:project) }
let(:shared) { project.import_export_shared }
let(:tree_hash) { { 'visibility_level' => visibility } } let(:tree_hash) { { 'visibility_level' => visibility } }
let(:restorer) { described_class.new(user: nil, shared: shared, project: project) } let(:restorer) { described_class.new(user: nil, shared: shared, project: project) }
......
...@@ -21,7 +21,7 @@ describe Gitlab::ImportExport::RelationRenameService do ...@@ -21,7 +21,7 @@ describe Gitlab::ImportExport::RelationRenameService do
context 'when importing' do context 'when importing' do
let(:project_tree_restorer) { Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) } let(:project_tree_restorer) { Gitlab::ImportExport::ProjectTreeRestorer.new(user: user, shared: shared, project: project) }
let(:import_path) { 'spec/lib/gitlab/import_export' } let(:import_path) { 'spec/fixtures/lib/gitlab/import_export' }
let(:file_content) { IO.read("#{import_path}/project.json") } let(:file_content) { IO.read("#{import_path}/project.json") }
let!(:json_file) { ActiveSupport::JSON.decode(file_content) } let!(:json_file) { ActiveSupport::JSON.decode(file_content) }
......
...@@ -2,8 +2,8 @@ require 'spec_helper' ...@@ -2,8 +2,8 @@ require 'spec_helper'
describe Gitlab::ImportExport::RepoSaver do describe Gitlab::ImportExport::RepoSaver do
describe 'bundle a project Git repo' do describe 'bundle a project Git repo' do
let(:user) { create(:user) } set(:user) { create(:user) }
let!(:project) { create(:project, :public, name: 'searchable_project') } let!(:project) { create(:project, :repository) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared } let(:shared) { project.import_export_shared }
let(:bundler) { described_class.new(project: project, shared: shared) } let(:bundler) { described_class.new(project: project, shared: shared) }
...@@ -20,5 +20,13 @@ describe Gitlab::ImportExport::RepoSaver do ...@@ -20,5 +20,13 @@ describe Gitlab::ImportExport::RepoSaver do
it 'bundles the repo successfully' do it 'bundles the repo successfully' do
expect(bundler.save).to be true expect(bundler.save).to be true
end end
context 'when the repo is empty' do
let!(:project) { create(:project) }
it 'bundles the repo successfully' do
expect(bundler.save).to be true
end
end
end end
end end
...@@ -731,3 +731,18 @@ ExternalPullRequest: ...@@ -731,3 +731,18 @@ ExternalPullRequest:
- target_repository - target_repository
- source_sha - source_sha
- target_sha - target_sha
DesignManagement::Design:
- id
- project_id
- issue_id
- filename
DesignManagement::Action:
- design_id
- event
- version_id
DesignManagement::Version:
- id
- created_at
- sha
- issue_id
- user_id
...@@ -2,8 +2,8 @@ require 'spec_helper' ...@@ -2,8 +2,8 @@ require 'spec_helper'
describe Gitlab::ImportExport::WikiRepoSaver do describe Gitlab::ImportExport::WikiRepoSaver do
describe 'bundle a wiki Git repo' do describe 'bundle a wiki Git repo' do
let(:user) { create(:user) } set(:user) { create(:user) }
let!(:project) { create(:project, :public, :wiki_repo, name: 'searchable_project') } let!(:project) { create(:project, :wiki_repo) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared } let(:shared) { project.import_export_shared }
let(:wiki_bundler) { described_class.new(project: project, shared: shared) } let(:wiki_bundler) { described_class.new(project: project, shared: shared) }
...@@ -23,5 +23,13 @@ describe Gitlab::ImportExport::WikiRepoSaver do ...@@ -23,5 +23,13 @@ describe Gitlab::ImportExport::WikiRepoSaver do
it 'bundles the repo successfully' do it 'bundles the repo successfully' do
expect(wiki_bundler.save).to be true expect(wiki_bundler.save).to be true
end end
context 'when the repo is empty' do
let!(:project) { create(:project) }
it 'bundles the repo successfully' do
expect(wiki_bundler.save).to be true
end
end
end end
end end
...@@ -1075,7 +1075,7 @@ describe Repository do ...@@ -1075,7 +1075,7 @@ describe Repository do
let(:ref) { 'refs/heads/master' } let(:ref) { 'refs/heads/master' }
it 'returns nil' do it 'returns nil' do
is_expected.to eq(nil) is_expected.to be_nil
end end
end end
...@@ -2002,7 +2002,7 @@ describe Repository do ...@@ -2002,7 +2002,7 @@ describe Repository do
it 'returns nil if repo does not exist' do it 'returns nil if repo does not exist' do
allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository) allow(repository).to receive(:root_ref).and_raise(Gitlab::Git::Repository::NoRepository)
expect(repository.avatar).to eq(nil) expect(repository.avatar).to be_nil
end end
it 'returns the first avatar file found in the repository' do it 'returns the first avatar file found in the repository' do
...@@ -2604,6 +2604,10 @@ describe Repository do ...@@ -2604,6 +2604,10 @@ describe Repository do
expect { repository.create_if_not_exists }.to change { repository.exists? }.from(false).to(true) expect { repository.create_if_not_exists }.to change { repository.exists? }.from(false).to(true)
end end
it 'returns true' do
expect(repository.create_if_not_exists).to eq(true)
end
it 'calls out to the repository client to create a repo' do it 'calls out to the repository client to create a repo' do
expect(repository.raw.gitaly_repository_client).to receive(:create_repository) expect(repository.raw.gitaly_repository_client).to receive(:create_repository)
...@@ -2618,6 +2622,10 @@ describe Repository do ...@@ -2618,6 +2622,10 @@ describe Repository do
repository.create_if_not_exists repository.create_if_not_exists
end end
it 'returns nil' do
expect(repository.create_if_not_exists).to be_nil
end
end end
context 'when the repository exists but the cache is not up to date' do context 'when the repository exists but the cache is not up to date' do
...@@ -2629,6 +2637,10 @@ describe Repository do ...@@ -2629,6 +2637,10 @@ describe Repository do
expect { repository.create_if_not_exists }.not_to raise_error expect { repository.create_if_not_exists }.not_to raise_error
end end
it 'returns nil' do
expect(repository.create_if_not_exists).to be_nil
end
end end
end end
......
...@@ -35,20 +35,27 @@ describe Projects::ImportExport::ExportService do ...@@ -35,20 +35,27 @@ describe Projects::ImportExport::ExportService do
end end
it 'saves the repo' do it 'saves the repo' do
# This spec errors when run against the EE codebase as there will be a third repository
# saved (the EE-specific design repository).
#
# Instead, skip this test when run within EE. There is a spec for the EE-specific design repo
# in the corresponding EE spec.
skip if Gitlab.ee?
# once for the normal repo, once for the wiki # once for the normal repo, once for the wiki
expect(Gitlab::ImportExport::RepoSaver).to receive(:new).twice.and_call_original expect(Gitlab::ImportExport::RepoSaver).to receive(:new).twice.and_call_original
service.execute service.execute
end end
it 'saves the lfs objects' do it 'saves the wiki repo' do
expect(Gitlab::ImportExport::LfsSaver).to receive(:new).and_call_original expect(Gitlab::ImportExport::WikiRepoSaver).to receive(:new).and_call_original
service.execute service.execute
end end
it 'saves the wiki repo' do it 'saves the lfs objects' do
expect(Gitlab::ImportExport::WikiRepoSaver).to receive(:new).and_call_original expect(Gitlab::ImportExport::LfsSaver).to receive(:new).and_call_original
service.execute service.execute
end end
...@@ -98,9 +105,9 @@ describe Projects::ImportExport::ExportService do ...@@ -98,9 +105,9 @@ describe Projects::ImportExport::ExportService do
end end
end end
context 'when saver services fail' do context 'when saving services fail' do
before do before do
allow(service).to receive(:save_services).and_return(false) allow(service).to receive(:save_exporters).and_return(false)
end end
after do after do
...@@ -122,7 +129,7 @@ describe Projects::ImportExport::ExportService do ...@@ -122,7 +129,7 @@ describe Projects::ImportExport::ExportService do
expect(Rails.logger).to receive(:error) expect(Rails.logger).to receive(:error)
end end
it 'the after export strategy is not called' do it 'does not call the export strategy' do
expect(service).not_to receive(:execute_after_export_action) expect(service).not_to receive(:execute_after_export_action)
end end
end end
......
# frozen_string_literal: true
# Shared examples for ProjectTreeRestorer (shared to allow the testing
# of EE-specific features)
RSpec.shared_examples 'restores project correctly' do |**results|
it 'restores the project' do
expect(shared.errors).to be_empty
expect(restored_project_json).to be_truthy
end
it 'has labels' do
labels_size = results.fetch(:labels, 0)
expect(project.labels.size).to eq(labels_size)
end
it 'has label priorities' do
label_with_priorities = results[:label_with_priorities]
if label_with_priorities
expect(project.labels.find_by(title: label_with_priorities).priorities).not_to be_empty
end
end
it 'has milestones' do
expect(project.milestones.size).to eq(results.fetch(:milestones, 0))
end
it 'has issues' do
expect(project.issues.size).to eq(results.fetch(:issues, 0))
end
it 'does not set params that are excluded from import_export settings' do
expect(project.import_type).to be_nil
expect(project.creator_id).not_to eq 123
end
end