Commit 00df1306 authored by Rémy Coutable

Merge remote-tracking branch 'origin/master' into ce-to-ee-2017-05-17

Signed-off-by: Rémy Coutable <remy@rymai.me>
parents 0a1885a6 322af127
......@@ -144,6 +144,13 @@ stages:
<<: *only-master-and-ee-or-mysql
<<: *except-docs
.only-canonical-masters: &only-canonical-masters
only:
- master@gitlab-org/gitlab-ce
- master@gitlab-org/gitlab-ee
- master@gitlab/gitlabhq
- master@gitlab/gitlab-ee
# Trigger a package build on omnibus-gitlab repository
build-package:
......@@ -173,17 +180,13 @@ knapsack:
update-knapsack:
<<: *knapsack-state
<<: *dedicated-runner
<<: *only-canonical-masters
stage: post-test
script:
- scripts/merge-reports ${KNAPSACK_RSPEC_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/rspec_pg_node_*.json
- scripts/merge-reports ${KNAPSACK_SPINACH_SUITE_REPORT_PATH} knapsack/${CI_PROJECT_NAME}/spinach_pg_node_*.json
- '[[ -z ${KNAPSACK_S3_BUCKET} ]] || scripts/sync-reports put $KNAPSACK_S3_BUCKET $KNAPSACK_RSPEC_SUITE_REPORT_PATH $KNAPSACK_SPINACH_SUITE_REPORT_PATH'
- rm -f knapsack/${CI_PROJECT_NAME}/*_node_*.json
only:
- master@gitlab-org/gitlab-ce
- master@gitlab-org/gitlab-ee
- master@gitlab/gitlabhq
- master@gitlab/gitlab-ee
setup-test-env:
<<: *use-pg
......@@ -202,76 +205,75 @@ setup-test-env:
- public/assets
- tmp/tests
rspec pg 0 20: *rspec-knapsack-pg
rspec pg 1 20: *rspec-knapsack-pg
rspec pg 2 20: *rspec-knapsack-pg
rspec pg 3 20: *rspec-knapsack-pg
rspec pg 4 20: *rspec-knapsack-pg
rspec pg 5 20: *rspec-knapsack-pg
rspec pg 6 20: *rspec-knapsack-pg
rspec pg 7 20: *rspec-knapsack-pg
rspec pg 8 20: *rspec-knapsack-pg
rspec pg 9 20: *rspec-knapsack-pg
rspec pg 10 20: *rspec-knapsack-pg
rspec pg 11 20: *rspec-knapsack-pg
rspec pg 12 20: *rspec-knapsack-pg
rspec pg 13 20: *rspec-knapsack-pg
rspec pg 14 20: *rspec-knapsack-pg
rspec pg 15 20: *rspec-knapsack-pg
rspec pg 16 20: *rspec-knapsack-pg
rspec pg 17 20: *rspec-knapsack-pg
rspec pg 18 20: *rspec-knapsack-pg
rspec pg 19 20: *rspec-knapsack-pg
rspec mysql 0 20: *rspec-knapsack-mysql
rspec mysql 1 20: *rspec-knapsack-mysql
rspec mysql 2 20: *rspec-knapsack-mysql
rspec mysql 3 20: *rspec-knapsack-mysql
rspec mysql 4 20: *rspec-knapsack-mysql
rspec mysql 5 20: *rspec-knapsack-mysql
rspec mysql 6 20: *rspec-knapsack-mysql
rspec mysql 7 20: *rspec-knapsack-mysql
rspec mysql 8 20: *rspec-knapsack-mysql
rspec mysql 9 20: *rspec-knapsack-mysql
rspec mysql 10 20: *rspec-knapsack-mysql
rspec mysql 11 20: *rspec-knapsack-mysql
rspec mysql 12 20: *rspec-knapsack-mysql
rspec mysql 13 20: *rspec-knapsack-mysql
rspec mysql 14 20: *rspec-knapsack-mysql
rspec mysql 15 20: *rspec-knapsack-mysql
rspec mysql 16 20: *rspec-knapsack-mysql
rspec mysql 17 20: *rspec-knapsack-mysql
rspec mysql 18 20: *rspec-knapsack-mysql
rspec mysql 19 20: *rspec-knapsack-mysql
spinach pg 0 10: *spinach-knapsack-pg
spinach pg 1 10: *spinach-knapsack-pg
spinach pg 2 10: *spinach-knapsack-pg
spinach pg 3 10: *spinach-knapsack-pg
spinach pg 4 10: *spinach-knapsack-pg
spinach pg 5 10: *spinach-knapsack-pg
spinach pg 6 10: *spinach-knapsack-pg
spinach pg 7 10: *spinach-knapsack-pg
spinach pg 8 10: *spinach-knapsack-pg
spinach pg 9 10: *spinach-knapsack-pg
spinach mysql 0 10: *spinach-knapsack-mysql
spinach mysql 1 10: *spinach-knapsack-mysql
spinach mysql 2 10: *spinach-knapsack-mysql
spinach mysql 3 10: *spinach-knapsack-mysql
spinach mysql 4 10: *spinach-knapsack-mysql
spinach mysql 5 10: *spinach-knapsack-mysql
spinach mysql 6 10: *spinach-knapsack-mysql
spinach mysql 7 10: *spinach-knapsack-mysql
spinach mysql 8 10: *spinach-knapsack-mysql
spinach mysql 9 10: *spinach-knapsack-mysql
# Other generic tests
rspec-pg 0 20: *rspec-knapsack-pg
rspec-pg 1 20: *rspec-knapsack-pg
rspec-pg 2 20: *rspec-knapsack-pg
rspec-pg 3 20: *rspec-knapsack-pg
rspec-pg 4 20: *rspec-knapsack-pg
rspec-pg 5 20: *rspec-knapsack-pg
rspec-pg 6 20: *rspec-knapsack-pg
rspec-pg 7 20: *rspec-knapsack-pg
rspec-pg 8 20: *rspec-knapsack-pg
rspec-pg 9 20: *rspec-knapsack-pg
rspec-pg 10 20: *rspec-knapsack-pg
rspec-pg 11 20: *rspec-knapsack-pg
rspec-pg 12 20: *rspec-knapsack-pg
rspec-pg 13 20: *rspec-knapsack-pg
rspec-pg 14 20: *rspec-knapsack-pg
rspec-pg 15 20: *rspec-knapsack-pg
rspec-pg 16 20: *rspec-knapsack-pg
rspec-pg 17 20: *rspec-knapsack-pg
rspec-pg 18 20: *rspec-knapsack-pg
rspec-pg 19 20: *rspec-knapsack-pg
rspec-mysql 0 20: *rspec-knapsack-mysql
rspec-mysql 1 20: *rspec-knapsack-mysql
rspec-mysql 2 20: *rspec-knapsack-mysql
rspec-mysql 3 20: *rspec-knapsack-mysql
rspec-mysql 4 20: *rspec-knapsack-mysql
rspec-mysql 5 20: *rspec-knapsack-mysql
rspec-mysql 6 20: *rspec-knapsack-mysql
rspec-mysql 7 20: *rspec-knapsack-mysql
rspec-mysql 8 20: *rspec-knapsack-mysql
rspec-mysql 9 20: *rspec-knapsack-mysql
rspec-mysql 10 20: *rspec-knapsack-mysql
rspec-mysql 11 20: *rspec-knapsack-mysql
rspec-mysql 12 20: *rspec-knapsack-mysql
rspec-mysql 13 20: *rspec-knapsack-mysql
rspec-mysql 14 20: *rspec-knapsack-mysql
rspec-mysql 15 20: *rspec-knapsack-mysql
rspec-mysql 16 20: *rspec-knapsack-mysql
rspec-mysql 17 20: *rspec-knapsack-mysql
rspec-mysql 18 20: *rspec-knapsack-mysql
rspec-mysql 19 20: *rspec-knapsack-mysql
spinach-pg 0 10: *spinach-knapsack-pg
spinach-pg 1 10: *spinach-knapsack-pg
spinach-pg 2 10: *spinach-knapsack-pg
spinach-pg 3 10: *spinach-knapsack-pg
spinach-pg 4 10: *spinach-knapsack-pg
spinach-pg 5 10: *spinach-knapsack-pg
spinach-pg 6 10: *spinach-knapsack-pg
spinach-pg 7 10: *spinach-knapsack-pg
spinach-pg 8 10: *spinach-knapsack-pg
spinach-pg 9 10: *spinach-knapsack-pg
spinach-mysql 0 10: *spinach-knapsack-mysql
spinach-mysql 1 10: *spinach-knapsack-mysql
spinach-mysql 2 10: *spinach-knapsack-mysql
spinach-mysql 3 10: *spinach-knapsack-mysql
spinach-mysql 4 10: *spinach-knapsack-mysql
spinach-mysql 5 10: *spinach-knapsack-mysql
spinach-mysql 6 10: *spinach-knapsack-mysql
spinach-mysql 7 10: *spinach-knapsack-mysql
spinach-mysql 8 10: *spinach-knapsack-mysql
spinach-mysql 9 10: *spinach-knapsack-mysql
# Static analysis jobs
.ruby-static-analysis: &ruby-static-analysis
variables:
SIMPLECOV: "false"
SETUP_DB: "false"
USE_BUNDLE_INSTALL: "true"
.rake-exec: &rake-exec
<<: *ruby-static-analysis
......@@ -316,6 +318,7 @@ downtime_check:
- /^[\d-]+-stable(-ee)?$/
- /(^docs[\/-].*|.*-docs$)/
# DB migration, rollback, and seed jobs
.db-migrate-reset: &db-migrate-reset
stage: test
<<: *dedicated-runner
......@@ -323,14 +326,38 @@ downtime_check:
script:
- bundle exec rake db:migrate:reset
rake pg db:migrate:reset:
db:migrate:reset-pg:
<<: *db-migrate-reset
<<: *use-pg
rake mysql db:migrate:reset:
db:migrate:reset-mysql:
<<: *db-migrate-reset
<<: *use-mysql
.migration-paths: &migration-paths
stage: test
<<: *dedicated-runner
variables:
SETUP_DB: "false"
<<: *only-canonical-masters
script:
- git fetch origin v8.14.10
- git checkout -f FETCH_HEAD
- bundle install $BUNDLE_INSTALL_FLAGS
- bundle exec rake db:drop db:create db:schema:load db:seed_fu
- git checkout $CI_COMMIT_SHA
- bundle install $BUNDLE_INSTALL_FLAGS
- . scripts/prepare_build.sh
- bundle exec rake db:migrate
migration:path-pg:
<<: *migration-paths
<<: *use-pg
migration:path-mysql:
<<: *migration-paths
<<: *use-mysql
.db-rollback: &db-rollback
stage: test
<<: *dedicated-runner
......@@ -339,11 +366,11 @@ rake mysql db:migrate:reset:
- bundle exec rake db:rollback STEP=120
- bundle exec rake db:migrate
rake pg db:rollback:
db:rollback-pg:
<<: *db-rollback
<<: *use-pg
rake mysql db:rollback:
db:rollback-mysql:
<<: *db-rollback
<<: *use-mysql
......@@ -365,15 +392,16 @@ rake mysql db:rollback:
paths:
- log/development.log
rake pg db:seed_fu:
db:seed_fu-pg:
<<: *db-seed_fu
<<: *use-pg
rake mysql db:seed_fu:
db:seed_fu-mysql:
<<: *db-seed_fu
<<: *use-mysql
rake gitlab:assets:compile:
# Frontend-related jobs
gitlab:assets:compile:
stage: test
<<: *dedicated-runner
<<: *except-docs
......@@ -394,7 +422,14 @@ rake gitlab:assets:compile:
paths:
- webpack-report/
<<<<<<< HEAD
rake karma:
=======
karma:
cache:
paths:
- vendor/ruby
>>>>>>> origin/master
stage: test
<<: *use-pg
<<: *dedicated-runner
......@@ -410,34 +445,6 @@ rake karma:
paths:
- coverage-javascript/
.migration-paths: &migration-paths
stage: test
<<: *dedicated-runner
variables:
SETUP_DB: "false"
only:
- master@gitlab-org/gitlab-ce
- master@gitlab-org/gitlab-ee
- master@gitlab/gitlabhq
- master@gitlab/gitlab-ee
script:
- git fetch origin v8.14.10
- git checkout -f FETCH_HEAD
- bundle install $BUNDLE_INSTALL_FLAGS
- bundle exec rake db:drop db:create db:schema:load db:seed_fu
- git checkout $CI_COMMIT_SHA
- bundle install $BUNDLE_INSTALL_FLAGS
- . scripts/prepare_build.sh
- bundle exec rake db:migrate
migration pg paths:
<<: *migration-paths
<<: *use-pg
migration mysql paths:
<<: *migration-paths
<<: *use-mysql
coverage:
stage: post-test
services: []
......@@ -494,8 +501,8 @@ pages:
<<: *dedicated-runner
dependencies:
- coverage
- rake karma
- rake gitlab:assets:compile
- karma
- gitlab:assets:compile
- lint:javascript:report
script:
- mv public/ .public/
......
......@@ -971,6 +971,15 @@ RSpec/DescribeSymbol:
RSpec/DescribedClass:
Enabled: true
# Configuration parameters: CustomIncludeMethods.
RSpec/EmptyExampleGroup:
Enabled: true
CustomIncludeMethods:
- run_permission_checks
- run_group_permission_checks
- it_should_email!
- it_should_not_email!
# Checks for long example.
RSpec/ExampleLength:
Enabled: false
......
......@@ -10,11 +10,6 @@
RSpec/BeforeAfterAll:
Enabled: false
# Offense count: 15
# Configuration parameters: CustomIncludeMethods.
RSpec/EmptyExampleGroup:
Enabled: false
# Offense count: 233
RSpec/EmptyLineAfterFinalLet:
Enabled: false
......
......@@ -187,6 +187,7 @@ export default {
>
<a
class="user-link has-tooltip"
data-container="body"
data-placement="bottom"
:href="assigneeUrl(user)"
:data-title="user.name"
......
......@@ -64,10 +64,51 @@ function UsersSelect(currentUser, els) {
unassignedSelected.remove();
}
<<<<<<< HEAD
// Save current selected user to the DOM
const input = document.createElement('input');
input.type = 'hidden';
input.name = $dropdown.data('field-name');
=======
$els.each((function(_this) {
return function(i, dropdown) {
var options = {};
var $block, $collapsedSidebar, $dropdown, $loading, $selectbox, $value, abilityName, assignTo, assigneeTemplate, collapsedAssigneeTemplate, defaultLabel, defaultNullUser, firstUser, issueURL, selectedId, selectedIdDefault, showAnyUser, showNullUser, showMenuAbove;
$dropdown = $(dropdown);
options.projectId = $dropdown.data('project-id');
options.groupId = $dropdown.data('group-id');
options.showCurrentUser = $dropdown.data('current-user');
options.todoFilter = $dropdown.data('todo-filter');
options.todoStateFilter = $dropdown.data('todo-state-filter');
showNullUser = $dropdown.data('null-user');
defaultNullUser = $dropdown.data('null-user-default');
showMenuAbove = $dropdown.data('showMenuAbove');
showAnyUser = $dropdown.data('any-user');
firstUser = $dropdown.data('first-user');
options.authorId = $dropdown.data('author-id');
defaultLabel = $dropdown.data('default-label');
issueURL = $dropdown.data('issueUpdate');
$selectbox = $dropdown.closest('.selectbox');
$block = $selectbox.closest('.block');
abilityName = $dropdown.data('ability-name');
$value = $block.find('.value');
$collapsedSidebar = $block.find('.sidebar-collapsed-user');
$loading = $block.find('.block-loading').fadeOut();
selectedIdDefault = (defaultNullUser && showNullUser) ? 0 : null;
selectedId = $dropdown.data('selected');
if (selectedId === undefined) {
selectedId = selectedIdDefault;
}
const assignYourself = function () {
const unassignedSelected = $dropdown.closest('.selectbox')
.find(`input[name='${$dropdown.data('field-name')}'][value=0]`);
if (unassignedSelected) {
unassignedSelected.remove();
}
>>>>>>> origin/master
const currentUserInfo = $dropdown.data('currentUserInfo');
......@@ -451,11 +492,52 @@ function UsersSelect(currentUser, els) {
username = user.username ? "@" + user.username : "";
avatar = user.avatar_url ? user.avatar_url : false;
<<<<<<< HEAD
let selected = false;
=======
if (selectedId === gon.current_user_id) {
$('.assign-to-me-link').hide();
} else {
$('.assign-to-me-link').show();
}
return;
}
if ($el.closest('.add-issues-modal').length) {
gl.issueBoards.ModalStore.store.filter[$dropdown.data('field-name')] = user.id;
} else if ($dropdown.hasClass('js-filter-submit') && (isIssueIndex || isMRIndex)) {
return Issuable.filterResults($dropdown.closest('form'));
} else if ($dropdown.hasClass('js-filter-submit')) {
return $dropdown.closest('form').submit();
} else if (!$dropdown.hasClass('js-multiselect')) {
selected = $dropdown.closest('.selectbox').find("input[name='" + ($dropdown.data('field-name')) + "']").val();
return assignTo(selected);
}
// Automatically close dropdown after assignee is selected
// since CE has no multiple assignees
// EE does not have a max-select
if ($dropdown.data('max-select') &&
getSelected().length === $dropdown.data('max-select')) {
// Close the dropdown
$dropdown.dropdown('toggle');
}
},
id: function (user) {
return user.id;
},
opened: function(e) {
const $el = $(e.currentTarget);
const selected = getSelected();
if ($dropdown.hasClass('js-issue-board-sidebar') && selected.length === 0) {
this.addInput($dropdown.data('field-name'), 0, {});
}
$el.find('.is-active').removeClass('is-active');
>>>>>>> origin/master
if (this.multiSelect) {
selected = getSelected().find(u => user.id === u);
<<<<<<< HEAD
const fieldName = this.fieldName;
const field = $dropdown.closest('.selectbox').find("input[name='" + fieldName + "'][value='" + user.id + "']");
......@@ -474,6 +556,29 @@ function UsersSelect(currentUser, els) {
img = "<img src='" + avatar + "' class='avatar avatar-inline' width='32' />";
}
}
=======
if (selected.length > 0) {
getSelected().forEach(selectedId => highlightSelected(selectedId));
} else if ($dropdown.hasClass('js-issue-board-sidebar')) {
highlightSelected(0);
} else {
highlightSelected(selectedId);
}
},
updateLabel: $dropdown.data('dropdown-title'),
renderRow: function(user) {
var avatar, img, listClosingTags, listWithName, listWithUserName, username;
username = user.username ? "@" + user.username : "";
avatar = user.avatar_url ? user.avatar_url : false;
let selected = false;
if (this.multiSelect) {
selected = getSelected().find(u => user.id === u);
const fieldName = this.fieldName;
const field = $dropdown.closest('.selectbox').find("input[name='" + fieldName + "'][value='" + user.id + "']");
>>>>>>> origin/master
return `
<li data-user-id=${user.id}>
......@@ -527,6 +632,8 @@ function UsersSelect(currentUser, els) {
break;
}
}
} else {
selected = user.id === selectedId;
}
if (showNullUser) {
nullUser = {
......
......@@ -70,6 +70,7 @@ export default {
</span>
</div>
<div class="normal">
<<<<<<< HEAD
<strong>
Request to merge
<span
......@@ -98,6 +99,31 @@ export default {
</a>
</span>
</strong>
=======
<b>Request to merge</b>
<span
class="label-branch"
:class="{'label-truncated has-tooltip': isBranchTitleLong(mr.sourceBranch)}"
:title="isBranchTitleLong(mr.sourceBranch) ? mr.sourceBranch : ''"
data-placement="bottom"
v-html="mr.sourceBranchLink"></span>
<button
class="btn btn-transparent btn-clipboard has-tooltip"
data-title="Copy branch name to clipboard"
:data-clipboard-text="mr.sourceBranch">
<i
aria-hidden="true"
class="fa fa-clipboard"></i>
</button>
<b>into</b>
<span
class="label-branch"
:class="{'label-truncated has-tooltip': isBranchTitleLong(mr.targetBranch)}"
:title="isBranchTitleLong(mr.targetBranch) ? mr.targetBranch : ''"
data-placement="bottom">
<a :href="mr.targetBranchCommitsPath">{{mr.targetBranch}}</a>
</span>
>>>>>>> origin/master
<span
v-if="shouldShowCommitsBehindText"
class="diverged-commits-count">
......
......@@ -19,6 +19,11 @@ export default {
isMakingRequest: false,
};
},
computed: {
isApprovalsLeft() {
return this.mr.approvals && this.mr.approvalsLeft;
},
},
methods: {
rebase() {
this.isMakingRequest = true;
......@@ -82,7 +87,7 @@ export default {
<div class="accept-merge-holder clearfix js-toggle-container accept-action">
<button
class="btn btn-small btn-reopen btn-success"
:disabled="mr.approvalsLeft || isMakingRequest"
:disabled="isApprovalsLeft || isMakingRequest"
@click="rebase">
<i
v-if="isMakingRequest"
......@@ -98,7 +103,7 @@ export default {
</div>
<div class="mr-info-list">
<div class="legend"></div>
<p v-if="mr.approvalsLeft">
<p v-if="isApprovalsLeft">
Rebasing is disabled until merge request has been approved.
</p>
</div>
......
......@@ -31,7 +31,7 @@ export default class MergeRequestStore extends CEMergeRequestStore {
}
initApprovals(data) {
this.isApproved = data.approved || false;
this.isApproved = this.isApproved || false;
this.approvals = this.approvals || null;
this.approvalsPath = data.approvals_path || this.approvalsPath;
this.approvalsRequired = Boolean(this.approvalsPath);
......@@ -40,7 +40,7 @@ export default class MergeRequestStore extends CEMergeRequestStore {
setApprovals(data) {
this.approvals = data;
this.approvalsLeft = !!data.approvals_left;
this.isApproved = data.approved || !this.approvalsLeft || false;
this.isApproved = !this.approvalsLeft || false;
this.preventMerge = this.approvalsRequired && this.approvalsLeft;
}
}
......@@ -177,7 +177,6 @@ export default {
});
},
handleMounted() {
this.checkStatus();
this.setFavicon();
this.initDeploymentsPolling();
},
......
......@@ -6,7 +6,7 @@ Vue.use(VueResource);
export default class MRWidgetService {
constructor(endpoints) {
this.mergeResource = Vue.resource(endpoints.mergePath);
this.mergeCheckResource = Vue.resource(endpoints.mergeCheckPath);
this.mergeCheckResource = Vue.resource(endpoints.statusPath);
this.cancelAutoMergeResource = Vue.resource(endpoints.cancelAutoMergePath);
this.removeWIPResource = Vue.resource(endpoints.removeWIPPath);
this.removeSourceBranchResource = Vue.resource(endpoints.sourceBranchPath);
......
......@@ -24,7 +24,7 @@ header {
&.navbar-gitlab {
padding: 0 16px;
z-index: 100;
z-index: 400;
margin-bottom: 0;
min-height: $header-height;
background-color: $gray-light;
......
......@@ -445,9 +445,6 @@
}
.participants-list {
display: flex;
flex-wrap: wrap;
justify-content: space-between;
margin: -5px;
}
......@@ -457,9 +454,13 @@
}
.participants-author {
flex-basis: 14%;
display: inline-block;
padding: 5px;
&:nth-of-type(7n) {
padding-right: 0;
}
.author_link {
display: block;
}
......
......@@ -34,6 +34,8 @@ class OmniauthCallbacksController < Devise::OmniauthCallbacksController
prompt_for_two_factor(@user)
else
log_audit_event(@user, with: :ldap)
flash[:notice] = 'LDAP sync in progress. This could take a few minutes. '\
'Refresh the page to see the changes.'
sign_in_and_redirect(@user)
end
else
......
......@@ -9,17 +9,18 @@ class Projects::MergeRequestsController < Projects::ApplicationController
before_action :module_enabled
before_action :merge_request, only: [
:edit, :update, :show, :diffs, :commits, :conflicts, :conflict_for_path, :pipelines, :merge, :merge_check,
:edit, :update, :show, :diffs, :commits, :conflicts, :conflict_for_path, :pipelines, :merge,
:pipeline_status, :ci_environments_status, :toggle_subscription, :cancel_merge_when_pipeline_succeeds,
:remove_wip, :resolve_conflicts, :assign_related_issues, :commit_change_content,
# EE
:approve, :approvals, :unapprove, :rebase
]
before_action :validates_merge_request, only: [:show, :diffs, :commits, :pipelines]
before_action :define_show_vars, only: [:show, :diffs, :commits, :conflicts, :conflict_for_path, :builds, :pipelines]
before_action :define_show_vars, only: [:diffs, :commits, :conflicts, :conflict_for_path, :builds, :pipelines]
before_action :define_commit_vars, only: [:diffs]
before_action :ensure_ref_fetched, only: [:show, :diffs, :commits, :builds, :conflicts, :conflict_for_path, :pipelines]
before_action :close_merge_request_without_source_project, only: [:show, :diffs, :commits, :builds, :pipelines]
before_action :check_if_can_be_merged, only: :show
before_action :apply_diff_view_cookie!, only: [:new_diffs]
before_action :build_merge_request, only: [:new, :new_diffs]
before_action :set_suggested_approvers, only: [:new, :new_diffs, :edit]
......@@ -79,9 +80,12 @@ class Projects::MergeRequestsController < Projects::ApplicationController
respond_to do |format|
format.html do
define_discussion_vars
define_show_vars
end
format.json do
Gitlab::PollingInterval.set_header(response, interval: 10_000)
render json: serializer.represent(@merge_request, basic: params[:basic])
end
......@@ -321,12 +325,6 @@ class Projects::MergeRequestsController < Projects::ApplicationController
render json: serializer.represent(@merge_request)
end
def merge_check
@merge_request.check_if_can_be_merged
render json: serializer.represent(@merge_request)
end
def commit_change_content
render partial: 'projects/merge_requests/widget/commit_change_content', layout: false
end
......@@ -731,6 +729,10 @@ class Projects::MergeRequestsController < Projects::ApplicationController
private
def check_if_can_be_merged
@merge_request.check_if_can_be_merged
end
def merge!
# Disable the CI check if merge_when_pipeline_succeeds is enabled since we have
# to wait until CI completes to know
......
......@@ -47,7 +47,7 @@ class Namespace < ActiveRecord::Base
before_destroy(prepend: true) { prepare_for_destroy }
after_destroy :rm_dir
scope :root, -> { where('type IS NULL') }
scope :for_user, -> { where('type IS NULL') }
scope :with_statistics, -> do
joins('LEFT JOIN project_statistics ps ON ps.namespace_id = namespaces.id')
......
......@@ -656,22 +656,8 @@ class Repository
"#{name}-#{highest_branch_id + 1}"
end
# Remove archives older than 2 hours
def branches_sorted_by(value)
case value
when 'name'
branches.sort_by(&:name)
when 'updated_desc'
branches.sort do |a, b|
commit(b.dereferenced_target).committed_date <=> commit(a.dereferenced_target).committed_date
end
when 'updated_asc'
branches.sort do |a, b|
commit(a.dereferenced_target).committed_date <=> commit(b.dereferenced_target).committed_date
end
else
branches
end
raw_repository.local_branches(sort_by: value)
end
def tags_sorted_by(value)
......
......@@ -372,7 +372,7 @@ class User < ActiveRecord::Base
end
def find_by_full_path(path, follow_redirects: false)
namespace = Namespace.find_by_full_path(path, follow_redirects: follow_redirects)
namespace = Namespace.for_user.find_by_full_path(path, follow_redirects: follow_redirects)
namespace&.owner
end
......
class MergeRequestBasicEntity < Grape::Entity
expose :assignee_id
expose :merge_status
expose :merge_error
expose :state
......
class MergeRequestEntity < IssuableEntity
expose :assignee_id
include RequestAwareEntity
expose :in_progress_merge_commit_sha
......@@ -21,7 +20,6 @@ class MergeRequestEntity < IssuableEntity
expose :rebase_commit_sha
expose :rebase_in_progress?, as: :rebase_in_progress
expose :should_be_rebased?, as: :should_be_rebased
expose :approved?, as: :approved
expose :ff_only_enabled do |merge_request|
merge_request.project.merge_requests_ff_only_enabled
end
......@@ -174,12 +172,6 @@ class MergeRequestEntity < IssuableEntity
format: :json)
end
expose :merge_check_path do |merge_request|
merge_check_namespace_project_merge_request_path(merge_request.project.namespace,
merge_request.project,
merge_request)
end
expose :ci_environments_status_path do |merge_request|
ci_environments_status_namespace_project_merge_request_path(merge_request.project.namespace,
merge_request.project,
......
......@@ -167,7 +167,7 @@
:javascript
gl.sidebarOptions = {
endpoint: "#{issuable_json_path(issuable)}",
endpoint: "#{issuable_json_path(issuable)}?basic=true",
editable: #{can_edit_issuable ? true : false},
currentUser: #{current_user.to_json(only: [:username, :id, :name], methods: :avatar_url)},
rootPath: "#{root_path}"
......
......@@ -39,11 +39,20 @@
- unless issuable.assignees.any?
= hidden_field_tag "#{issuable.to_ability_name}[assignee_ids][]", 0, id: nil
- options[:toggle_class] += ' js-multiselect js-save-user-data'
<<<<<<< HEAD
- data = { field_name: "#{issuable.to_ability_name}[assignee_ids][]" }
- data[:multi_select] = true
- data['dropdown-title'] = title
- data['dropdown-header'] = 'Assignee'
- data['max-select'] = 1
- options[:data].merge!(data)
=======
- options[:data][:field_name] = "#{issuable.to_ability_name}[assignee_ids][]"
- options[:data][:multi_select] = true
- options[:data]['dropdown-title'] = title
- options[:data]['dropdown-header'] = 'Assignee'
- else
- title = 'Select assignee'
>>>>>>> origin/master
= dropdown_tag(title, options: options)
class LdapAllGroupsSyncWorker
include Sidekiq::Worker
include CronjobQueue
def perform
logger.info 'Started LDAP group sync'
EE::Gitlab::LDAP::Sync::Groups.execute
logger.info 'Finished LDAP group sync'
end
end
class LdapGroupSyncWorker
include Sidekiq::Worker
include CronjobQueue
include DedicatedSidekiqQueue
def perform(group_id = nil)
if group_id
group = Group.find_by(id: group_id)
unless group
logger.warn "Could not find group #{group_id} for LDAP group sync"
return
def perform(group_ids, provider = nil)
groups = Group.where(id: Array(group_ids))
if provider
EE::Gitlab::LDAP::Sync::Proxy.open(provider) do |proxy|
sync_groups(groups, proxy: proxy)
end
else
sync_groups(groups)
end
end
logger.info "Started LDAP group sync for group #{group.name} (#{group.id})"
EE::Gitlab::LDAP::Sync::Group.execute_all_providers(group)
logger.info "Finished LDAP group sync for group #{group.name} (#{group.id})"
def sync_groups(groups, proxy: nil)
groups.each { |group| sync_group(group, proxy: proxy) }
end
def sync_group(group, proxy: nil)
logger.info "Started LDAP group sync for group #{group.name} (#{group.id})"
if proxy
EE::Gitlab::LDAP::Sync::Group.execute(group, proxy)
else
logger.info 'Started LDAP group sync'
EE::Gitlab::LDAP::Sync::Groups.execute
logger.info 'Finished LDAP group sync'
EE::Gitlab::LDAP::Sync::Group.execute_all_providers(group)
end
logger.info "Finished LDAP group sync for group #{group.name} (#{group.id})"
end
end
---
title: Add a user's memberships when logging in through LDAP
merge_request: 1819
author:
---
title: Add support for find_local_branches GRPC from Gitaly
merge_request: 10059
author:
......@@ -393,7 +393,7 @@ Settings.cron_jobs['ldap_sync_worker']['cron'] ||= '30 1 * * *'
Settings.cron_jobs['ldap_sync_worker']['job_class'] = 'LdapSyncWorker'
Settings.cron_jobs['ldap_group_sync_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['ldap_group_sync_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['ldap_group_sync_worker']['job_class'] = 'LdapGroupSyncWorker'
Settings.cron_jobs['ldap_group_sync_worker']['job_class'] = 'LdapAllGroupsSyncWorker'
Settings.cron_jobs['geo_bulk_notify_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_bulk_notify_worker']['cron'] ||= '*/10 * * * * *'
Settings.cron_jobs['geo_bulk_notify_worker']['job_class'] ||= 'GeoBulkNotifyWorker'
......
......@@ -74,7 +74,6 @@ constraints(ProjectUrlConstrainer.new) do
get :conflicts
get :conflict_for_path
get :pipelines
get :merge_check
get :commit_change_content
post :merge
post :cancel_merge_when_pipeline_succeeds
......
......@@ -56,6 +56,7 @@
- [update_user_activity, 1]
- [propagate_service_template, 1]
# EE specific queues
- [ldap_group_sync, 2]
- [geo, 1]
- [project_mirror, 1]
- [project_update_repository_storage, 1]
......
......@@ -29,6 +29,11 @@ The process will also update the following user information:
## Group Sync
If your LDAP server supports the `memberof` property, GitLab will add the user to any
new groups they belong to when they log in. That way they don't need to wait for the
hourly sync to be granted access to the groups they are in on the LDAP server.
If `group_base` is set in LDAP configuration, a group sync process will run
every hour, on the hour. This allows GitLab group membership to be automatically
updated based on LDAP group members.
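For Omnibus installations, `group_base` is set in `/etc/gitlab/gitlab.rb` alongside the rest
of the LDAP settings. The snippet below is only an illustrative sketch with placeholder hosts
and DNs, not a complete or authoritative configuration; consult the LDAP documentation for
your installation for the full list of options.

```ruby
# /etc/gitlab/gitlab.rb (illustrative values only)
gitlab_rails['ldap_servers'] = YAML.load <<-EOS
  main:
    label: 'LDAP'
    host: 'ldap.example.com'
    port: 389
    uid: 'sAMAccountName'
    bind_dn: 'CN=query user,CN=Users,DC=example,DC=com'
    password: 'secret'
    # Enables the hourly group sync described above
    group_base: 'ou=groups,dc=example,dc=com'
EOS
```

Apply the change with `sudo gitlab-ctl reconfigure`.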
......
# Award Emoji
# Award Emoji API
> [Introduced][ce-4575] in GitLab 8.9, Snippet support in 8.12
......
# Boards
# Issue Boards API
Every API call to boards must be authenticated.
......
# Branches
# Branches API
## List repository branches
......
# Broadcast Messages
# Broadcast Messages API
> **Note:** This feature was introduced in GitLab 8.12.
......
# Build Variables
# Build Variables API
## List project variables
......
# Validate the .gitlab-ci.yml
# Validate the .gitlab-ci.yml (API)
> [Introduced][ce-5953] in GitLab 8.12.
......
# Runners API
# Register and Delete Runners API
API used by Runners to register and delete themselves.
......
# Adding deploy keys to multiple projects
# Adding deploy keys to multiple projects via API
If you want to easily add the same deploy key to multiple projects in the same
group, this can be achieved quite easily with the API.
......
# Deploy Keys
# Deploy Keys API
## List all deploy keys
......
# Milestones
# Milestones API
## List project milestones
......
# Namespaces
# Namespaces API
Usernames and groupnames fall under a special category called namespaces.
......
# Notes
# Notes API
Notes are comments on snippets, issues or merge requests.
......
# Pipeline triggers
# Pipeline triggers API
You can read more about [triggering pipelines through the API](../ci/triggers/README.md).
......
# Projects API
<<<<<<< HEAD
=======
>>>>>>> origin/master
### Project visibility level
......@@ -17,8 +20,6 @@ Constants for project visibility levels are next:
* `public`:
The project can be cloned without any authentication.
## List projects
Get a list of visible projects for authenticated user. When being accessed without authentication, all public projects are returned.
......
# Repositories
# Repositories API
## List repository tree
......
# Repository files
# Repository files API
**CRUD for repository files**
......
# Services
# Services API
## Asana
......
# Session
# Session API
## Deprecation Notice
......
# Sidekiq Metrics
# Sidekiq Metrics API
>**Note:** This endpoint is only available on GitLab 8.9 and above.
......
# System hooks
# System hooks API
All methods require administrator authorization.
......
# Tags
# Tags API
## List project repository tags
......
# Gitignores
# Gitignores API
## List gitignore templates
......
# GitLab CI YMLs
# GitLab CI YMLs API
## List GitLab CI YML templates
......
# Licenses
# Licenses API
## List license templates
......
# V3 to V4 version
# API V3 to API V4
Since GitLab 9.0, API V4 is the preferred version to be used.
......@@ -65,7 +65,6 @@ Below are the changes made between V3 and V4.
- Return 202 with JSON body on async removals on V4 API (`DELETE /projects/:id/repository/merged_branches` and `DELETE /projects/:id`) [!9449](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/9449)
- `GET /projects/:id/milestones?iid[]=x&iid[]=y` array filter has been renamed to `iids` [!9096](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/9096)
- Return basic info about pipeline in `GET /projects/:id/pipelines` [!8875](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/8875)
- Simplify project payload exposed on Environment endpoints [!9675](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/9675)
- Renamed all `build` references to `job` [!9463](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/9463)
- Drop `GET /projects/:id/repository/commits/:sha/jobs` [!9463](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/9463)
- Rename Build Triggers to be Pipeline Triggers API [!9713](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/9713)
......@@ -81,11 +80,4 @@ Below are the changes made between V3 and V4.
- `GET /projects/:id/repository/blobs/:sha` now returns JSON attributes for the blob identified by `:sha`, instead of finding the commit identified by `:sha` and returning the raw content of the blob in that commit identified by the required `?filepath=:filepath`
- Moved `GET /projects/:id/repository/commits/:sha/blob?file_path=:file_path` and `GET /projects/:id/repository/blobs/:sha?file_path=:file_path` to `GET /projects/:id/repository/files/:file_path/raw?ref=:sha`
- `GET /projects/:id/repository/tree` parameter `ref_name` has been renamed to `ref` for consistency
- `confirm` parameter for `POST /users` has been deprecated in favor of `skip_confirmation` parameter
#### EE-specific
- Remove the ProjectGitHook API. Use the ProjectPushRule API instead [!1301](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/1301)
- Removed `repository_storage` from `PUT /application/settings` and `GET /application/settings` (use `repository_storages` instead) [!1307](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/1307)
- Removed `elasticsearch_host` and `elasticsearch_port` from `PUT /application/settings` (use `elasticsearch_url` instead) [!1305](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/1305)
- Make approval API more RESTful. Use `POST /projects/:id/merge_requests/:merge_request_iid/unapprove` to unapprove a merge request. [!1518](https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/1518)
- `confirm` parameter for `POST /users` has been deprecated in favor of `skip_confirmation` parameter
......@@ -66,9 +66,10 @@ learn how to leverage its potential even more.
submodules are involved
- [Auto deploy](autodeploy/index.md)
- [Use SSH keys in your build environment](ssh_keys/README.md)
- [Trigger jobs through the GitLab API](triggers/README.md)
- [Deploy Boards](../user/project/deploy_boards.md) - Check the current health
and status of each CI environment running on Kubernetes
- [Trigger pipelines through the GitLab API](triggers/README.md)
- [Trigger pipelines on a schedule](../user/project/pipelines/schedules.md)
- [Deploy Boards](../user/project/deploy_boards.md) - Check the current health
## Review Apps
......
# Triggering jobs through the API
# Triggering pipelines through the API
> **Note**:
> **Notes**:
- [Introduced][ci-229] in GitLab CE 7.14.
- GitLab 8.12 has a completely redesigned job permissions system. Read all
about the [new model and its implications](../../user/project/new_ci_build_permissions_model.md#job-triggers).
......@@ -208,7 +208,7 @@ curl --request POST \
https://gitlab.example.com/api/v4/projects/9/trigger/pipeline
```
### Using webhook to trigger job
### Using a webhook to trigger a pipeline
You can add the following webhook to another project in order to trigger a pipeline:
......@@ -216,4 +216,18 @@ You can add the following webhook to another project in order to trigger a job:
https://gitlab.example.com/api/v4/projects/9/ref/master/trigger/pipeline?token=TOKEN&variables[UPLOAD_TO_S3]=true
```
### Using cron to trigger nightly pipelines
>**Note:**
The following behavior can also be achieved through GitLab's UI with
[pipeline schedules](../../user/project/pipelines/schedules.md).
Whether you craft a script or just run cURL directly, you can trigger pipelines
in conjunction with cron. The example below triggers a pipeline on the `master`
branch of the project with ID `9` every night at `00:30`:
```bash
30 0 * * * curl --request POST --form token=TOKEN --form ref=master https://gitlab.example.com/api/v4/projects/9/trigger/pipeline
```
[ci-229]: https://gitlab.com/gitlab-org/gitlab-ci/merge_requests/229
# Frontend Testing
There are two types of tests you'll encounter while developing frontend code
at GitLab. We use Karma and Jasmine for JavaScript unit testing, and RSpec
feature tests with Capybara for integration testing.
There are two types of test suites you'll encounter while developing frontend code
at GitLab. We use Karma and Jasmine for JavaScript unit and integration testing, and RSpec
feature tests with Capybara for e2e (end-to-end) integration testing.
Feature tests need to be written for all new features. Regression tests ought
to be written for all bug fixes to prevent them from recurring in the future.
Unit and feature tests need to be written for all new features.
Most of the time, you should use rspec for your feature tests.
There are cases where the behaviour you are testing is not worth the time spent running the full application.
For example, if you are testing styling, animation, or small actions that don't involve the backend,
write an integration test using Jasmine instead.
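For reference, an RSpec feature test of the kind mentioned above is usually a small Capybara
scenario along these lines. This is only a sketch; the factories, route helper, and `login_as`
sign-in helper used here are assumptions about the surrounding test suite, not an excerpt from it.

```ruby
require 'spec_helper'

describe 'Issue title', feature: true do
  let(:user)    { create(:user) }
  let(:project) { create(:project, :public) }
  let(:issue)   { create(:issue, project: project, title: 'Fix the flux capacitor') }

  before do
    login_as(user) # assumed sign-in helper
    visit namespace_project_issue_path(project.namespace, project, issue)
  end

  it 'shows the issue title on the issue page' do
    expect(page).to have_content('Fix the flux capacitor')
  end
end
```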
![Testing priority triangle](img/testing_triangle.png)
_This diagram demonstrates the relative priority of each test type we use_
Regression tests should be written for bug fixes to prevent them from recurring in the future.
See [the Testing Standards and Style Guidelines](../testing.md)
for more information on general testing practices at GitLab.
......@@ -13,10 +22,12 @@ for more information on general testing practices at GitLab.
## Karma test suite
GitLab uses the [Karma][karma] test runner with [Jasmine][jasmine] as its test
framework for our JavaScript unit tests. For tests that rely on DOM
manipulation, we generate HTML files using RSpec suites (see `spec/javascripts/fixtures/*.rb` for examples).
framework for our JavaScript unit and integration tests. For integration tests,
we generate HTML files using RSpec (see `spec/javascripts/fixtures/*.rb` for examples).
Some fixtures are still HAML templates that are translated to HTML files using the same mechanism (see `static_fixtures.rb`).
Those will be migrated over time.
Adding these static fixtures should be avoided as they are harder to keep up to date with real views.
The existing static fixtures will be migrated over time.
Please see [gitlab-org/gitlab-ce#24753](https://gitlab.com/gitlab-org/gitlab-ce/issues/24753) to track our progress.
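As a rough sketch of that mechanism (the controller, route parameters, and helper names such as
`JavaScriptFixturesHelpers` and `store_frontend_fixture` are assumptions based on the fixtures
directory, not a verbatim copy of any spec in it):

```ruby
require 'spec_helper'

describe Projects::IssuesController, '(JavaScript fixtures)', type: :controller do
  include JavaScriptFixturesHelpers

  render_views

  let(:admin)   { create(:admin) }
  let(:project) { create(:empty_project) }
  let(:issue)   { create(:issue, project: project) }

  before { sign_in(admin) }

  it 'issues/open-issue.html.raw' do |example|
    get :show, namespace_id: project.namespace.to_param, project_id: project, id: issue.to_param

    expect(response).to be_success
    # Writes the rendered page under spec/javascripts/fixtures/ so Karma can load it
    store_frontend_fixture(response, example.description)
  end
end
```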
Fixtures are served during testing by the [jasmine-jquery][jasmine-jquery] plugin.
JavaScript tests live in `spec/javascripts/`, matching the folder structure
......
......@@ -38,6 +38,11 @@ it's reassigned to someone else to take it from there.
if a user is not a member of that project, the issue can only be
assigned to them if they created it themselves.
In [GitLab Enterprise Edition Starter and up](https://about.gitlab.com/gitlab-ee/),
you can assign multiple people to an issue.
The interface is exactly the same, except that you can select multiple users in the dropdown.
The multiple assignees are also visible in issue lists and issue boards, and all get the same notifications.
#### 4. Milestone
- Select a [milestone](../milestones/index.md) to attribute that issue to.
......
# Pipeline Schedules
> **Notes**:
- This feature was introduced in 9.1 as [Trigger Schedule][ce-10533].
- In 9.2, the feature was [renamed to Pipeline Schedule][ce-10853].
- Cron notation is parsed by [Rufus-Scheduler](https://github.com/jmettraux/rufus-scheduler).
Pipeline schedules can be used to run pipelines only once, or for example every
month on the 22nd for a certain branch.
## Using Pipeline schedules
In order to schedule a pipeline:
1. Navigate to your project's **Pipelines ➔ Schedules** and click the
**New Schedule** button.
1. Fill in the form
1. Hit **Save pipeline schedule** for the changes to take effect.
![New Schedule Form](img/pipeline_schedules_new_form.png)
>**Attention:**
Scheduled pipelines won't run at the exact scheduled time, because schedules are
handled by Sidekiq, which runs according to its own interval.
See [advanced admin configuration](#advanced-admin-configuration) for more
information.
On the **Schedules** index page you can see the list of pipelines that are
scheduled to run. The next run time is calculated automatically by the server
GitLab is installed on.
![Schedules list](img/pipeline_schedules_list.png)
## Taking ownership
Pipelines are executed as the user who owns the schedule, which determines what
projects and other resources the pipeline has access to. If you do not own a
schedule, you can take ownership of it by clicking the **Take ownership** button.
The next time a pipeline is scheduled, your credentials will be used.
![Schedules list](img/pipeline_schedules_ownership.png)
>**Note:**
When the owner of the schedule can no longer create pipelines, for example
because they were blocked or removed from the project, the schedule is
deactivated. Another user can take ownership and activate it, so the schedule
can run again.
## Advanced admin configuration
Scheduled pipelines won't run at the exact scheduled time, because schedules are
handled by Sidekiq, which runs according to its own interval. For example, if you
set a schedule to create a pipeline every minute (`* * * * *`) but the Sidekiq
worker only runs at 00:00 and 12:00 every day (`0 */12 * * *`), only 2 pipelines
will be created per day. To change the Sidekiq worker's frequency, edit the
`trigger_schedule_worker_cron` value in your `gitlab.rb` and restart GitLab.
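For example, on an Omnibus installation this can be set in `/etc/gitlab/gitlab.rb`
(the 15-minute schedule below is only an illustration, and the `gitlab_rails[...]`
form is the standard Omnibus convention rather than something stated here):

```ruby
# Run the pipeline schedule worker every 15 minutes instead of the default
gitlab_rails['trigger_schedule_worker_cron'] = "*/15 * * * *"
```

Apply it with `sudo gitlab-ctl reconfigure`.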
For GitLab.com, you can check the [dedicated settings page][settings]. If you
don't have admin access to the server, ask your administrator.
[ce-10533]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/10533
[ce-10853]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/10853
[settings]: https://about.gitlab.com/gitlab-com/settings/#cron-jobs
module Gitlab
module Git
class Branch < Ref
def initialize(repository, name, target)
if target.is_a?(Gitaly::FindLocalBranchResponse)
target = target_from_gitaly_local_branches_response(target)
end
super(repository, name, target)
end
def target_from_gitaly_local_branches_response(response)
# Git messages have no encoding enforcements. However, in the UI we only
# handle UTF-8, so basically we cross our fingers that the message force
# encoded to UTF-8 is readable.
message = response.commit_subject.dup.force_encoding('UTF-8')
# NOTE: For ease of parsing in Gitaly, we have only the subject of
# the commit and not the full message. This is ok, since all the
# code that uses `local_branches` only cares at most about the
# commit message.
# TODO: Once gitaly "takes over" Rugged consider separating the
# subject from the message to make it clearer when there's one
# available but not the other.
hash = {
id: response.commit_id,
message: message,
authored_date: Time.at(response.commit_author.date.seconds),
author_name: response.commit_author.name,
author_email: response.commit_author.email,
committed_date: Time.at(response.commit_committer.date.seconds),
committer_name: response.commit_committer.name,
committer_email: response.commit_committer.email
}
Gitlab::Git::Commit.decorate(hash)
end
end
end
end
......@@ -19,13 +19,7 @@ module Gitlab
def ==(other)
return false unless other.is_a?(Gitlab::Git::Commit)
methods = [:message, :parent_ids, :authored_date, :author_name,
:author_email, :committed_date, :committer_name,
:committer_email]
methods.all? do |method|
send(method) == other.send(method)
end
id && id == other.id
end
class << self
......@@ -55,6 +49,7 @@ module Gitlab
# Commit.find(repo, 'master')
#
def find(repo, commit_id = "HEAD")
return commit_id if commit_id.is_a?(Gitlab::Git::Commit)
return decorate(commit_id) if commit_id.is_a?(Rugged::Commit)
obj = if commit_id.is_a?(String)
......
......@@ -80,14 +80,16 @@ module Gitlab
end
# Returns an Array of Branches
def branches
rugged.branches.map do |rugged_ref|
def branches(filter: nil, sort_by: nil)
branches = rugged.branches.each(filter).map do |rugged_ref|
begin
Gitlab::Git::Branch.new(self, rugged_ref.name, rugged_ref.target)
rescue Rugged::ReferenceError
# Omit invalid branch
end
end.compact.sort_by(&:name)
end.compact
sort_branches(branches, sort_by)
end
def reload_rugged
......@@ -108,9 +110,15 @@ module Gitlab
Gitlab::Git::Branch.new(self, rugged_ref.name, rugged_ref.target) if rugged_ref
end
def local_branches
rugged.branches.each(:local).map do |branch|
Gitlab::Git::Branch.new(self, branch.name, branch.target)
def local_branches(sort_by: nil)
gitaly_migrate(:local_branches) do |is_enabled|
if is_enabled
gitaly_ref_client.local_branches(sort_by: sort_by).map do |gitaly_branch|
Gitlab::Git::Branch.new(self, gitaly_branch.name, gitaly_branch)
end
else
branches(filter: :local, sort_by: sort_by)
end
end
end
......@@ -1202,6 +1210,23 @@ module Gitlab
diff.each_patch
end
def sort_branches(branches, sort_by)
case sort_by
when 'name'
branches.sort_by(&:name)
when 'updated_desc'
branches.sort do |a, b|
b.dereferenced_target.committed_date <=> a.dereferenced_target.committed_date
end
when 'updated_asc'
branches.sort do |a, b|
a.dereferenced_target.committed_date <=> b.dereferenced_target.committed_date
end
else
branches
end
end
def gitaly_ref_client
@gitaly_ref_client ||= Gitlab::GitalyClient::Ref.new(self)
end
......
......@@ -44,6 +44,12 @@ module Gitlab
branch_names.count
end
def local_branches(sort_by: nil)
request = Gitaly::FindLocalBranchesRequest.new(repository: @gitaly_repo)
request.sort_by = sort_by_param(sort_by) if sort_by
consume_branches_response(stub.find_local_branches(request))
end
private
def consume_refs_response(response, prefix:)
......@@ -51,6 +57,16 @@ module Gitlab
r.names.map { |name| name.sub(/\A#{Regexp.escape(prefix)}/, '') }
end
end
def sort_by_param(sort_by)
enum_value = Gitaly::FindLocalBranchesRequest::SortBy.resolve(sort_by.upcase.to_sym)
raise ArgumentError, "Invalid sort_by key `#{sort_by}`" unless enum_value
enum_value
end
def consume_branches_response(response)
response.flat_map { |r| r.branches }
end
end
end
end
......@@ -89,6 +89,7 @@ module Gitlab
def update_user
update_email
update_memberships
update_ssh_keys if sync_ssh_keys?
update_kerberos_identity if import_kerberos_identities?
end
......@@ -162,6 +163,16 @@ module Gitlab
ldap_config.active_directory && (Gitlab.config.kerberos.enabled || AuthHelper.kerberos_enabled? )
end
def update_memberships
return if ldap_user.nil? || ldap_user.group_cns.empty?
group_ids = LdapGroupLink.where(cn: ldap_user.group_cns, provider: provider)
.distinct(:group_id)
.pluck(:group_id)
LdapGroupSyncWorker.perform_async(group_ids, provider) if group_ids.any?
end
private
def logger
......
......@@ -107,7 +107,7 @@ module Gitlab
end
def user_attributes
%W(#{config.uid} cn mail dn)
%W(#{config.uid} cn mail dn memberof)
end
end
end
......
......@@ -45,6 +45,22 @@ module Gitlab
attribute_value(:email)
end
def memberof
return [] unless entry.attribute_names.include?(:memberof)
entry.memberof
end
def group_cns
memberof.map { |memberof_value| cn_from_memberof(memberof_value) }
end
def cn_from_memberof(memberof)
# Only get the first CN value of the string, that's the one that contains
# the group name
memberof.match(/(?:cn=([\w\s]+))/i)&.captures&.first
end
delegate :dn, to: :entry
private
......
......@@ -4,9 +4,22 @@ export SETUP_DB=${SETUP_DB:-true}
export USE_BUNDLE_INSTALL=${USE_BUNDLE_INSTALL:-true}
export BUNDLE_INSTALL_FLAGS="--without production --jobs $(nproc) --path vendor --retry 3 --quiet"
if [ "$USE_BUNDLE_INSTALL" != "false" ]; then
bundle install --clean $BUNDLE_INSTALL_FLAGS && bundle check
fi
# Only install knapsack after bundle install! Otherwise oddly some native
# gems could not be found under some circumstance. No idea why, hours wasted.
retry gem install knapsack fog-aws mime-types
cp config/resque.yml.example config/resque.yml
sed -i 's/localhost/redis/g' config/resque.yml
cp config/gitlab.yml.example config/gitlab.yml
# Determine the database by looking at the job name.
# For example, we'll get pg if the job is `rspec pg 19 20`
export GITLAB_DATABASE=$(echo $CI_JOB_NAME | cut -f2 -d' ')
# For example, we'll get pg if the job is `rspec-pg 19 20`
export GITLAB_DATABASE=$(echo $CI_JOB_NAME | cut -f1 -d' ' | cut -f2 -d-)
# This would make the default database postgresql, and we could also use
# pg to mean postgresql.
......@@ -24,7 +37,6 @@ if [ "$GITLAB_DATABASE" = 'postgresql' ]; then
# EE-only
sed -i 's/# host:.*/host: postgres/g' config/database_geo.yml
else # Assume it's mysql
sed -i 's/username:.*/username: root/g' config/database.yml
sed -i 's/password:.*/password:/g' config/database.yml
......@@ -36,19 +48,6 @@ else # Assume it's mysql
sed -i 's/# host:.*/host: mysql/g' config/database_geo.yml
fi
cp config/resque.yml.example config/resque.yml
sed -i 's/localhost/redis/g' config/resque.yml
cp config/gitlab.yml.example config/gitlab.yml
if [ "$USE_BUNDLE_INSTALL" != "false" ]; then
bundle install --clean $BUNDLE_INSTALL_FLAGS && bundle check
fi
# Only install knapsack after bundle install! Otherwise oddly some native
# gems could not be found under some circumstance. No idea why, hours wasted.
retry gem install knapsack fog-aws mime-types
if [ "$SETUP_DB" != "false" ]; then
bundle exec rake db:drop db:create db:schema:load db:migrate
......
......@@ -282,6 +282,18 @@ describe Projects::MergeRequestsController do
expect(response).to match_response_schema('entities/merge_request')
end
end
context 'number of queries' do
it 'verifies number of queries' do
# pre-create objects
merge_request
recorded = ActiveRecord::QueryRecorder.new { go(format: :json) }
expect(recorded.count).to be_within(1).of(100)
expect(recorded.cached_count).to eq(0)
end
end
end
describe "as diff" do
......
......@@ -92,7 +92,9 @@ FactoryGirl.define do
trait :test_repo do
after :create do |project|
TestEnv.copy_repo(project)
TestEnv.copy_repo(project,
bare_repo: TestEnv.factory_repo_path_bare,
refs: TestEnv::BRANCH_SHA)
end
end
......@@ -171,7 +173,9 @@ FactoryGirl.define do
end
after :create do |project, evaluator|
TestEnv.copy_repo(project)
TestEnv.copy_repo(project,
bare_repo: TestEnv.factory_repo_path_bare,
refs: TestEnv::BRANCH_SHA)
if evaluator.create_template
args = evaluator.create_template
......@@ -204,7 +208,9 @@ FactoryGirl.define do
path { 'forked-gitlabhq' }
after :create do |project|
TestEnv.copy_forked_repo_with_submodules(project)
TestEnv.copy_repo(project,
bare_repo: TestEnv.forked_repo_path_bare,
refs: TestEnv::FORKED_BRANCH_SHA)
end
end
......
......@@ -30,13 +30,6 @@ describe 'Issues', feature: true do
it 'opens new issue popup' do
expect(page).to have_content("Issue ##{issue.iid}")
end
describe 'fill in' do
before do
fill_in 'issue_title', with: 'bug 345'
fill_in 'issue_description', with: 'bug description'
end
end
end
describe 'Editing issue assignee' do
......@@ -593,15 +586,6 @@ describe 'Issues', feature: true do
expect(page).to have_content milestone.title
end
end
describe 'removing assignee' do
let(:user2) { create(:user) }
before do
issue.assignees << user2
issue.save
end
end
end
describe 'new issue' do
......
require 'spec_helper'
Dir["./spec/features/protected_tags/*.rb"].sort.each { |f| require f }
feature 'Protected Tags', feature: true, js: true do
let(:user) { create(:user, :admin) }
......
......@@ -3,7 +3,6 @@
"properties" : {
"id": { "type": "integer" },
"iid": { "type": "integer" },
"assignee_id": { "type": ["integer", "null"] },
"author_id": { "type": "integer" },
"description": { "type": ["string", "null"] },
"lock_version": { "type": ["string", "null"] },
......
......@@ -9,7 +9,8 @@
"human_time_estimate": { "type": ["string", "null"] },
"human_total_time_spent": { "type": ["string", "null"] },
"merge_error": { "type": ["string", "null"] },
"rebase_in_progress": { "type": "boolean" }
"rebase_in_progress": { "type": "boolean" },
"assignee_id": { "type": ["integer", "null"] }
},
"additionalProperties": false
}
describe('Sidebar', () => {
preloadFixtures('issues/open-issue.html.raw');
beforeEach(() => loadFixtures('issues/open-issue.html.raw'));
it('does not have a max select', () => {
const dropdown = document.querySelector('.js-author-search');
expect(dropdown.dataset.maxSelect).toBeUndefined();
});
});
......@@ -187,6 +187,21 @@ describe('Assignee component', () => {
expect(component.$el.querySelector('.user-list-more')).toBe(null);
});
it('sets tooltip container to body', () => {
const users = UsersMockHelper.createNumberRandomUsers(2);
component = new AssigneeComponent({
propsData: {
rootPath: 'http://localhost:3000',
users,
editable: true,
},
}).$mount();
expect(
component.$el.querySelector('.user-link').getAttribute('data-container'),
).toBe('body');
});
it('Shows the "show-less" assignees label', (done) => {
const users = UsersMockHelper.createNumberRandomUsers(6);
component = new AssigneeComponent({
......
......@@ -227,13 +227,11 @@ describe('mrWidgetOptions', () => {
describe('handleMounted', () => {
it('should call required methods to do the initial kick-off', () => {
spyOn(vm, 'checkStatus');
spyOn(vm, 'initDeploymentsPolling');
spyOn(vm, 'setFavicon');
vm.handleMounted();
expect(vm.checkStatus).toHaveBeenCalled();
expect(vm.setFavicon).toHaveBeenCalled();
expect(vm.initDeploymentsPolling).toHaveBeenCalled();
});
......
......@@ -43,7 +43,7 @@ describe Gitlab::LDAP::Adapter, lib: true do
describe '#user_attributes' do
it 'appends EE-specific attributes' do
stub_ldap_config(uid: 'uid', sync_ssh_keys: 'sshPublicKey')
expect(adapter.user_attributes).to match_array(%w(uid dn cn mail sshPublicKey))
expect(adapter.user_attributes).to match_array(%w(uid dn cn mail sshPublicKey memberof))
end
end
end
......@@ -65,10 +65,11 @@ describe EE::Gitlab::LDAP::Sync::Group, lib: true do
end
before do
allow(Gitlab::LDAP::Config)
.to receive(:providers).and_return(%w(main secondary))
allow(EE::Gitlab::LDAP::Sync::Proxy)
.to receive(:open).and_yield(double('proxy').as_null_object)
stub_ldap_config(providers: %w[main secondary])
adapter = ldap_adapter('main')
proxy = proxy(adapter, 'main')
allow(EE::Gitlab::LDAP::Sync::Proxy).to receive(:open).and_yield(proxy)
end
let(:group) do
......
......@@ -7,6 +7,51 @@ describe Gitlab::Git::Branch, seed_helper: true do
it { is_expected.to be_kind_of Array }
describe 'initialize' do
let(:commit_id) { 'f00' }
let(:commit_subject) { "My commit".force_encoding('ASCII-8BIT') }
let(:committer) do
Gitaly::FindLocalBranchCommitAuthor.new(
name: generate(:name),
email: generate(:email),
date: Google::Protobuf::Timestamp.new(seconds: 123)
)
end
let(:author) do
Gitaly::FindLocalBranchCommitAuthor.new(
name: generate(:name),
email: generate(:email),
date: Google::Protobuf::Timestamp.new(seconds: 456)
)
end
let(:gitaly_branch) do
Gitaly::FindLocalBranchResponse.new(
name: 'foo', commit_id: commit_id, commit_subject: commit_subject,
commit_author: author, commit_committer: committer
)
end
let(:attributes) do
{
id: commit_id,
message: commit_subject,
authored_date: Time.at(author.date.seconds),
author_name: author.name,
author_email: author.email,
committed_date: Time.at(committer.date.seconds),
committer_name: committer.name,
committer_email: committer.email
}
end
let(:branch) { described_class.new(repository, 'foo', gitaly_branch) }
it 'parses Gitaly::FindLocalBranchResponse correctly' do
expect(Gitlab::Git::Commit).to receive(:decorate).
with(hash_including(attributes)).and_call_original
expect(branch.dereferenced_target.message.encoding).to be(Encoding::UTF_8)
end
end
describe '#size' do
subject { super().size }
it { is_expected.to eq(SeedRepo::Repo::BRANCHES.size) }
......
......@@ -1105,7 +1105,9 @@ describe Gitlab::Git::Repository, seed_helper: true do
ref = double()
allow(ref).to receive(:name) { 'bad-branch' }
allow(ref).to receive(:target) { raise Rugged::ReferenceError }
allow(repository.rugged).to receive(:branches) { [ref] }
branches = double()
allow(branches).to receive(:each) { [ref].each }
allow(repository.rugged).to receive(:branches) { branches }
end
it 'should return empty branches' do
......@@ -1289,7 +1291,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
describe '#local_branches' do
before(:all) do
@repo = Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH)
@repo = Gitlab::Git::Repository.new('default', File.join(TEST_MUTABLE_REPO_PATH, '.git'))
end
after(:all) do
......@@ -1304,6 +1306,28 @@ describe Gitlab::Git::Repository, seed_helper: true do
expect(@repo.local_branches.any? { |branch| branch.name == 'remote_branch' }).to eq(false)
expect(@repo.local_branches.any? { |branch| branch.name == 'local_branch' }).to eq(true)
end
context 'with gitaly enabled' do
before { stub_gitaly }
it 'gets the branches from GitalyClient' do
expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches).
and_return([])
@repo.local_branches
end
it 'wraps GRPC not found' do
expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches).
and_raise(GRPC::NotFound)
expect { @repo.local_branches }.to raise_error(Gitlab::Git::Repository::NoRepository)
end
it 'wraps GRPC exceptions' do
expect_any_instance_of(Gitlab::GitalyClient::Ref).to receive(:local_branches).
and_raise(GRPC::Unknown)
expect { @repo.local_branches }.to raise_error(Gitlab::Git::CommandError)
end
end
end
def create_remote_branch(remote_name, branch_name, source_branch_name)
......
......@@ -38,4 +38,27 @@ describe Gitlab::GitalyClient::Ref do
client.default_branch_name
end
end
describe '#local_branches' do
it 'sends a find_local_branches message' do
expect_any_instance_of(Gitaly::Ref::Stub).
to receive(:find_local_branches).with(gitaly_request_with_repo_path(repo_path)).
and_return([])
client.local_branches
end
it 'parses and sends the sort parameter' do
expect_any_instance_of(Gitaly::Ref::Stub).
to receive(:find_local_branches).
with(gitaly_request_with_params(sort_by: :UPDATED_DESC)).
and_return([])
client.local_branches(sort_by: 'updated_desc')
end
it 'raises an argument error if an invalid sort_by parameter is passed' do
expect { client.local_branches(sort_by: 'invalid_sort') }.to raise_error(ArgumentError)
end
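# A minimal sketch of the sort handling these examples describe (not the real
# client code; the constant and method names here are hypothetical):
#
#   SORT_BY_PARAM = { 'updated_desc' => :UPDATED_DESC }.freeze
#
#   def sort_by_param(sort_by)
#     SORT_BY_PARAM.fetch(sort_by) { raise ArgumentError, "invalid sort_by: #{sort_by}" }
#   end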
end
end
require 'spec_helper'
describe Gitlab::LDAP::Access, lib: true do
include LdapHelpers
let(:access) { Gitlab::LDAP::Access.new user }
let(:user) { create(:omniauth_user) }
......@@ -173,6 +174,12 @@ describe Gitlab::LDAP::Access, lib: true do
subject
end
it 'updates the group memberships' do
expect(access).to receive(:update_memberships).once
subject
end
it 'syncs ssh keys if enabled by configuration' do
allow(access).to receive_messages(group_base: '', sync_ssh_keys?: true)
expect(access).to receive(:update_ssh_keys).once
......@@ -305,4 +312,50 @@ describe Gitlab::LDAP::Access, lib: true do
expect{ access.update_email }.to change(user, :email)
end
end
describe '#update_memberships' do
let(:provider) { user.ldap_identity.provider }
let(:entry) { ldap_user_entry(user.ldap_identity.extern_uid) }
let(:person_with_memberof) do
entry['memberof'] = ['CN=Group1,CN=Users,DC=The dc,DC=com',
'CN=Group2,CN=Builtin,DC=The dc,DC=com']
Gitlab::LDAP::Person.new(entry, provider)
end
it 'triggers a sync for all groups found in `memberof`' do
group_link_1 = create(:ldap_group_link, cn: 'Group1', provider: provider)
group_link_2 = create(:ldap_group_link, cn: 'Group2', provider: provider)
group_ids = [group_link_1, group_link_2].map(&:group_id)
allow(access).to receive(:ldap_user).and_return(person_with_memberof)
expect(LdapGroupSyncWorker).to receive(:perform_async)
.with(group_ids, provider)
access.update_memberships
end
it "doesn't continue when there is no `memberOf` param" do
allow(access).to receive(:ldap_user)
.and_return(Gitlab::LDAP::Person.new(entry, provider))
expect(LdapGroupLink).not_to receive(:where)
expect(LdapGroupSyncWorker).not_to receive(:perform_async)
access.update_memberships
end
it "doesn't trigger a sync when there are no links for the provider" do
_another_provider = create(:ldap_group_link,
cn: 'Group1',
provider: 'not-this-ldap')
allow(access).to receive(:ldap_user).and_return(person_with_memberof)
expect(LdapGroupSyncWorker).not_to receive(:perform_async)
access.update_memberships
end
end
end
......@@ -5,6 +5,11 @@ describe Gitlab::LDAP::Adapter, lib: true do
let(:ldap) { double(:ldap) }
let(:adapter) { ldap_adapter('ldapmain', ldap) }
let(:default_user_search_attributes) { }
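# 'memberof' is now part of the requested attributes, presumably so group
# membership can be read during sync (see the Gitlab::LDAP::Person#memberof
# specs further down).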
def user_search_attributes(id_name)
[id_name, 'cn', 'mail', 'dn', 'memberof']
end
describe '#users' do
before do
......@@ -16,7 +21,7 @@ describe Gitlab::LDAP::Adapter, lib: true do
expect(adapter).to receive(:ldap_search) do |arg|
expect(arg[:filter].to_s).to eq('(uid=johndoe)')
expect(arg[:base]).to eq('dc=example,dc=com')
expect(arg[:attributes]).to match(%w{uid cn mail dn})
expect(arg[:attributes]).to match(user_search_attributes('uid'))
end.and_return({})
adapter.users('uid', 'johndoe')
......@@ -26,7 +31,7 @@ describe Gitlab::LDAP::Adapter, lib: true do
expect(adapter).to receive(:ldap_search).with(
base: 'uid=johndoe,ou=users,dc=example,dc=com',
scope: Net::LDAP::SearchScope_BaseObject,
attributes: %w{uid cn mail dn},
attributes: user_search_attributes('uid'),
filter: nil
).and_return({})
......@@ -63,7 +68,7 @@ describe Gitlab::LDAP::Adapter, lib: true do
it 'uses the right uid attribute when non-default' do
stub_ldap_config(uid: 'sAMAccountName')
expect(adapter).to receive(:ldap_search).with(
hash_including(attributes: %w{sAMAccountName cn mail dn})
hash_including(attributes: user_search_attributes('sAMAccountName'))
).and_return({})
adapter.users('sAMAccountName', 'johndoe')
......
......@@ -43,4 +43,51 @@ describe Gitlab::LDAP::Person do
expect(person.email).to eq([user_principal_name])
end
end
describe '#memberof' do
it 'returns an empty array if the field was not present' do
person = described_class.new(entry, 'ldapmain')
expect(person.memberof).to eq([])
end
it 'returns the values of `memberof` if the field was present' do
example_memberof = ['CN=Group Policy Creator Owners,CN=Users,DC=Vosmaer,DC=com',
'CN=Domain Admins,CN=Users,DC=Vosmaer,DC=com',
'CN=Enterprise Admins,CN=Users,DC=Vosmaer,DC=com',
'CN=Schema Admins,CN=Users,DC=Vosmaer,DC=com',
'CN=Administrators,CN=Builtin,DC=Vosmaer,DC=com']
entry['memberof'] = example_memberof
person = described_class.new(entry, 'ldapmain')
expect(person.memberof).to eq(example_memberof)
end
end
describe '#cn_from_memberof' do
it 'gets the group cn from the memberof value' do
person = described_class.new(entry, 'ldapmain')
expect(person.cn_from_memberof('cN=Group Policy Creator Owners,CN=Users,DC=Vosmaer,DC=com'))
.to eq('Group Policy Creator Owners')
end
it "doesn't break when there is no CN property" do
person = described_class.new(entry, 'ldapmain')
expect(person.cn_from_memberof('DC=Vosmaer,DC=com'))
.to be_nil
end
end
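# A minimal sketch of the CN extraction behaviour exercised above (hypothetical
# helper, not the actual implementation): match the leading CN component
# case-insensitively and return nil when it is absent.
#
#   def cn_from_memberof(dn)
#     dn[/\Acn=([^,]+)/i, 1]
#   end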
describe '#group_cns' do
it 'returns only CNs from the memberof values' do
example_memberof = ['CN=Group Policy Creator Owners,CN=Users,DC=Vosmaer,DC=com',
'CN=Administrators,CN=Builtin,DC=Vosmaer,DC=com']
entry['memberof'] = example_memberof
person = described_class.new(entry, 'ldapmain')
expect(person.group_cns).to eq(['Group Policy Creator Owners', 'Administrators'])
end
end
end
......@@ -5,9 +5,6 @@ describe ProjectSnippet, models: true do
it { is_expected.to belong_to(:project) }
end
describe "Mass assignment" do
end
describe "Validation" do
it { is_expected.to validate_presence_of(:project) }
end
......
......@@ -1042,13 +1042,6 @@ describe Project, models: true do
end
end
describe '#pipeline' do
let(:project) { create :project }
let(:pipeline) { create :ci_pipeline, project: project, ref: 'master' }
subject { project.pipeline(pipeline.sha, 'master') }
end
describe '#pipeline_for' do
let(:project) { create(:project, :repository) }
let!(:pipeline) { create_pipeline }
......
......@@ -106,9 +106,6 @@ describe ProtectedBranch, models: true do
end
end
describe "Mass assignment" do
end
describe 'Validation' do
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:name) }
......
......@@ -953,10 +953,20 @@ describe User, models: true do
end
context 'with a group route matching the given path' do
  context 'when the group namespace has an owner_id (legacy data)' do
    let!(:group) { create(:group, path: 'group_path', owner: user) }

    it 'returns nil' do
      expect(User.find_by_full_path('group_path')).to eq(nil)
    end
  end

  context 'when the group namespace does not have an owner_id' do
    let!(:group) { create(:group, path: 'group_path') }

    it 'returns nil' do
      expect(User.find_by_full_path('group_path')).to eq(nil)
    end
  end
end
end
......
......@@ -243,7 +243,6 @@ describe 'project routing' do
# diffs_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/diffs(.:format) projects/merge_requests#diffs
# commits_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/commits(.:format) projects/merge_requests#commits
# merge_namespace_project_merge_request POST /:namespace_id/:project_id/merge_requests/:id/merge(.:format) projects/merge_requests#merge
# merge_check_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/merge_check(.:format) projects/merge_requests#merge_check
# ci_status_namespace_project_merge_request GET /:namespace_id/:project_id/merge_requests/:id/ci_status(.:format) projects/merge_requests#ci_status
# toggle_subscription_namespace_project_merge_request POST /:namespace_id/:project_id/merge_requests/:id/toggle_subscription(.:format) projects/merge_requests#toggle_subscription
# branch_from_namespace_project_merge_requests GET /:namespace_id/:project_id/merge_requests/branch_from(.:format) projects/merge_requests#branch_from
......@@ -272,10 +271,6 @@ describe 'project routing' do
)
end
it 'to #merge_check' do
expect(get('/gitlab/gitlabhq/merge_requests/1/merge_check')).to route_to('projects/merge_requests#merge_check', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
it 'to #branch_from' do
expect(get('/gitlab/gitlabhq/merge_requests/branch_from')).to route_to('projects/merge_requests#branch_from', namespace_id: 'gitlab', project_id: 'gitlabhq')
end
......
......@@ -249,17 +249,34 @@ describe RootController, 'routing' do
end
end
# new_user_session GET /users/sign_in(.:format) devise/sessions#new
# user_session POST /users/sign_in(.:format) devise/sessions#create
# destroy_user_session DELETE /users/sign_out(.:format) devise/sessions#destroy
# user_omniauth_authorize /users/auth/:provider(.:format) omniauth_callbacks#passthru
# user_omniauth_callback /users/auth/:action/callback(.:format) omniauth_callbacks#(?-mix:(?!))
# user_password POST /users/password(.:format) devise/passwords#create
# new_user_password GET /users/password/new(.:format) devise/passwords#new
# edit_user_password GET /users/password/edit(.:format) devise/passwords#edit
# PUT /users/password(.:format) devise/passwords#update
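# The route table above is Devise's generated output; the examples below expect
# the application's own sessions/passwords controllers rather than the
# devise/* defaults.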
describe "Authentication", "routing" do
# pending
it "GET /users/sign_in" do
expect(get("/users/sign_in")).to route_to('sessions#new')
end
it "POST /users/sign_in" do
expect(post("/users/sign_in")).to route_to('sessions#create')
end
it "DELETE /users/sign_out" do
expect(delete("/users/sign_out")).to route_to('sessions#destroy')
end
it "POST /users/password" do
expect(post("/users/password")).to route_to('passwords#create')
end
it "GET /users/password/new" do
expect(get("/users/password/new")).to route_to('passwords#new')
end
it "GET /users/password/edit" do
expect(get("/users/password/edit")).to route_to('passwords#edit')
end
it "PUT /users/password" do
expect(put("/users/password")).to route_to('passwords#update')
end
end
describe "Groups", "routing" do
......
......@@ -51,7 +51,6 @@ describe MergeRequestEntity do
## EE
:can_push_to_source_branch, :approvals_before_merge,
:squash, :rebase_commit_sha, :rebase_in_progress,
:approved, :should_be_rebased, :rebase_path,
:approvals_path, :ff_only_enabled)
end
......
RSpec::Matchers.define :gitaly_request_with_repo_path do |path|
match { |actual| actual.repository.path == path }
end
RSpec::Matchers.define :gitaly_request_with_params do |params|
match do |actual|
params.reduce(true) { |r, (key, val)| r && actual.send(key) == val }
end
end
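# Usage, as in the Gitlab::GitalyClient::Ref specs above:
#
#   expect_any_instance_of(Gitaly::Ref::Stub).
#     to receive(:find_local_branches).
#     with(gitaly_request_with_params(sort_by: :UPDATED_DESC))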
RSpec.shared_examples "protected branches > access control > CE" do
ProtectedBranch::PushAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)|
it "allows creating protected branches that #{access_type_name} can push to" do
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('master')
within('.new_protected_branch') do
allowed_to_push_button = find(".js-allowed-to-push")
unless allowed_to_push_button.text == access_type_name
allowed_to_push_button.trigger('click')
within(".dropdown.open .dropdown-menu") { click_on access_type_name }
end
end
click_on "Protect"
expect(ProtectedBranch.count).to eq(1)
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to eq([access_type_id])
end
it "allows updating protected branches so that #{access_type_name} can push to them" do
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('master')
click_on "Protect"
expect(ProtectedBranch.count).to eq(1)
within(".protected-branches-list") do
find(".js-allowed-to-push").click
within('.js-allowed-to-push-container') do
expect(first("li")).to have_content("Roles")
click_on access_type_name
end
end
wait_for_ajax
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to include(access_type_id)
end
end
ProtectedBranch::MergeAccessLevel.human_access_levels.each do |(access_type_id, access_type_name)|
it "allows creating protected branches that #{access_type_name} can merge to" do
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('master')
within('.new_protected_branch') do
allowed_to_merge_button = find(".js-allowed-to-merge")
unless allowed_to_merge_button.text == access_type_name
allowed_to_merge_button.click
within(".dropdown.open .dropdown-menu") { click_on access_type_name }
end
end
click_on "Protect"
expect(ProtectedBranch.count).to eq(1)
expect(ProtectedBranch.last.merge_access_levels.map(&:access_level)).to eq([access_type_id])
end
it "allows updating protected branches so that #{access_type_name} can merge to them" do
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('master')
click_on "Protect"
expect(ProtectedBranch.count).to eq(1)
within(".protected-branches-list") do
find(".js-allowed-to-merge").click
within('.js-allowed-to-merge-container') do
expect(first("li")).to have_content("Roles")
click_on access_type_name
end
end
wait_for_ajax
expect(ProtectedBranch.last.merge_access_levels.map(&:access_level)).to include(access_type_id)
end
end
end
RSpec.shared_examples "protected branches > access control > EE" do
[['merge', ProtectedBranch::MergeAccessLevel], ['push', ProtectedBranch::PushAccessLevel]].each do |git_operation, access_level_class|
# Need to set a default for the `git_operation` access level that _isn't_ being tested
other_git_operation = git_operation == 'merge' ? 'push' : 'merge'
roles = git_operation == 'merge' ? access_level_class.human_access_levels : access_level_class.human_access_levels.except(0)
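# For push, access level 0 ('No one') is excluded from the roles iterated here;
# the 'No one' behaviour is exercised by the dedicated contexts at the bottom of
# this file.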
let(:users) { create_list(:user, 5) }
let(:groups) { create_list(:group, 5) }
before do
users.each { |user| project.team << [user, :developer] }
groups.each { |group| project.project_group_links.create(group: group, group_access: Gitlab::Access::DEVELOPER) }
end
it "allows creating protected branches that roles, users, and groups can #{git_operation} to" do
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('master')
set_allowed_to(git_operation, users.map(&:name))
set_allowed_to(git_operation, groups.map(&:name))
set_allowed_to(git_operation, roles.values)
set_allowed_to(other_git_operation)
click_on "Protect"
within(".protected-branches-list") { expect(page).to have_content('master') }
expect(ProtectedBranch.count).to eq(1)
roles.each { |(access_type_id, _)| expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym).map(&:access_level)).to include(access_type_id) }
users.each { |user| expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym).map(&:user_id)).to include(user.id) }
groups.each { |group| expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym).map(&:group_id)).to include(group.id) }
end
it "allows updating protected branches so that roles and users can #{git_operation} to it" do
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('master')
set_allowed_to('merge')
set_allowed_to('push')
click_on "Protect"
set_allowed_to(git_operation, users.map(&:name), form: ".js-protected-branch-edit-form")
set_allowed_to(git_operation, groups.map(&:name), form: ".js-protected-branch-edit-form")
set_allowed_to(git_operation, roles.values, form: ".js-protected-branch-edit-form")
wait_for_ajax
expect(ProtectedBranch.count).to eq(1)
roles.each { |(access_type_id, _)| expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym).map(&:access_level)).to include(access_type_id) }
users.each { |user| expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym).map(&:user_id)).to include(user.id) }
groups.each { |group| expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym).map(&:group_id)).to include(group.id) }
end
it "allows updating protected branches so that roles and users cannot #{git_operation} to it" do
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('master')
users.each { |user| set_allowed_to(git_operation, user.name) }
roles.each { |(_, access_type_name)| set_allowed_to(git_operation, access_type_name) }
groups.each { |group| set_allowed_to(git_operation, group.name) }
set_allowed_to(other_git_operation)
click_on "Protect"
users.each { |user| set_allowed_to(git_operation, user.name, form: ".js-protected-branch-edit-form") }
groups.each { |group| set_allowed_to(git_operation, group.name, form: ".js-protected-branch-edit-form") }
roles.each { |(_, access_type_name)| set_allowed_to(git_operation, access_type_name, form: ".js-protected-branch-edit-form") }
wait_for_ajax
expect(ProtectedBranch.count).to eq(1)
expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym)).to be_empty
end
it "prepends selected users that can #{git_operation} to" do
users = create_list(:user, 21)
users.each { |user| project.team << [user, :developer] }
visit namespace_project_protected_branches_path(project.namespace, project)
# Create Protected Branch
set_protected_branch_name('master')
set_allowed_to(git_operation, roles.values)
set_allowed_to(other_git_operation)
click_on 'Protect'
# Update Protected Branch
within(".protected-branches-list") do
find(".js-allowed-to-#{git_operation}").click
find(".dropdown-input-field").set(users.last.name) # Find a user that is not loaded
expect(page).to have_selector('.dropdown-header', count: 3)
%w{Roles Groups Users}.each_with_index do |header, index|
expect(all('.dropdown-header')[index]).to have_content(header)
end
wait_for_ajax
click_on users.last.name
find(".js-allowed-to-#{git_operation}").click # close
end
wait_for_ajax
# Verify the user is appended in the dropdown
find(".protected-branches-list .js-allowed-to-#{git_operation}").click
expect(page).to have_selector '.dropdown-content .is-active', text: users.last.name
expect(ProtectedBranch.count).to eq(1)
roles.each { |(access_type_id, _)| expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym).map(&:access_level)).to include(access_type_id) }
expect(ProtectedBranch.last.send("#{git_operation}_access_levels".to_sym).map(&:user_id)).to include(users.last.id)
end
end
context 'When updating a protected branch' do
it 'discards other roles when choosing "No one"' do
roles = ProtectedBranch::PushAccessLevel.human_access_levels.except(0)
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('fix')
set_allowed_to('merge')
set_allowed_to('push', roles.values)
click_on "Protect"
wait_for_ajax
roles.each do |(access_type_id, _)|
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to include(access_type_id)
end
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).not_to include(0)
set_allowed_to('push', 'No one', form: '.js-protected-branch-edit-form')
wait_for_ajax
roles.each do |(access_type_id, _)|
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).not_to include(access_type_id)
end
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to include(0)
end
end
context 'When creating a protected branch' do
it 'discards other roles when choosing "No one"' do
roles = ProtectedBranch::PushAccessLevel.human_access_levels.except(0)
visit namespace_project_protected_branches_path(project.namespace, project)
set_protected_branch_name('master')
set_allowed_to('merge')
set_allowed_to('push', ProtectedBranch::PushAccessLevel.human_access_levels.values) # Last item (No one) should deselect the other ones
click_on "Protect"
wait_for_ajax
roles.each do |(access_type_id, _)|
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).not_to include(access_type_id)
end
expect(ProtectedBranch.last.push_access_levels.map(&:access_level)).to include(0)
end
end
end