Commit 5d5a96c6 authored by Stan Hu

Merge branch 'ce-to-ee-2018-08-06' into 'master'

CE upstream - 2018-08-06 18:21 UTC

See merge request gitlab-org/gitlab-ee!6814
parents 00a1309e 7e4e1c0a
......@@ -36,6 +36,8 @@ class ImporterStatus {
const $targetField = $tr.find('.import-target');
const $namespaceInput = $targetField.find('.js-select-namespace option:selected');
const id = $tr.attr('id').replace('repo_', '');
const repoData = $tr.data();
let targetNamespace;
let newName;
if ($namespaceInput.length > 0) {
......@@ -45,12 +47,20 @@ class ImporterStatus {
}
$btn.disable().addClass('is-loading');
return axios.post(this.importUrl, {
this.id = id;
let attributes = {
repo_id: id,
target_namespace: targetNamespace,
new_name: newName,
ci_cd_only: this.ciCdOnly,
})
};
if (repoData) {
attributes = Object.assign(repoData, attributes);
}
return axios.post(this.importUrl, attributes)
.then(({ data }) => {
const job = $(`tr#repo_${id}`);
job.attr('id', `project_${data.id}`);
......@@ -70,6 +80,9 @@ class ImporterStatus {
.catch((error) => {
let details = error;
const $statusField = $(`#repo_${this.id} .job-status`);
$statusField.text(__('Failed'));
if (error.response && error.response.data && error.response.data.errors) {
details = error.response.data.errors;
}
......
......@@ -165,6 +165,7 @@ export default {
}
this.showEmptyState = false;
})
.then(this.resize)
.catch(() => {
this.state = 'unableToConnect';
});
......
import initPipelineDetails from '~/pipelines/pipeline_details_bundle';
import initPipelines from '../init_pipelines';
document.addEventListener('DOMContentLoaded', () => {
initPipelines();
initPipelineDetails();
});
// /builds is an alias for show
import '../show/index';
import initPipelines from '../init_pipelines';
document.addEventListener('DOMContentLoaded', initPipelines);
// /failures is an alias for show
import '../show/index';
......@@ -17,7 +17,7 @@ document.addEventListener('DOMContentLoaded', () => {
new ShortcutsNavigation(); // eslint-disable-line no-new
new NotificationsForm(); // eslint-disable-line no-new
new UserCallout({ // eslint-disable-line no-new
setCalloutPerProject: true,
setCalloutPerProject: false,
className: 'js-autodevops-banner',
});
......
......@@ -36,11 +36,11 @@
:key="index"
class="row prepend-top-10 append-bottom-10"
>
<strong class="col-sm-2 text-right">
<strong class="col-sm-3 text-right">
{{ field.text }}:
</strong>
<div class="col-sm-10 text-secondary">
<div class="col-sm-9 text-secondary">
<code-block
v-if="field.type === $options.fieldTypes.codeBock"
:code="field.value"
......
......@@ -4,15 +4,20 @@ import Icon from '~/vue_shared/components/icon.vue';
import { inserted } from '~/feature_highlight/feature_highlight_helper';
import { mouseenter, debouncedMouseleave, togglePopover } from '~/shared/popover';
/**
* Renders a button with a question mark icon.
* On hover it shows a popover; the popover is dismissed on mouseleave.
*/
export default {
name: 'ReportsHelpPopover',
name: 'HelpPopover',
components: {
Icon,
},
props: {
options: {
type: Object,
required: true,
required: false,
default: () => ({}),
},
},
mounted() {
......
......@@ -2,7 +2,7 @@
import { __ } from '~/locale';
import StatusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue';
import IssuesList from './issues_list.vue';
import Popover from './help_popover.vue';
import Popover from '../help_popover.vue';
const LOADING = 'LOADING';
const ERROR = 'ERROR';
......
<script>
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import LoadingIcon from '~/vue_shared/components/loading_icon.vue';
import Popover from './help_popover.vue';
import Popover from '../help_popover.vue';
/**
* Renders the summary row for each report
......
......@@ -35,6 +35,7 @@ class ApplicationController < ActionController::Base
:gitea_import_enabled?, :github_import_configured?,
:gitlab_import_enabled?, :gitlab_import_configured?,
:bitbucket_import_enabled?, :bitbucket_import_configured?,
:bitbucket_server_import_enabled?,
:google_code_import_enabled?, :fogbugz_import_enabled?,
:git_import_enabled?, :gitlab_project_import_enabled?,
:manifest_import_enabled?
......@@ -345,6 +346,10 @@ class ApplicationController < ActionController::Base
!Gitlab::CurrentSettings.import_sources.empty?
end
def bitbucket_server_import_enabled?
Gitlab::CurrentSettings.import_sources.include?('bitbucket_server')
end
def github_import_enabled?
Gitlab::CurrentSettings.import_sources.include?('github')
end
......
# frozen_string_literal: true
class Import::BitbucketServerController < Import::BaseController
before_action :verify_bitbucket_server_import_enabled
before_action :bitbucket_auth, except: [:new, :configure]
before_action :validate_import_params, only: [:create]
# As a basic sanity check to prevent URL injection, restrict project keys
# and repository slugs to allowed characters. For Bitbucket:
#
# Project keys must start with a letter and may only consist of ASCII letters, numbers and underscores (A-Z, a-z, 0-9, _).
#
# Repository names are limited to 128 characters. They must start with a
# letter or number and may contain spaces, hyphens, underscores, and periods.
# (https://community.atlassian.com/t5/Answers-Developer-Questions/stash-repository-names/qaq-p/499054)
VALID_BITBUCKET_CHARS = /\A[\w\-_\.\s]+\z/
def new
end
def create
repo = bitbucket_client.repo(@project_key, @repo_slug)
unless repo
return render json: { errors: "Project #{@project_key}/#{@repo_slug} could not be found" }, status: :unprocessable_entity
end
project_name = params[:new_name].presence || repo.name
namespace_path = params[:new_namespace].presence || current_user.username
target_namespace = find_or_create_namespace(namespace_path, current_user)
if current_user.can?(:create_projects, target_namespace)
project = Gitlab::BitbucketServerImport::ProjectCreator.new(@project_key, @repo_slug, repo, project_name, target_namespace, current_user, credentials).execute
if project.persisted?
render json: ProjectSerializer.new.represent(project)
else
render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end
else
render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity
end
rescue BitbucketServer::Client::ServerError => e
render json: { errors: "Unable to connect to server: #{e}" }, status: :unprocessable_entity
end
def configure
session[personal_access_token_key] = params[:personal_access_token]
session[bitbucket_server_username_key] = params[:bitbucket_username]
session[bitbucket_server_url_key] = params[:bitbucket_server_url]
redirect_to status_import_bitbucket_server_path
end
def status
repos = bitbucket_client.repos
@repos, @incompatible_repos = repos.partition { |repo| repo.valid? }
@already_added_projects = find_already_added_projects('bitbucket_server')
already_added_projects_names = @already_added_projects.pluck(:import_source)
@repos.to_a.reject! { |repo| already_added_projects_names.include?(repo.browse_url) }
rescue BitbucketServer::Connection::ConnectionError, BitbucketServer::Client::ServerError => e
flash[:alert] = "Unable to connect to server: #{e}"
clear_session_data
redirect_to new_import_bitbucket_server_path
end
def jobs
render json: find_jobs('bitbucket_server')
end
private
def bitbucket_client
@bitbucket_client ||= BitbucketServer::Client.new(credentials)
end
def validate_import_params
@project_key = params[:project]
@repo_slug = params[:repository]
return render_validation_error('Missing project key') unless @project_key.present?
return render_validation_error('Missing repository slug') unless @repo_slug.present?
return render_validation_error('Invalid project key') unless @project_key =~ VALID_BITBUCKET_CHARS
return render_validation_error('Invalid repository slug') unless @repo_slug =~ VALID_BITBUCKET_CHARS
end
def render_validation_error(message)
render json: { errors: message }, status: :unprocessable_entity
end
def bitbucket_auth
unless session[bitbucket_server_url_key].present? &&
session[bitbucket_server_username_key].present? &&
session[personal_access_token_key].present?
redirect_to new_import_bitbucket_server_path
end
end
def verify_bitbucket_server_import_enabled
render_404 unless bitbucket_server_import_enabled?
end
def bitbucket_server_url_key
:bitbucket_server_url
end
def bitbucket_server_username_key
:bitbucket_server_username
end
def personal_access_token_key
:bitbucket_server_personal_access_token
end
def clear_session_data
session[bitbucket_server_url_key] = nil
session[bitbucket_server_username_key] = nil
session[personal_access_token_key] = nil
end
def credentials
{
base_uri: session[bitbucket_server_url_key],
user: session[bitbucket_server_username_key],
password: session[personal_access_token_key]
}
end
end
......@@ -150,7 +150,7 @@ class ProjectsController < Projects::ApplicationController
def archive
return access_denied! unless can?(current_user, :archive_project, @project)
@project.archive!
::Projects::UpdateService.new(@project, current_user, archived: true).execute
respond_to do |format|
format.html { redirect_to project_path(@project) }
......@@ -160,7 +160,7 @@ class ProjectsController < Projects::ApplicationController
def unarchive
return access_denied! unless can?(current_user, :archive_project, @project)
@project.unarchive!
::Projects::UpdateService.new(@project, current_user, archived: false).execute
respond_to do |format|
format.html { redirect_to project_path(@project) }
......
......@@ -11,13 +11,23 @@ module NamespacesHelper
.includes(:route)
.order('routes.path')
users = [current_user.namespace]
selected_id = selected
unless extra_group.nil? || extra_group.is_a?(Group)
extra_group = Group.find(extra_group) if Namespace.find(extra_group).kind == 'group'
end
if extra_group && extra_group.is_a?(Group) && (!Group.exists?(name: extra_group.name) || Ability.allowed?(current_user, :read_group, extra_group))
groups |= [extra_group]
if extra_group && extra_group.is_a?(Group)
extra_group = dedup_extra_group(extra_group)
if Ability.allowed?(current_user, :read_group, extra_group)
# Assign the value to an invalid primary ID so that the select box works
extra_group.id = -1 unless extra_group.persisted?
selected_id = extra_group.id if selected == :extra_group
groups |= [extra_group]
else
selected_id = current_user.namespace.id
end
end
options = []
......@@ -27,11 +37,11 @@ module NamespacesHelper
options << options_for_group(users, display_path: display_path, type: 'user')
if selected == :current_user && current_user.namespace
selected = current_user.namespace.id
selected_id = current_user.namespace.id
end
end
grouped_options_for_select(options, selected)
grouped_options_for_select(options, selected_id)
end
def namespace_icon(namespace, size = 40)
......@@ -44,6 +54,17 @@ module NamespacesHelper
private
# Many importers create a temporary Group, so use the real
# group if one exists by that name to prevent duplicates.
def dedup_extra_group(extra_group)
unless extra_group.persisted?
existing_group = Group.find_by(name: extra_group.name)
extra_group = existing_group if existing_group&.persisted?
end
extra_group
end
def options_for_group(namespaces, display_path:, type:)
group_label = type.pluralize
elements = namespaces.sort_by(&:human_name).map! do |n|
......
......@@ -64,6 +64,8 @@ class MergeRequest < ActiveRecord::Base
class_name: 'MergeRequestsClosingIssues',
dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_many :cached_closes_issues, through: :merge_requests_closing_issues, source: :issue
belongs_to :assignee, class_name: "User"
serialize :merge_params, Hash # rubocop:disable Cop/ActiveRecordSerialize
......@@ -769,8 +771,9 @@ class MergeRequest < ActiveRecord::Base
# Calculating this information for a number of merge requests requires
# running `ReferenceExtractor` on each of them separately.
# This optimization does not apply to issues from external sources.
def cache_merge_request_closes_issues!(current_user)
def cache_merge_request_closes_issues!(current_user = self.author)
return unless project.issues_enabled?
return if closed? || merged?
transaction do
self.merge_requests_closing_issues.delete_all
......@@ -783,6 +786,18 @@ class MergeRequest < ActiveRecord::Base
end
end
def visible_closing_issues_for(current_user = self.author)
strong_memoize(:visible_closing_issues_for) do
if self.target_project.has_external_issue_tracker?
closes_issues(current_user)
else
cached_closes_issues.select do |issue|
Ability.allowed?(current_user, :read_issue, issue)
end
end
end
end
# Return the set of issues that will be closed if this merge request is accepted.
def closes_issues(current_user = self.author)
if target_branch == project.default_branch
......@@ -802,7 +817,7 @@ class MergeRequest < ActiveRecord::Base
ext = Gitlab::ReferenceExtractor.new(project, current_user)
ext.analyze("#{title}\n#{description}")
ext.issues - closes_issues(current_user)
ext.issues - visible_closing_issues_for(current_user)
end
def target_project_path
......@@ -850,7 +865,7 @@ class MergeRequest < ActiveRecord::Base
end
def merge_commit_message(include_description: false)
closes_issues_references = closes_issues.map do |issue|
closes_issues_references = visible_closing_issues_for.map do |issue|
issue.to_reference(target_project)
end
......
# frozen_string_literal: true
module Postgresql
class ReplicationSlot < ActiveRecord::Base
self.table_name = 'pg_replication_slots'
# Returns true if the lag observed across all replication slots exceeds a
# given threshold.
#
# max - The maximum replication lag size, in bytes. Based on GitLab.com
# statistics it takes between 1 and 5 seconds to replicate around
# 100 MB of data.
def self.lag_too_great?(max = 100.megabytes)
lag_function = "#{Gitlab::Database.pg_wal_lsn_diff}" \
"(#{Gitlab::Database.pg_current_wal_insert_lsn}(), restart_lsn)::bigint"
# We force the use of a transaction here so the query always goes to the
# primary, even when using the EE DB load balancer.
sizes = transaction { pluck(lag_function) }
too_great = sizes.count { |size| size >= max }
# If too many replicas are falling behind too much, the availability of a
# GitLab instance might suffer. To prevent this from happening we require
# at least 1 replica to have data recent enough.
if sizes.any? && too_great.positive?
(sizes.length - too_great) <= 1
else
false
end
end
end
end
......@@ -671,6 +671,8 @@ class Project < ActiveRecord::Base
project_import_data.credentials ||= {}
project_import_data.credentials = project_import_data.credentials.merge(credentials)
end
project_import_data
end
def import?
......@@ -1338,14 +1340,6 @@ class Project < ActiveRecord::Base
:visibility_level
end
def archive!
update_attribute(:archived, true)
end
def unarchive!
update_attribute(:archived, false)
end
def change_head(branch)
if repository.branch_exists?(branch)
repository.before_change_head
......
......@@ -8,6 +8,10 @@ class JiraService < IssueTrackerService
validates :username, presence: true, if: :activated?
validates :password, presence: true, if: :activated?
validates :jira_issue_transition_id,
format: { with: Gitlab::Regex.jira_transition_id_regex, message: "transition ids can have only numbers which can be split with , or ;" },
allow_blank: true
prop_accessor :username, :password, :url, :api_url, :jira_issue_transition_id, :title, :description
before_update :reset_password
......@@ -91,7 +95,7 @@ class JiraService < IssueTrackerService
{ type: 'text', name: 'api_url', title: 'JIRA API URL', placeholder: 'If different from Web URL' },
{ type: 'text', name: 'username', placeholder: '', required: true },
{ type: 'password', name: 'password', placeholder: '', required: true },
{ type: 'text', name: 'jira_issue_transition_id', title: 'Transition ID', placeholder: '' }
{ type: 'text', name: 'jira_issue_transition_id', title: 'Transition ID(s)', placeholder: 'Use , or ; to separate multiple transition IDs' }
]
end
......@@ -191,8 +195,18 @@ class JiraService < IssueTrackerService
end
end
# jira_issue_transition_id can hold multiple values separated by , or ;
# The issue is transitioned through them in the order given by the user.
# If any transition fails, the error message is logged and the transition sequence stops.
def transition_issue(issue)
issue.transitions.build.save(transition: { id: jira_issue_transition_id })
jira_issue_transition_id.scan(Gitlab::Regex.jira_transition_id_regex).each do |transition_id|
begin
issue.transitions.build.save!(transition: { id: transition_id })
rescue => error
Rails.logger.info "#{self.class.name} Issue Transition failed message ERROR: #{client_url} - #{error.message}"
return false
end
end
end
def add_issue_solved_comment(issue, commit_id, commit_url)
......
......@@ -208,7 +208,7 @@ class MergeRequestPresenter < Gitlab::View::Presenter::Delegated
end
def closing_issues
@closing_issues ||= closes_issues(current_user)
@closing_issues ||= visible_closing_issues_for(current_user)
end
def pipeline
......
......@@ -25,7 +25,7 @@ module MergeRequests
def close_issues(merge_request)
return unless merge_request.target_branch == project.default_branch
closed_issues = merge_request.closes_issues(current_user)
closed_issues = merge_request.visible_closing_issues_for(current_user)
closed_issues.each do |issue|
if can?(current_user, :update_issue, issue)
......
......@@ -14,6 +14,7 @@ module MergeRequests
merge_request.mark_as_unchecked
invalidate_cache_counts(merge_request, users: merge_request.assignees)
merge_request.update_project_counter_caches
merge_request.cache_merge_request_closes_issues!(current_user)
end
merge_request
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-abuse-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-account-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-background-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-ci-cd-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-email-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-gitaly-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-help-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-influx-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-ip-limits-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-koding-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-logging-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-outbound-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-pages-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-performance-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-performance-bar-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-plantuml-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-prometheus-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-realtime-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-registry-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-check-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-mirror-settings') do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-storage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-signin-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-signup-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-spam-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-terminal-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-terms-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
- application_setting = local_assigns.fetch(:application_setting)
= form_for application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for application_setting, url: admin_application_settings_path(anchor: 'js-third-party-offers-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-usage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-visibility-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
- title = _('Bitbucket Server Import')
- page_title title
- breadcrumb_title title
- header_title "Projects", root_path
%h3.page-title
= icon 'bitbucket-square', text: _('Import repositories from Bitbucket Server')
%p
= _('Enter in your Bitbucket Server URL and personal access token below')
= form_tag configure_import_bitbucket_server_path, method: :post do
.form-group.row
= label_tag :bitbucket_server_url, 'Bitbucket Server URL', class: 'col-form-label col-md-2'
.col-md-4
= text_field_tag :bitbucket_server_url, '', class: 'form-control append-right-8', placeholder: _('https://your-bitbucket-server'), size: 40
.form-group.row
= label_tag :bitbucket_username, 'Username', class: 'col-form-label col-md-2'
.col-md-4
= text_field_tag :bitbucket_username, '', class: 'form-control append-right-8', placeholder: _('username'), size: 40
.form-group.row
= label_tag :personal_access_token, 'Password/Personal Access Token', class: 'col-form-label col-md-2'
.col-md-4
= password_field_tag :personal_access_token, '', class: 'form-control append-right-8', placeholder: _('Personal Access Token'), size: 40
.form-actions
= submit_tag _('List your Bitbucket Server repositories'), class: 'btn btn-success'
- page_title 'Bitbucket Server import'
- header_title 'Projects', root_path
%h3.page-title
%i.fa.fa-bitbucket-square
= _('Import projects from Bitbucket Server')
- if @repos.any?
%p.light
= _('Select projects you want to import.')
.btn-group
- if @incompatible_repos.any?
= button_tag class: 'btn btn-import btn-success js-import-all' do
= _('Import all compatible projects')
= icon('spinner spin', class: 'loading-icon')
- else
= button_tag class: 'btn btn-import btn-success js-import-all' do
= _('Import all projects')
= icon('spinner spin', class: 'loading-icon')
.btn-group
= link_to('Reconfigure', configure_import_bitbucket_server_path, class: 'btn btn-primary', method: :post)
.table-responsive.prepend-top-10
%table.table.import-jobs
%colgroup.import-jobs-from-col
%colgroup.import-jobs-to-col
%colgroup.import-jobs-status-col
%thead
%tr
%th= _('From Bitbucket Server')
%th= _('To GitLab')
%th= _(' Status')
%tbody
- @already_added_projects.each do |project|
%tr{ id: "project_#{project.id}", class: "#{project_status_css_class(project.import_status)}" }
%td
= link_to project.import_source, project.import_source, target: '_blank', rel: 'noopener noreferrer'
%td
= link_to project.full_path, [project.namespace.becomes(Namespace), project]
%td.job-status
- if project.import_status == 'finished'
= icon('check', text: 'Done')
- elsif project.import_status == 'started'
= icon('spinner spin', text: 'started')
- else
= project.human_import_status_name
- @repos.each do |repo|
%tr{ id: "repo_#{repo.project_key}___#{repo.slug}", data: { project: repo.project_key, repository: repo.slug } }
%td
= link_to repo.browse_url, repo.browse_url, target: '_blank', rel: 'noopener noreferrer'
%td.import-target
%fieldset.row
.input-group
.project-path.input-group-prepend
- if current_user.can_select_namespace?
- selected = params[:namespace_id] || :extra_group
- opts = current_user.can_create_group? ? { extra_group: Group.new(name: repo.project_key, path: repo.project_key) } : {}
= select_tag :namespace_id, namespaces_options(selected, opts.merge({ display_path: true })), { class: 'input-group-text select2 js-select-namespace', tabindex: 1 }
- else
= text_field_tag :path, current_user.namespace_path, class: "input-group-text input-large form-control", tabindex: 1, disabled: true
%span.input-group-prepend
.input-group-text /
= text_field_tag :path, repo.name, class: "input-mini form-control", tabindex: 2, autofocus: true, required: true
%td.import-actions.job-status
= button_tag class: 'btn btn-import js-add-to-import' do
Import
= icon('spinner spin', class: 'loading-icon')
- @incompatible_repos.each do |repo|
%tr{ id: "repo_#{repo.project_key}___#{repo.slug}" }
%td
= link_to repo.browse_url, repo.browse_url, target: '_blank', rel: 'noopener noreferrer'
%td.import-target
%td.import-actions-job-status
= label_tag 'Incompatible Project', nil, class: 'label badge-danger'
- if @incompatible_repos.any?
%p
One or more of your Bitbucket Server projects cannot be imported into GitLab
directly because they use Subversion or Mercurial for version control,
rather than Git. Please convert
= link_to 'them to Git,', 'https://www.atlassian.com/git/tutorials/migrating-overview'
and go through the
= link_to 'import flow', status_import_bitbucket_server_path
again.
.js-importer-status{ data: { jobs_import_path: "#{jobs_import_bitbucket_server_path}", import_path: "#{import_bitbucket_server_path}" } }
......@@ -18,10 +18,14 @@
- if bitbucket_import_enabled?
%div
= link_to status_import_bitbucket_path, class: "btn import_bitbucket #{'how_to_import_link' unless bitbucket_import_configured?}" do
= icon('bitbucket', text: 'Bitbucket')
= icon('bitbucket', text: 'Bitbucket Cloud')
- unless bitbucket_import_configured?
= render 'bitbucket_import_modal'
- if bitbucket_server_import_enabled?
%div
= link_to status_import_bitbucket_server_path, class: "btn import_bitbucket" do
= icon('bitbucket-square', text: 'Bitbucket Server')
%div
- if gitlab_import_enabled?
%div
= link_to status_import_gitlab_path, class: "btn import_gitlab #{'how_to_import_link' unless gitlab_import_configured?}" do
......
......@@ -6,10 +6,22 @@ class BackgroundMigrationWorker
# The minimum amount of time between processing two jobs of the same migration
# class.
#
# This interval is set to 5 minutes so autovacuuming and other maintenance
# related tasks have plenty of time to clean up after a migration has been
# performed.
MIN_INTERVAL = 5.minutes.to_i
# This interval is set to 2 or 5 minutes so autovacuuming and other
# maintenance related tasks have plenty of time to clean up after a migration
# has been performed.
def self.minimum_interval
if enable_health_check?
2.minutes.to_i
else
5.minutes.to_i
end
end
def self.enable_health_check?
Rails.env.development? ||
Rails.env.test? ||
Feature.enabled?('background_migration_health_check')
end
# Performs the background migration.
#
......@@ -27,7 +39,8 @@ class BackgroundMigrationWorker
# running a migration of this class or we ran one recently. In this case
# we'll reschedule the job in such a way that it is picked up again around
# the time the lease expires.
self.class.perform_in(ttl || MIN_INTERVAL, class_name, arguments)
self.class
.perform_in(ttl || self.class.minimum_interval, class_name, arguments)
end
end
......@@ -39,17 +52,51 @@ class BackgroundMigrationWorker
[true, nil]
else
lease = lease_for(class_name)
perform = !!lease.try_obtain
# If we managed to acquire the lease but the DB is not healthy, then we
# want to simply reschedule our job and try again _after_ the lease
# expires.
if perform && !healthy_database?
database_unhealthy_counter.increment
[lease.try_obtain, lease.ttl]
perform = false
end
[perform, lease.ttl]
end
end
def lease_for(class_name)
Gitlab::ExclusiveLease
.new("#{self.class.name}:#{class_name}", timeout: MIN_INTERVAL)
.new(lease_key_for(class_name), timeout: self.class.minimum_interval)
end
def lease_key_for(class_name)
"#{self.class.name}:#{class_name}"
end
def always_perform?
Rails.env.test?
end
# Returns true if the database is healthy enough to allow the migration to be
# performed.
#
# class_name - The name of the background migration that we might want to
# run.
def healthy_database?
return true unless self.class.enable_health_check?
return true unless Gitlab::Database.postgresql?
!Postgresql::ReplicationSlot.lag_too_great?
end
def database_unhealthy_counter
Gitlab::Metrics.counter(
:background_migration_database_health_reschedules,
'The number of times a background migration is rescheduled because the database is unhealthy.'
)
end
end
---
title: Keep admin settings sections open after submitting forms
merge_request: 21040
author:
type: other
---
title: Fix height of full-width Metrics charts on large screens
merge_request: 20866
author:
type: fixed
---
title: Persist 'Auto DevOps' banner dismissal globally
merge_request: 20540
author:
type: other
---
title: Moves help_popover component to a common location
merge_request:
author:
type: other
---
title: Increases title column on modal for reports
merge_request:
author:
type: other
---
title: Update design of project templates
merge_request: 21012
author:
type: changed
---
title: Allow multiple JIRA transition ids
merge_request: 20939
author:
type: changed
---
title: Retrieve merge request closing issues from database cache
merge_request: 20911
author:
type: fixed
---
title: Trigger system hooks when project is archived/unarchived
merge_request: 20995
author:
type: added
---
title: Fix rendering of pipeline failure view when directly navigating to it
merge_request: 21043
author:
type: fixed
......@@ -24,6 +24,13 @@ namespace :import do
get :jobs
end
resource :bitbucket_server, only: [:create, :new], controller: :bitbucket_server do
post :configure
get :status
get :callback
get :jobs
end
resource :google_code, only: [:create, :new], controller: :google_code do
get :status
post :callback
......
......@@ -5,6 +5,9 @@ otherwise take a very long time (hours, days, years, etc) to complete. For
example, you can use background migrations to migrate data so that instead of
storing data in a single JSON column the data is stored in a separate table.
If the database cluster is considered to be in an unhealthy state, background
migrations automatically reschedule themselves for a later point in time.
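A minimal sketch of that behaviour, assuming only the helpers introduced in this merge request (`BackgroundMigrationWorker.minimum_interval` and `Postgresql::ReplicationSlot.lag_too_great?`); the actual gate lives in the worker's `healthy_database?` check:

```ruby
# Sketch only: when replication lag is too great on PostgreSQL, reschedule
# the job after the minimum interval instead of performing it now.
def reschedule_if_unhealthy(class_name, arguments)
  return false unless Gitlab::Database.postgresql?
  return false unless Postgresql::ReplicationSlot.lag_too_great?

  BackgroundMigrationWorker
    .perform_in(BackgroundMigrationWorker.minimum_interval, class_name, arguments)

  true
end
```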
## When To Use Background Migrations
>**Note:**
......
......@@ -115,7 +115,7 @@ in the table below.
| `JIRA API URL` | The base URL to the JIRA instance API. Web URL value will be used if not set. E.g., `https://jira-api.example.com`. |
| `Username` | The user name created in [configuring JIRA step](#configuring-jira). Using the email address will cause `401 unauthorized`. |
| `Password` |The password of the user created in [configuring JIRA step](#configuring-jira). |
| `Transition ID` | This is the ID of a transition that moves issues to the desired state. **Closing JIRA issues via commits or Merge Requests won't work if you don't set the ID correctly.** |
| `Transition ID` | The ID of a transition that moves issues to the desired state. You can list multiple transition IDs separated by `,` or `;`, in which case the issue is moved through each state, one after another, in the given order (see the example below the table). **Closing JIRA issues via commits or Merge Requests won't work if you don't set the ID correctly.** |
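For illustration, a minimal sketch (hypothetical setting value, not GitLab output) of how multiple IDs are parsed with the `/\d+/` pattern added as `Gitlab::Regex.jira_transition_id_regex`:

```ruby
# Hypothetical value; JiraService#transition_issue uses each matched ID,
# in order, to transition the issue.
jira_issue_transition_id = '41, 43; 45'

jira_issue_transition_id.scan(/\d+/)
# => ["41", "43", "45"]
```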
### Getting a transition ID
......
......@@ -382,7 +382,7 @@ module API
end
get ':id/merge_requests/:merge_request_iid/closes_issues' do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
issues = ::Kaminari.paginate_array(merge_request.closes_issues(current_user))
issues = ::Kaminari.paginate_array(merge_request.visible_closing_issues_for(current_user))
issues = paginate(issues)
external_issues, internal_issues = issues.partition { |issue| issue.is_a?(ExternalIssue) }
......
......@@ -324,7 +324,7 @@ module API
post ':id/archive' do
authorize!(:archive_project, user_project)
user_project.archive!
::Projects::UpdateService.new(user_project, current_user, archived: true).execute
present user_project, with: Entities::Project
end
......@@ -335,7 +335,7 @@ module API
post ':id/unarchive' do
authorize!(:archive_project, user_project)
user_project.unarchive!
::Projects::UpdateService.new(@project, current_user, archived: false).execute
present user_project, with: Entities::Project
end
......
# frozen_string_literal: true
module BitbucketServer
class Client
attr_reader :connection
ServerError = Class.new(StandardError)
SERVER_ERRORS = [SocketError,
OpenSSL::SSL::SSLError,
Errno::ECONNRESET,
Errno::ECONNREFUSED,
Errno::EHOSTUNREACH,
Net::OpenTimeout,
Net::ReadTimeout,
Gitlab::HTTP::BlockedUrlError,
BitbucketServer::Connection::ConnectionError].freeze
def initialize(options = {})
@connection = Connection.new(options)
end
def pull_requests(project_key, repo)
path = "/projects/#{project_key}/repos/#{repo}/pull-requests?state=ALL"
get_collection(path, :pull_request)
end
def activities(project_key, repo, pull_request_id)
path = "/projects/#{project_key}/repos/#{repo}/pull-requests/#{pull_request_id}/activities"
get_collection(path, :activity)
end
def repo(project, repo_name)
parsed_response = connection.get("/projects/#{project}/repos/#{repo_name}")
BitbucketServer::Representation::Repo.new(parsed_response)
end
def repos
path = "/repos"
get_collection(path, :repo)
end
def create_branch(project_key, repo, branch_name, sha)
payload = {
name: branch_name,
startPoint: sha,
message: 'GitLab temporary branch for import'
}
connection.post("/projects/#{project_key}/repos/#{repo}/branches", payload.to_json)
end
def delete_branch(project_key, repo, branch_name, sha)
payload = {
name: Gitlab::Git::BRANCH_REF_PREFIX + branch_name,
dryRun: false
}
connection.delete(:branches, "/projects/#{project_key}/repos/#{repo}/branches", payload.to_json)
end
private
def get_collection(path, type)
paginator = BitbucketServer::Paginator.new(connection, Addressable::URI.escape(path), type)
BitbucketServer::Collection.new(paginator)
rescue *SERVER_ERRORS => e
raise ServerError, e
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Collection < Enumerator
def initialize(paginator)
super() do |yielder|
loop do
paginator.items.each { |item| yielder << item }
end
end
lazy
end
def method_missing(method, *args)
return super unless self.respond_to?(method)
self.__send__(method, *args) do |item| # rubocop:disable GitlabSecurity/PublicSend
block_given? ? yield(item) : item
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Connection
include ActionView::Helpers::SanitizeHelper
DEFAULT_API_VERSION = '1.0'
SEPARATOR = '/'
attr_reader :api_version, :base_uri, :username, :token
ConnectionError = Class.new(StandardError)
def initialize(options = {})
@api_version = options.fetch(:api_version, DEFAULT_API_VERSION)
@base_uri = options[:base_uri]
@username = options[:user]
@token = options[:password]
end
def get(path, extra_query = {})
response = Gitlab::HTTP.get(build_url(path),
basic_auth: auth,
headers: accept_headers,
query: extra_query)
check_errors!(response)
response.parsed_response
end
def post(path, body)
response = Gitlab::HTTP.post(build_url(path),
basic_auth: auth,
headers: post_headers,
body: body)
check_errors!(response)
response.parsed_response
end
# We need to support two different APIs for deletion:
#
# /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/branches/default
# /rest/branch-utils/1.0/projects/{projectKey}/repos/{repositorySlug}/branches
def delete(resource, path, body)
url = delete_url(resource, path)
response = Gitlab::HTTP.delete(url,
basic_auth: auth,
headers: post_headers,
body: body)
check_errors!(response)
response.parsed_response
end
private
def check_errors!(response)
raise ConnectionError, "Response is not valid JSON" unless response.parsed_response.is_a?(Hash)
return if response.code >= 200 && response.code < 300
details = sanitize(response.parsed_response.dig('errors', 0, 'message'))
message = "Error #{response.code}"
message += ": #{details}" if details
raise ConnectionError, message
rescue JSON::ParserError
raise ConnectionError, "Unable to parse the server response as JSON"
end
def auth
@auth ||= { username: username, password: token }
end
def accept_headers
@accept_headers ||= { 'Accept' => 'application/json' }
end
def post_headers
@post_headers ||= accept_headers.merge({ 'Content-Type' => 'application/json' })
end
def build_url(path)
return path if path.starts_with?(root_url)
url_join_paths(root_url, path)
end
def root_url
url_join_paths(base_uri, "/rest/api/#{api_version}")
end
def delete_url(resource, path)
if resource == :branches
url_join_paths(base_uri, "/rest/branch-utils/#{api_version}#{path}")
else
build_url(path)
end
end
# URI.join is stupid in that slashes are important:
#
# # URI.join('http://example.com/subpath', 'hello')
# => http://example.com/hello
#
# We really want http://example.com/subpath/hello
#
def url_join_paths(*paths)
paths.map { |path| strip_slashes(path) }.join(SEPARATOR)
end
def strip_slashes(path)
path = path[1..-1] if path.starts_with?(SEPARATOR)
path.chomp(SEPARATOR)
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Page
attr_reader :attrs, :items
def initialize(raw, type)
@attrs = parse_attrs(raw)
@items = parse_values(raw, representation_class(type))
end
def next?
!attrs.fetch(:isLastPage, true)
end
def next
attrs.fetch(:nextPageStart)
end
private
def parse_attrs(raw)
raw.slice('size', 'nextPageStart', 'isLastPage').symbolize_keys
end
def parse_values(raw, bitbucket_rep_class)
return [] unless raw['values'] && raw['values'].is_a?(Array)
bitbucket_rep_class.decorate(raw['values'])
end
def representation_class(type)
BitbucketServer::Representation.const_get(type.to_s.camelize)
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Paginator
PAGE_LENGTH = 25
def initialize(connection, url, type)
@connection = connection
@type = type
@url = url
@page = nil
end
def items
raise StopIteration unless has_next_page?
@page = fetch_next_page
@page.items
end
private
attr_reader :connection, :page, :url, :type
def has_next_page?
page.nil? || page.next?
end
def next_offset
page.nil? ? 0 : page.next
end
def fetch_next_page
parsed_response = connection.get(@url, start: next_offset, limit: PAGE_LENGTH)
Page.new(parsed_response, type)
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class Activity < Representation::Base
def comment?
action == 'COMMENTED'
end
def inline_comment?
!!(comment? && comment_anchor)
end
def comment
return unless comment?
@comment ||=
if inline_comment?
PullRequestComment.new(raw)
else
Comment.new(raw)
end
end
# TODO Move this into MergeEvent
def merge_event?
action == 'MERGED'
end
def committer_user
commit.dig('committer', 'displayName')
end
def committer_email
commit.dig('committer', 'emailAddress')
end
def merge_timestamp
timestamp = commit['committerTimestamp']
self.class.convert_timestamp(timestamp)
end
def merge_commit
commit['id']
end
def created_at
self.class.convert_timestamp(created_date)
end
private
def commit
raw.fetch('commit', {})
end
def action
raw['action']
end
def comment_anchor
raw['commentAnchor']
end
def created_date
raw['createdDate']
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class Base
attr_reader :raw
def initialize(raw)
@raw = raw
end
def self.decorate(entries)
entries.map { |entry| new(entry) }
end
def self.convert_timestamp(time_usec)
Time.at(time_usec / 1000) if time_usec.is_a?(Integer)
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
# A general comment with the structure:
# "comment": {
# "author": {
# "active": true,
# "displayName": "root",
# "emailAddress": "stanhu+bitbucket@gitlab.com",
# "id": 1,
# "links": {
# "self": [
# {
# "href": "http://localhost:7990/users/root"
# }
# ]
# },
# "name": "root",
# "slug": "root",
# "type": "NORMAL"
# }
# }
# }
class Comment < Representation::Base
attr_reader :parent_comment
CommentNode = Struct.new(:raw_comments, :parent)
def initialize(raw, parent_comment: nil)
super(raw)
@parent_comment = parent_comment
end
def id
raw_comment['id']
end
def author_username
author['displayName']
end
def author_email
author['emailAddress']
end
def note
raw_comment['text']
end
def created_at
self.class.convert_timestamp(created_date)
end
def updated_at
self.class.convert_timestamp(created_date)
end
# Bitbucket Server supports the ability to reply to any comment
# and create multiple threads. It represents these as a linked list
# of comments within comments. For example:
#
# "comments": [
# {
# "author" : ...
# "comments": [
# {
# "author": ...
#
# Since GitLab only supports a single thread, we flatten all these
# comments into a single discussion.
def comments
@comments ||= flatten_comments
end
private
# In order to provide context for each reply, we need to track
# the parent of each comment. This method works as follows:
#
# 1. Insert the root comment into the workset. The root element is the current note.
# 2. For each node in the workset:
#    a. Check whether the comment has replies. If it does, insert a node
#       for those replies into the workset.
#    b. Parse the comment into a Comment structure and add it to a flat list.
def flatten_comments
comments = raw_comment['comments']
workset =
if comments
[CommentNode.new(comments, self)]
else
[]
end
all_comments = []
until workset.empty?
node = workset.pop
parent = node.parent
node.raw_comments.each do |comment|
new_comments = comment.delete('comments')
current_comment = Comment.new({ 'comment' => comment }, parent_comment: parent)
all_comments << current_comment
workset << CommentNode.new(new_comments, current_comment) if new_comments
end
end
all_comments
end
def raw_comment
raw.fetch('comment', {})
end
def author
raw_comment['author']
end
def created_date
raw_comment['createdDate']
end
def updated_date
raw_comment['updatedDate']
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class PullRequest < Representation::Base
def author
raw.dig('author', 'user', 'name')
end
def author_email
raw.dig('author', 'user', 'emailAddress')
end
def description
raw['description']
end
def iid
raw['id']
end
def state
case raw['state']
when 'MERGED'
'merged'
when 'DECLINED'
'closed'
else
'opened'
end
end
def merged?
state == 'merged'
end
def created_at
self.class.convert_timestamp(created_date)
end
def updated_at
self.class.convert_timestamp(updated_date)
end
def title
raw['title']
end
def source_branch_name
raw.dig('fromRef', 'id')
end
def source_branch_sha
raw.dig('fromRef', 'latestCommit')
end
def target_branch_name
raw.dig('toRef', 'id')
end
def target_branch_sha
raw.dig('toRef', 'latestCommit')
end
private
def created_date
raw['createdDate']
end
def updated_date
raw['updatedDate']
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
# An inline comment with the following structure that identifies
# the part of the diff:
#
# "commentAnchor": {
# "diffType": "EFFECTIVE",
# "fileType": "TO",
# "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
# "line": 1,
# "lineType": "ADDED",
# "orphaned": false,
# "path": "CHANGELOG.md",
# "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
# }
#
# More details in https://docs.atlassian.com/bitbucket-server/rest/5.12.0/bitbucket-rest.html.
class PullRequestComment < Comment
def from_sha
comment_anchor['fromHash']
end
def to_sha
comment_anchor['toHash']
end
def to?
file_type == 'TO'
end
def from?
file_type == 'FROM'
end
def added?
line_type == 'ADDED'
end
def removed?
line_type == 'REMOVED'
end
# There are three line comment types: added, removed, or context.
#
# 1. An added type means a new line was inserted, so there is no old position.
# 2. A removed type means a line was removed, so there is no new position.
# 3. A context type means the line was unmodified, so there are both
#    an old and a new position.
def new_pos
return if removed?
return unless line_position
line_position[1]
end
def old_pos
return if added?
return unless line_position
line_position[0]
end
def file_path
comment_anchor.fetch('path')
end
private
def file_type
comment_anchor['fileType']
end
def line_type
comment_anchor['lineType']
end
# Each comment contains the following information about the diff:
#
# hunks: [
# {
# segments: [
# {
# "lines": [
# {
# "commentIds": [ N ],
# "source": X,
# "destination": Y
# }, ...
# ] ....
#
# To determine the line position of a comment, we search all the line
# entries until we find this comment's ID.
def line_position
@line_position ||= diff_hunks.each do |hunk|
segments = hunk.fetch('segments', [])
segments.each do |segment|
lines = segment.fetch('lines', [])
lines.each do |line|
if line['commentIds']&.include?(id)
return [line['source'], line['destination']]
end
end
end
end
end
def comment_anchor
raw.fetch('commentAnchor', {})
end
def diff
raw.fetch('diff', {})
end
def diff_hunks
diff.fetch('hunks', [])
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class Repo < Representation::Base
def initialize(raw)
super(raw)
end
def project_key
raw.dig('project', 'key')
end
def project_name
raw.dig('project', 'name')
end
def slug
raw['slug']
end
def browse_url
# The JSON response contains an array with a single element. It is unclear
# whether there are cases where multiple links would be provided.
raw.dig('links', 'self').first.fetch('href')
end
def clone_url
raw['links']['clone'].find { |link| link['name'].starts_with?('http') }.fetch('href')
end
def description
project['description']
end
def full_name
"#{project_name}/#{name}"
end
def issues_enabled?
true
end
def name
raw['name']
end
def valid?
raw['scmId'] == 'git'
end
def visibility_level
if project['public']
Gitlab::VisibilityLevel::PUBLIC
else
Gitlab::VisibilityLevel::PRIVATE
end
end
def project
raw['project']
end
def to_s
full_name
end
end
end
end
......@@ -46,7 +46,11 @@ module Gitlab
# arguments - The arguments to pass to the background migration's "perform"
# method.
def self.perform(class_name, arguments)
const_get(class_name).new.perform(*arguments)
migration_class_for(class_name).new.perform(*arguments)
end
def self.migration_class_for(class_name)
const_get(class_name)
end
end
end
module Gitlab
module BitbucketServerImport
class ProjectCreator
attr_reader :project_key, :repo_slug, :repo, :name, :namespace, :current_user, :session_data
def initialize(project_key, repo_slug, repo, name, namespace, current_user, session_data)
@project_key = project_key
@repo_slug = repo_slug
@repo = repo
@name = name
@namespace = namespace
@current_user = current_user
@session_data = session_data
end
def execute
::Projects::CreateService.new(
current_user,
name: name,
path: name,
description: repo.description,
namespace_id: namespace.id,
visibility_level: repo.visibility_level,
import_type: 'bitbucket_server',
import_source: repo.browse_url,
import_url: repo.clone_url,
import_data: {
credentials: session_data,
data: { project_key: project_key, repo_slug: repo_slug }
},
skip_wiki: true
).execute
end
end
end
end
......@@ -979,8 +979,8 @@ into similar problems in the future (e.g. when new tables are created).
# To not overload the worker too much we enforce a minimum interval both
# when scheduling and performing jobs.
if delay_interval < BackgroundMigrationWorker::MIN_INTERVAL
delay_interval = BackgroundMigrationWorker::MIN_INTERVAL
if delay_interval < BackgroundMigrationWorker.minimum_interval
delay_interval = BackgroundMigrationWorker.minimum_interval
end
model_class.each_batch(of: batch_size) do |relation, index|
......
......@@ -9,15 +9,16 @@ module Gitlab
# We exclude `bare_repository` here as it has no import class associated
ImportTable = [
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket', Gitlab::BitbucketImport::Importer),
ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
ImportSource.new('google_code', 'Google Code', Gitlab::GoogleCodeImport::Importer),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
ImportSource.new('git', 'Repo by URL', nil),
ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
ImportSource.new('gitea', 'Gitea', Gitlab::LegacyGithubImport::Importer),
ImportSource.new('manifest', 'Manifest file', nil)
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::Importer),
ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
ImportSource.new('google_code', 'Google Code', Gitlab::GoogleCodeImport::Importer),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
ImportSource.new('git', 'Repo by URL', nil),
ImportSource.new('gitlab_project', 'GitLab export', Gitlab::ImportExport::Importer),
ImportSource.new('gitea', 'Gitea', Gitlab::LegacyGithubImport::Importer),
ImportSource.new('manifest', 'Manifest file', nil)
].freeze
class << self
......
......@@ -100,5 +100,9 @@ module Gitlab
)
}mx
end
def jira_transition_id_regex
@jira_transition_id_regex ||= /\d+/
end
end
end
......@@ -16,6 +16,9 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\n"
msgid " Status"
msgstr ""
msgid " and"
msgstr ""
......@@ -987,6 +990,9 @@ msgstr ""
msgid "BillingPlans|per user"
msgstr ""
msgid "Bitbucket Server Import"
msgstr ""
msgid "Bitbucket import"
msgstr ""
......@@ -2666,6 +2672,9 @@ msgstr ""
msgid "Ends at (UTC)"
msgstr ""
msgid "Enter in your Bitbucket Server URL and personal access token below"
msgstr ""
msgid "Environments"
msgstr ""
......@@ -3022,6 +3031,9 @@ msgstr ""
msgid "From Bitbucket"
msgstr ""
msgid "From Bitbucket Server"
msgstr ""
msgid "From FogBugz"
msgstr ""
......@@ -3630,6 +3642,9 @@ msgstr ""
msgid "Import projects from Bitbucket"
msgstr ""
msgid "Import projects from Bitbucket Server"
msgstr ""
msgid "Import projects from FogBugz"
msgstr ""
......@@ -3639,6 +3654,9 @@ msgstr ""
msgid "Import projects from Google Code"
msgstr ""
msgid "Import repositories from Bitbucket Server"
msgstr ""
msgid "Import repositories from GitHub"
msgstr ""
......@@ -3958,6 +3976,9 @@ msgstr ""
msgid "List available repositories"
msgstr ""
msgid "List your Bitbucket Server repositories"
msgstr ""
msgid "List your GitHub repositories"
msgstr ""
......@@ -7647,6 +7668,9 @@ msgstr ""
msgid "here"
msgstr ""
msgid "https://your-bitbucket-server"
msgstr ""
msgid "import flow"
msgstr ""
......
require 'spec_helper'
describe Import::BitbucketServerController do
let(:user) { create(:user) }
let(:project_key) { 'test-project' }
let(:repo_slug) { 'some-repo' }
let(:client) { instance_double(BitbucketServer::Client) }
def assign_session_tokens
session[:bitbucket_server_url] = 'http://localhost:7990'
session[:bitbucket_server_username] = 'bitbucket'
session[:bitbucket_server_personal_access_token] = 'some-token'
end
before do
sign_in(user)
allow(controller).to receive(:bitbucket_server_import_enabled?).and_return(true)
end
describe 'GET new' do
render_views
it 'shows the input form' do
get :new
expect(response.body).to have_text('Bitbucket Server URL')
end
end
describe 'POST create' do
before do
allow(controller).to receive(:bitbucket_client).and_return(client)
repo = double(name: 'my-project')
allow(client).to receive(:repo).with(project_key, repo_slug).and_return(repo)
assign_session_tokens
end
set(:project) { create(:project) }
it 'returns the new project' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
.to receive(:new).with(project_key, repo_slug, anything, 'my-project', user.namespace, user, anything)
.and_return(double(execute: project))
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(200)
end
it 'returns an error when an invalid project key is used' do
post :create, project: 'some&project'
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when an invalid repository slug is used' do
post :create, project: 'some-project', repository: 'try*this'
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when the project cannot be found' do
allow(client).to receive(:repo).with(project_key, repo_slug).and_return(nil)
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when the project cannot be saved' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
.to receive(:new).with(project_key, repo_slug, anything, 'my-project', user.namespace, user, anything)
.and_return(double(execute: build(:project)))
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
it "returns an error when the server can't be contacted" do
expect(client).to receive(:repo).with(project_key, repo_slug).and_raise(BitbucketServer::Client::ServerError)
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
end
describe 'POST configure' do
let(:token) { 'token' }
let(:username) { 'bitbucket-user' }
let(:url) { 'http://localhost:7990/bitbucket' }
it 'clears out existing session' do
post :configure
expect(session[:bitbucket_server_url]).to be_nil
expect(session[:bitbucket_server_username]).to be_nil
expect(session[:bitbucket_server_personal_access_token]).to be_nil
expect(response).to have_gitlab_http_status(302)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
it 'sets the session variables' do
post :configure, personal_access_token: token, bitbucket_username: username, bitbucket_server_url: url
expect(session[:bitbucket_server_url]).to eq(url)
expect(session[:bitbucket_server_username]).to eq(username)
expect(session[:bitbucket_server_personal_access_token]).to eq(token)
expect(response).to have_gitlab_http_status(302)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
end
describe 'GET status' do
render_views
before do
allow(controller).to receive(:bitbucket_client).and_return(client)
@repo = double(slug: 'vim', project_key: 'asd', full_name: 'asd/vim', "valid?" => true, project_name: 'asd', browse_url: 'http://test', name: 'vim')
@invalid_repo = double(slug: 'invalid', project_key: 'foobar', full_name: 'asd/foobar', "valid?" => false, browse_url: 'http://bad-repo')
assign_session_tokens
end
it 'assigns repository categories' do
created_project = create(:project, import_type: 'bitbucket_server', creator_id: user.id, import_source: 'foo/bar', import_status: 'finished')
expect(client).to receive(:repos).and_return([@repo, @invalid_repo])
get :status
expect(assigns(:already_added_projects)).to eq([created_project])
expect(assigns(:repos)).to eq([@repo])
expect(assigns(:incompatible_repos)).to eq([@invalid_repo])
end
end
describe 'GET jobs' do
before do
assign_session_tokens
end
it 'returns a list of imported projects' do
created_project = create(:project, import_type: 'bitbucket_server', creator_id: user.id)
get :jobs
expect(json_response.count).to eq(1)
expect(json_response.first['id']).to eq(created_project.id)
expect(json_response.first['import_status']).to eq('none')
end
end
end
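Condensed from the controller examples above, the flow the spec exercises is: POST `configure` stores the server URL, username and personal access token in the session; GET `status` lists repositories through the Bitbucket client; POST `create` hands off to `Gitlab::BitbucketServerImport::ProjectCreator`. Parameter names come from the spec; values are placeholders.
# Controller-spec style, condensed from the examples above.
post :configure,
     bitbucket_server_url: 'http://localhost:7990',
     bitbucket_username: 'bitbucket',
     personal_access_token: 'some-token'   # stored in the session
get :status                                # lists importable and incompatible repos
post :create,
     project: 'test-project',              # Bitbucket project key
     repository: 'some-repo',              # repository slug
     format: :json                         # 200 on success, 422 on error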
......@@ -118,6 +118,10 @@ FactoryBot.define do
end
end
after(:create) do |merge_request, evaluator|
merge_request.cache_merge_request_closes_issues!
end
factory :merged_merge_request, traits: [:merged]
factory :closed_merge_request, traits: [:closed]
factory :reopened_merge_request, traits: [:opened]
......
......@@ -53,14 +53,14 @@ describe 'Explore Groups page', :js do
expect(find('.js-groups-list-holder .content-list li:first-child .stats .number-projects')).to have_text("1")
# Archive project
empty_project.archive!
::Projects::UpdateService.new(empty_project, user, archived: true).execute
visit explore_groups_path
# Check project count
expect(find('.js-groups-list-holder .content-list li:first-child .stats .number-projects')).to have_text("0")
# Unarchive project
empty_project.unarchive!
::Projects::UpdateService.new(empty_project, user, archived: false).execute
visit explore_groups_path
# Check project count
......
......@@ -52,6 +52,7 @@ describe 'Group issues page' do
context 'issues list', :nested_groups do
let(:subgroup) { create(:group, parent: group) }
let(:subgroup_project) { create(:project, :public, group: subgroup) }
let(:user_in_group) { create(:group_member, :maintainer, user: create(:user), group: group).user }
let!(:issue) { create(:issue, project: project, title: 'root group issue') }
let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') }
......@@ -67,7 +68,7 @@ describe 'Group issues page' do
context 'when project is archived' do
before do
project.archive!
::Projects::UpdateService.new(project, user_in_group, archived: true).execute
end
it 'does not render issue' do
......
......@@ -55,7 +55,7 @@ describe LabelsFinder do
context 'filtering by group_id' do
it 'returns labels available for any non-archived project within the group' do
group_1.add_developer(user)
project_1.archive!
::Projects::UpdateService.new(project_1, user, archived: true).execute
finder = described_class.new(user, group_id: group_1.id)
expect(finder.execute).to eq [group_label_2, group_label_1, project_label_5]
......
......@@ -36,7 +36,7 @@ describe MoveToProjectFinder do
it 'does not return archived projects' do
reporter_project.add_reporter(user)
reporter_project.archive!
::Projects::UpdateService.new(reporter_project, user, archived: true).execute
other_reporter_project = create(:project)
other_reporter_project.add_reporter(user)
......
{
"author":{
"approved":false,
"role":"AUTHOR",
"status":"UNAPPROVED",
"user":{
"active":true,
"displayName":"root",
"emailAddress":"joe.montana@49ers.com",
"id":1,
"links":{
"self":[
{
"href":"http://localhost:7990/users/root"
}
]
},
"name":"root",
"slug":"root",
"type":"NORMAL"
}
},
"closed":true,
"closedDate":1530600648850,
"createdDate":1530600635690,
"description":"Test",
"fromRef":{
"displayId":"root/CODE_OF_CONDUCTmd-1530600625006",
"id":"refs/heads/root/CODE_OF_CONDUCTmd-1530600625006",
"latestCommit":"074e2b4dddc5b99df1bf9d4a3f66cfc15481fdc8",
"repository":{
"forkable":true,
"id":1,
"links":{
"clone":[
{
"href":"http://root@localhost:7990/scm/test/rouge.git",
"name":"http"
},
{
"href":"ssh://git@localhost:7999/test/rouge.git",
"name":"ssh"
}
],
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/browse"
}
]
},
"name":"rouge",
"project":{
"description":"Test",
"id":1,
"key":"TEST",
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST"
}
]
},
"name":"test",
"public":false,
"type":"NORMAL"
},
"public":false,
"scmId":"git",
"slug":"rouge",
"state":"AVAILABLE",
"statusMessage":"Available"
}
},
"id":7,
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/pull-requests/7"
}
]
},
"locked":false,
"open":false,
"participants":[
],
"properties":{
"commentCount":1,
"openTaskCount":0,
"resolvedTaskCount":0
},
"reviewers":[
],
"state":"MERGED",
"title":"Added a new line",
"toRef":{
"displayId":"master",
"id":"refs/heads/master",
"latestCommit":"839fa9a2d434eb697815b8fcafaecc51accfdbbc",
"repository":{
"forkable":true,
"id":1,
"links":{
"clone":[
{
"href":"http://root@localhost:7990/scm/test/rouge.git",
"name":"http"
},
{
"href":"ssh://git@localhost:7999/test/rouge.git",
"name":"ssh"
}
],
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/browse"
}
]
},
"name":"rouge",
"project":{
"description":"Test",
"id":1,
"key":"TEST",
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST"
}
]
},
"name":"test",
"public":false,
"type":"NORMAL"
},
"public":false,
"scmId":"git",
"slug":"rouge",
"state":"AVAILABLE",
"statusMessage":"Available"
}
},
"updatedDate":1530600648850,
"version":2
}
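The JSON above is a Bitbucket Server pull-request payload used as a test fixture. A minimal sketch of pulling out the fields an import would care about, using plain JSON parsing; the fixture path is hypothetical and the keys come from the payload itself.
require 'json'
# Hypothetical fixture path; adjust to wherever the payload is stored.
payload = JSON.parse(File.read('spec/fixtures/bitbucket_server/pull_request.json'))
payload['title']                  # => "Added a new line"
payload['state']                  # => "MERGED"
payload['author']['user']['name'] # => "root"
payload['fromRef']['displayId']   # => "root/CODE_OF_CONDUCTmd-1530600625006"
payload['toRef']['latestCommit']  # => "839fa9a2d434eb697815b8fcafaecc51accfdbbc"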
......@@ -31,6 +31,44 @@ describe NamespacesHelper do
expect(options).to include(user.name)
end
it 'avoids duplicate groups when extra_group is used' do
allow(helper).to receive(:current_user).and_return(admin)
options = helper.namespaces_options(user_group.id, display_path: true, extra_group: build(:group, name: admin_group.name))
expect(options.scan("data-name=\"#{admin_group.name}\"").count).to eq(1)
expect(options).to include(admin_group.name)
end
it 'selects existing group' do
allow(helper).to receive(:current_user).and_return(admin)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: user_group)
expect(options).to include("selected=\"selected\" value=\"#{user_group.id}\"")
expect(options).to include(admin_group.name)
end
it 'selects the new group by default' do
allow(helper).to receive(:current_user).and_return(user)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: 'new-group'))
expect(options).to include(user_group.name)
expect(options).not_to include(admin_group.name)
expect(options).to include("selected=\"selected\" value=\"-1\"")
end
it 'falls back to current user selection' do
allow(helper).to receive(:current_user).and_return(user)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: admin_group.name))
expect(options).to include(user_group.name)
expect(options).not_to include(admin_group.name)
expect(options).to include("selected=\"selected\" value=\"#{user.namespace.id}\"")
end
it 'returns only groups if groups_only option is true' do
allow(helper).to receive(:current_user).and_return(user)
......
require 'spec_helper'
describe BitbucketServer::Client do
let(:base_uri) { 'https://test:7990/stash/' }
let(:options) { { base_uri: base_uri, user: 'bitbucket', password: 'mypassword' } }
let(:project) { 'SOME-PROJECT' }
let(:repo_slug) { 'my-repo' }
let(:headers) { { "Content-Type" => "application/json" } }
subject { described_class.new(options) }
describe '#pull_requests' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}/pull-requests?state=ALL" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :pull_request)
subject.pull_requests(project, repo_slug)
end
it 'throws an exception when connection fails' do
allow(BitbucketServer::Collection).to receive(:new).and_raise(OpenSSL::SSL::SSLError)
expect { subject.pull_requests(project, repo_slug) }.to raise_error(described_class::ServerError)
end
end
describe '#activities' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}/pull-requests/1/activities" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :activity)
subject.activities(project, repo_slug, 1)
end
end
describe '#repo' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}" }
let(:url) { "#{base_uri}rest/api/1.0/projects/SOME-PROJECT/repos/my-repo" }
it 'requests a specific repository' do
stub_request(:get, url).to_return(status: 200, headers: headers, body: '{}')
subject.repo(project, repo_slug)
expect(WebMock).to have_requested(:get, url)
end
end
describe '#repos' do
let(:path) { "/repos" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :repo)
subject.repos
end
end
describe '#create_branch' do
let(:branch) { 'test-branch' }
let(:sha) { '12345678' }
let(:url) { "#{base_uri}rest/api/1.0/projects/SOME-PROJECT/repos/my-repo/branches" }
it 'requests Bitbucket to create a branch' do
stub_request(:post, url).to_return(status: 204, headers: headers, body: '{}')
subject.create_branch(project, repo_slug, branch, sha)
expect(WebMock).to have_requested(:post, url)
end
end
describe '#delete_branch' do
let(:branch) { 'test-branch' }
let(:sha) { '12345678' }
let(:url) { "#{base_uri}rest/branch-utils/1.0/projects/SOME-PROJECT/repos/my-repo/branches" }
it 'requests Bitbucket to delete a branch' do
stub_request(:delete, url).to_return(status: 204, headers: headers, body: '{}')
subject.delete_branch(project, repo_slug, branch, sha)
expect(WebMock).to have_requested(:delete, url)
end
end
end
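Condensed from the client spec above: the client is built from a base URI plus basic-auth credentials and wraps the repository, pull-request, activity and branch endpoints. All values below are placeholders.
client = BitbucketServer::Client.new(
  base_uri: 'https://bitbucket.example.com/',
  user: 'bitbucket',
  password: 'personal-access-token'
)
client.repos                                     # paginated collection of repositories
client.repo('SOME-PROJECT', 'my-repo')           # a single repository
client.pull_requests('SOME-PROJECT', 'my-repo')  # paginated pull requests
client.activities('SOME-PROJECT', 'my-repo', 1)  # activities for pull request 1
client.create_branch('SOME-PROJECT', 'my-repo', 'tmp-branch', '12345678')
client.delete_branch('SOME-PROJECT', 'my-repo', 'tmp-branch', '12345678')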
require 'spec_helper'
describe BitbucketServer::Connection do
let(:options) { { base_uri: 'https://test:7990', user: 'bitbucket', password: 'mypassword' } }
let(:payload) { { 'test' => 1 } }
let(:headers) { { "Content-Type" => "application/json" } }
let(:url) { 'https://test:7990/rest/api/1.0/test?something=1' }
subject { described_class.new(options) }
describe '#get' do
it 'returns JSON body' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.get(url, { something: 1 })).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
it 'throws an exception if the response is not JSON' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
end
describe '#post' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
it 'returns JSON body' do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.post(url, payload)).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
end
end
describe '#delete' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
context 'branch API' do
let(:branch_path) { '/projects/foo/repos/bar/branches' }
let(:branch_url) { 'https://test:7990/rest/branch-utils/1.0/projects/foo/repos/bar/branches' }
let(:path) { }
it 'returns JSON body' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
end
end
end
end
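Condensed from the connection spec above; the argument shapes mirror the spec's own calls (an optional query hash for `get`, a payload for `post`, and a resource type plus path for `delete`), and all values are placeholders.
connection = BitbucketServer::Connection.new(
  base_uri: 'https://test:7990',
  user: 'bitbucket',
  password: 'mypassword'
)
connection.get('https://test:7990/rest/api/1.0/test?something=1', something: 1)
connection.post('https://test:7990/rest/api/1.0/test?something=1', 'test' => 1)
connection.delete(:branches, '/projects/foo/repos/bar/branches', 'test' => 1)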
require 'spec_helper'
describe BitbucketServer::Page do
let(:response) { { 'values' => [{ 'description' => 'Test' }], 'isLastPage' => false, 'nextPageStart' => 2 } }
before do
# Autoloading hack
BitbucketServer::Representation::PullRequest.new({})
end
describe '#items' do
it 'returns collection of needed objects' do
page = described_class.new(response, :pull_request)
expect(page.items.first).to be_a(BitbucketServer::Representation::PullRequest)
expect(page.items.count).to eq(1)
end
end
describe '#attrs' do
it 'returns attributes' do
page = described_class.new(response, :pull_request)
expect(page.attrs.keys).to include(:isLastPage, :nextPageStart)
end
end
describe '#next?' do
it 'returns true' do
page = described_class.new(response, :pull_request)
expect(page.next?).to be_truthy
end
it 'returns false' do
response['isLastPage'] = true
response.delete('nextPageStart')
page = described_class.new(response, :pull_request)
expect(page.next?).to be_falsey
end
end
describe '#next' do
it 'returns next attribute' do
page = described_class.new(response, :pull_request)
expect(page.next).to eq(2)
end
end
end
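As the spec above shows, Page wraps one raw response hash, exposes typed objects through `items`, and signals pagination through `next?`/`next` (the `nextPageStart` value). A minimal sketch of walking pages; `fetch_page` is a hypothetical stand-in for whatever performs the HTTP request, and in the codebase the paginator classes drive this loop.
def each_pull_request(fetch_page)
  start = 0
  loop do
    response = fetch_page.call(start)                        # raw hash like the one in the spec
    page = BitbucketServer::Page.new(response, :pull_request)
    page.items.each { |pull_request| yield pull_request }
    break unless page.next?
    start = page.next                                        # taken from 'nextPageStart'
  end
end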