Commit 952e2dc6 authored by Douwe Maan

Merge branch 'master' into eReGeBe/gitlab-ce-feature/milestone-md

parents 79108533 7acea6bd
......@@ -115,6 +115,11 @@ bundler:audit:
script:
- "bundle exec bundle-audit check --update --ignore OSVDB-115941"
db-migrate-reset:
stage: test
script:
- RAILS_ENV=test bundle exec rake db:migrate:reset
# Ruby 2.2 jobs
spec:feature:ruby22:
......
......@@ -2,8 +2,14 @@ Please view this file on the master branch, on stable branches it's out of date.
v 8.8.0 (unreleased)
v 8.7.0 (unreleased)
v 8.7.1 (unreleased)
- Use the `can?` helper instead of `current_user.can?`
v 8.7.0
- Gitlab::GitAccess and Gitlab::GitAccessWiki are now instrumented
- Fix vulnerability that made it possible to gain access to private labels and milestones
- The number of InfluxDB points stored per UDP packet can now be configured
- Fix error when cross-project label reference used with non-existent project
- Transactions for /internal/allowed now have an "action" tag set
- Method instrumentation now uses Module#prepend instead of aliasing methods
- Repository.clean_old_archives is now instrumented
......@@ -50,8 +56,9 @@ v 8.7.0 (unreleased)
- Add endpoints to archive or unarchive a project !3372
- Fix a bug with trailing slash in bamboo_url
- Add links to CI setup documentation from project settings and builds pages
- Display project members page to all members
- Handle nil descriptions in Slack issue messages (Stan Hu)
- Add automated repository integrity checks
- Add automated repository integrity checks (OFF by default)
- API: Expose open_issues_count, closed_issues_count, open_merge_requests_count for labels (Robert Schilling)
- API: Ability to star and unstar a project (Robert Schilling)
- Add default scope to projects to exclude projects pending deletion
......@@ -68,6 +75,7 @@ v 8.7.0 (unreleased)
- Hide `Create a group` help block when creating a new project in a group
- Implement 'TODOs View' as an option for dashboard preferences !3379 (Elias W.)
- Allow issues and merge requests to be assigned to the author !2765
- Make Ci::Commit group only similar builds and make it stateful (ref, tag)
- Gracefully handle notes on deleted commits in merge requests (Stan Hu)
- Decouple membership and notifications
- Fix creation of merge requests for orphaned branches (Stan Hu)
......@@ -77,6 +85,7 @@ v 8.7.0 (unreleased)
- Remove "Congratulations!" tweet button on newly-created project. (Connor Shea)
- Fix admin/projects when using visibility levels on search (PotHix)
- Build status notifications
- Update email confirmation interface
- API: Expose user location (Robert Schilling)
- API: Do not leak group existence via return code (Robert Schilling)
- ClosingIssueExtractor regex now also works with colons. e.g. "Fixes: #1234" !3591
......@@ -87,7 +96,6 @@ v 8.7.0 (unreleased)
- Fix repository cache invalidation issue when project is recreated with an empty repo (Stan Hu)
- Fix: Allow empty recipients list for builds emails service when pushed is added (Frank Groeneveld)
- Improved markdown forms
- Show JavaScript errors in sentry
- Diff design updates (colors, button styles, etc)
- Copying and pasting a diff no longer pastes the line numbers or +/-
- Add null check to formData when updating profile content to fix Firefox bug
......@@ -105,6 +113,7 @@ v 8.7.0 (unreleased)
- Updated print style for issues
- Use GitHub Issue/PR number as iid to keep references
- Import GitHub labels
- Add option to filter by "Owned projects" on dashboard page
- Import GitHub milestones
- Fix emoji categories in the emoji picker
- Execute system web hooks on push to the project
......
......@@ -134,7 +134,7 @@ GEM
execjs
coffee-script-source (1.10.0)
colorize (0.7.7)
concurrent-ruby (1.0.0)
concurrent-ruby (1.0.1)
connection_pool (2.2.0)
coveralls (0.8.13)
json (~> 1.8)
......@@ -629,7 +629,7 @@ GEM
recaptcha (1.0.2)
json
redcarpet (3.3.3)
redis (3.2.2)
redis (3.3.0)
redis-actionpack (4.0.1)
actionpack (~> 4)
redis-rack (~> 1.5.0)
......@@ -736,10 +736,9 @@ GEM
rack
shoulda-matchers (2.8.0)
activesupport (>= 3.0.0)
sidekiq (4.0.1)
sidekiq (4.1.1)
concurrent-ruby (~> 1.0)
connection_pool (~> 2.2, >= 2.2.0)
json (~> 1.0)
redis (~> 3.2, >= 3.2.1)
sidekiq-cron (0.4.0)
redis-namespace (>= 1.5.2)
......
......@@ -105,6 +105,25 @@ sensitive as to how you word things. Use Emoji to express your feelings (heart,
star, smile, etc.). Some good tips about giving feedback to merge requests are in
the [Thoughtbot code review guide].
## Feature Freeze
5 working days before the 22nd, the stable branches for the upcoming release will
be frozen for major changes. Merge requests may still be merged into master
during this period. By freezing the stable branches prior to a release there's
no need to worry about last-minute merge requests potentially breaking a lot of
things.
What is considered to be a major change is determined on a case-by-case basis, as
this definition depends very much on the context of the changes. For example, a
five-line change might have a big impact on the entire application. Ultimately the
decision will be made by those reviewing a merge request and the release
manager.
During the feature freeze, all merge requests that are meant to go into the next
release should have the correct milestone assigned _and_ have the label
~"Pick into Stable" set. Merge requests without a milestone and this label will
not be merged into any stable branches.
## Copy & paste responses
### Improperly formatted issue
......
......@@ -55,7 +55,6 @@
#= require_tree .
#= require fuzzaldrin-plus
#= require cropper
#= require raven
window.slugify = (text) ->
text.replace(/[^-a-zA-Z0-9]+/g, '_').toLowerCase()
......
class @CommitsList
@timer = null
@init: (ref, limit) ->
@init: (limit) ->
$("body").on "click", ".day-commits-table li.commit", (event) ->
if event.target.nodeName != "A"
location.href = $(this).attr("url")
......
......@@ -221,6 +221,9 @@ class GitLabDropdown
menu.toggleClass PAGE_TWO_CLASS
# Focus first visible input on active page
@dropdown.find('[class^="dropdown-page-"]:visible :text:visible:first').focus()
parseData: (data) ->
@renderedData = data
......@@ -240,7 +243,8 @@ class GitLabDropdown
shouldPropagate: (e) =>
if @options.multiSelect
$target = $(e.target)
if not $target.hasClass('dropdown-menu-close') and not $target.hasClass('dropdown-menu-close-icon')
if not $target.hasClass('dropdown-menu-close') and not $target.hasClass('dropdown-menu-close-icon') and not $target.data('is-link')
e.stopPropagation()
return false
else
......@@ -375,7 +379,6 @@ class GitLabDropdown
selectedObject = @renderedData[selectedIndex]
value = if @options.id then @options.id(selectedObject, el) else selectedObject.id
field = @dropdown.parent().find("input[name='#{fieldName}'][value='#{value}']")
if el.hasClass(ACTIVE_CLASS)
el.removeClass(ACTIVE_CLASS)
field.remove()
......
......@@ -19,23 +19,19 @@ class @LabelsSelect
$form = $dropdown.closest('form')
$sidebarCollapsedValue = $block.find('.sidebar-collapsed-icon span')
$value = $block.find('.value')
$loading = $block.find('.block-loading').fadeOut()
if newLabelField.length
$newLabelCreateButton = $('.js-new-label-btn')
$newLabelError = $('.js-label-error')
$colorPreview = $('.js-dropdown-label-color-preview')
$newLabelError = $dropdown.parent().find('.js-label-error')
$newLabelError.hide()
$newLabelCreateButton = $('.js-new-label-btn')
# Suggested colors in the dropdown to choose from pre-chosen colors
$('.suggest-colors-dropdown a').on 'click', (e) ->
$newLabelError.hide()
$loading = $block.find('.block-loading').fadeOut()
issueURLSplit = issueUpdateURL.split('/') if issueUpdateURL?
if issueUpdateURL
labelHTMLTemplate = _.template(
'<% _.each(labels, function(label){ %>
<a href="<%= ["",issueURLSplit[1], issueURLSplit[2],""].join("/") %>issues?label_name=<%= _.escape(label.title) %>">
<span class="label has-tooltip color-label" title="<%= _.escape(label.description) %>" style="background-color: <%= label.color %>;">
<span class="label has-tooltip color-label" title="<%= _.escape(label.description) %>" style="background-color: <%= label.color %>; color: <%= label.text_color %>;">
<%= _.escape(label.title) %>
</span>
</a>
......@@ -43,7 +39,9 @@ class @LabelsSelect
)
labelNoneHTMLTemplate = _.template('<div class="light">None</div>')
if newLabelField.length and $dropdown.hasClass 'js-extra-options'
if newLabelField.length
# Suggested colors in the dropdown to choose from pre-chosen colors
$('.suggest-colors-dropdown a').on "click", (e) ->
e.preventDefault()
e.stopPropagation()
......@@ -82,14 +80,17 @@ class @LabelsSelect
enableLabelCreateButton = ->
if newLabelField.val() isnt '' and newColorField.val() isnt ''
$newLabelError.hide()
$('.js-new-label-btn').disable()
$newLabelCreateButton.enable()
else
$newLabelCreateButton.disable()
saveLabel = ->
# Create new label with API
Api.newLabel projectId, {
name: newLabelField.val()
color: newColorField.val()
}, (label) ->
$('.js-new-label-btn').enable()
$newLabelCreateButton.enable()
if label.message?
$newLabelError
......@@ -98,10 +99,6 @@ class @LabelsSelect
else
$('.dropdown-menu-back', $dropdown.parent()).trigger 'click'
$newLabelCreateButton.enable()
else
$newLabelCreateButton.disable()
newLabelField.on 'keyup change', enableLabelCreateButton
newColorField.on 'keyup change', enableLabelCreateButton
......@@ -112,24 +109,7 @@ class @LabelsSelect
.on 'click', (e) ->
e.preventDefault()
e.stopPropagation()
if newLabelField.val() isnt '' and newColorField.val() isnt ''
$newLabelError.hide()
$('.js-new-label-btn').disable()
# Create new label with API
Api.newLabel projectId, {
name: newLabelField.val()
color: newColorField.val()
}, (label) ->
$('.js-new-label-btn').enable()
if label.message?
$newLabelError
.text label.message
.show()
else
$('.dropdown-menu-back', $dropdown.parent()).trigger 'click'
saveLabel()
saveLabelData = ->
selected = $dropdown
......@@ -243,7 +223,7 @@ class @LabelsSelect
fieldName: $dropdown.data('field-name')
id: (label) ->
if $dropdown.hasClass("js-filter-submit") and not label.isAny?
label.title
_.escape label.title
else
label.id
......
......@@ -87,8 +87,8 @@ class @MergeRequestTabs
if window.location.hash
navBarHeight = $('.navbar-gitlab').outerHeight()
$el = $("#{container} #{window.location.hash}")
$.scrollTo("#{container} #{window.location.hash}", offset: -navBarHeight) if $el.length
$el = $("#{container} #{window.location.hash}:not(.match)")
$.scrollTo("#{container} #{window.location.hash}:not(.match)", offset: -navBarHeight) if $el.length
# Activate a tab based on the current action
activateTab: (action) ->
......@@ -176,12 +176,12 @@ class @MergeRequestTabs
if locationHash isnt ''
hashClassString = ".#{locationHash.replace('#', '')}"
$diffLine = $(locationHash)
$diffLine = $("#{locationHash}:not(.match)", $('#diffs'))
if $diffLine.is ':not(tr)'
$diffLine = $("td#{locationHash}, td#{hashClassString}")
if not $diffLine.is 'tr'
$diffLine = $('#diffs').find("td#{locationHash}, td#{hashClassString}")
else
$diffLine = $('td', $diffLine)
$diffLine = $diffLine.find('td')
if $diffLine.length
$diffLine.addClass 'hll'
......
@raven =
init: ->
if gon.sentry_dsn?
Raven.config(gon.sentry_dsn, {
includePaths: [/gon.relative_url_root/]
ignoreErrors: [
# Random plugins/extensions
'top.GLOBALS',
# See: http://blog.errorception.com/2012/03/tale-of-unfindable-js-error.html
'originalCreateNotification',
'canvas.contentDocument',
'MyApp_RemoveAllHighlights',
'http://tt.epicplay.com',
'Can\'t find variable: ZiteReader',
'jigsaw is not defined',
'ComboSearch is not defined',
'http://loading.retry.widdit.com/',
'atomicFindClose',
# ISP "optimizing" proxy - `Cache-Control: no-transform` seems to
# reduce this. (thanks @acdha)
# See http://stackoverflow.com/questions/4113268
'bmi_SafeAddOnload',
'EBCallBackMessageReceived',
# See http://toolbar.conduit.com/Developer/HtmlAndGadget/Methods/JSInjection.aspx
'conduitPage'
],
ignoreUrls: [
# Chrome extensions
/extensions\//i,
/^chrome:\/\//i,
# Other plugins
/127\.0\.0\.1:4001\/isrunning/i, # Cacaoweb
/webappstoolbarba\.texthelp\.com\//i,
/metrics\.itunes\.apple\.com\.edgesuite\.net\//i
]
}).install()
if gon.current_user_id
Raven.setUserContext({
id: gon.current_user_id
})
$ ->
raven.init()
......@@ -144,6 +144,10 @@
}
}
.btn-lg {
padding: 12px 20px;
}
.btn-transparent {
color: $btn-transparent-color;
background-color: transparent;
......
......@@ -320,7 +320,7 @@
}
}
.dropdown-input-field {
.dropdown-input-field, .default-dropdown-input {
width: 100%;
padding: 0 7px;
color: $dropdown-input-color;
......
......@@ -38,12 +38,14 @@
.filename {
&.old {
display: inline-block;
span.idiff {
background-color: #f8cbcb;
}
}
&.new {
display: inline-block;
span.idiff {
background-color: #a6f3a6;
}
......@@ -129,6 +131,11 @@
td.line-numbers {
float: none;
border-left: 1px solid #ddd;
i {
float: none;
margin-right: 0;
}
}
td.lines {
padding: 0;
......
.well-confirmation {
margin-bottom: 20px;
border-bottom: 1px solid #eee;
> h1 {
font-weight: 400;
}
.lead {
margin-bottom: 20px;
}
}
.confirmation-content {
a {
color: $md-link-color;
}
}
......@@ -249,6 +249,10 @@
background: $gray-dark;
border: 1px solid $border-gray-dark;
}
&.btn-primary {
@extend .btn-primary
}
}
a:not(.issuable-pager) {
......
......@@ -183,6 +183,9 @@ ul.notes {
}
}
.author_link {
color: $gl-gray;
}
}
.note-headline-light,
......
......@@ -10,6 +10,8 @@ module FilterProjects
def filter_projects(projects)
projects = projects.search(params[:filter_projects]) if params[:filter_projects].present?
projects = projects.non_archived if params[:archived].blank?
projects = projects.personal(current_user) if params[:personal].present? && current_user
projects
end
end
class ConfirmationsController < Devise::ConfirmationsController
def almost_there
flash[:notice] = nil
render layout: "devise_empty"
end
protected
def after_resending_confirmation_instructions_path_for(resource)
users_almost_there_path
end
def after_confirmation_path_for(resource_name, resource)
if signed_in?(resource_name)
after_sign_in_path_for(resource)
......
......@@ -83,8 +83,7 @@ class Projects::ApplicationController < ApplicationController
end
def apply_diff_view_cookie!
view = params[:view] || cookies[:diff_view]
cookies.permanent[:diff_view] = params[:view] = view if view
cookies.permanent[:diff_view] = params.delete(:view) if params[:view].present?
end
def builds_enabled
......
......@@ -38,13 +38,13 @@ class Projects::CommitController < Projects::ApplicationController
end
def cancel_builds
ci_commit.builds.running_or_pending.each(&:cancel)
ci_builds.running_or_pending.each(&:cancel)
redirect_back_or_default default: builds_namespace_project_commit_path(project.namespace, project, commit.sha)
end
def retry_builds
ci_commit.builds.latest.failed.each do |build|
ci_builds.latest.failed.each do |build|
if build.retryable?
Ci::Build.retry(build)
end
......@@ -99,8 +99,12 @@ class Projects::CommitController < Projects::ApplicationController
@commit ||= @project.commit(params[:id])
end
def ci_commit
@ci_commit ||= project.ci_commit(commit.sha)
def ci_commits
@ci_commits ||= project.ci_commits.where(sha: commit.sha)
end
def ci_builds
@ci_builds ||= Ci::Build.where(commit: ci_commits)
end
def define_show_vars
......@@ -113,7 +117,8 @@ class Projects::CommitController < Projects::ApplicationController
@diff_refs = [commit.parent || commit, commit]
@notes_count = commit.notes.count
@statuses = ci_commit.statuses if ci_commit
@statuses = CommitStatus.where(commit: ci_commits)
@builds = Ci::Build.where(commit: ci_commits)
end
def assign_change_commit_vars(mr_source_branch)
......
......@@ -101,7 +101,6 @@ class Projects::IssuesController < Projects::ApplicationController
end
respond_to do |format|
format.js
format.html do
if @issue.valid?
redirect_to issue_path(@issue)
......@@ -110,7 +109,7 @@ class Projects::IssuesController < Projects::ApplicationController
end
end
format.json do
render json: @issue.to_json(include: [:milestone, :labels, assignee: { methods: :avatar_url }])
render json: @issue.to_json(include: { milestone: {}, assignee: { methods: :avatar_url }, labels: { methods: :text_color } })
end
end
end
......
......@@ -149,13 +149,12 @@ class Projects::MergeRequestsController < Projects::ApplicationController
if @merge_request.valid?
respond_to do |format|
format.js
format.html do
redirect_to([@merge_request.target_project.namespace.becomes(Namespace),
@merge_request.target_project, @merge_request])
end
format.json do
render json: @merge_request.to_json(include: [:milestone, :labels, assignee: { methods: :avatar_url }])
render json: @merge_request.to_json(include: { milestone: {}, assignee: { methods: :avatar_url }, labels: { methods: :text_color } })
end
end
else
......@@ -321,6 +320,7 @@ class Projects::MergeRequestsController < Projects::ApplicationController
def define_widget_vars
@ci_commit = @merge_request.ci_commit
@ci_commits = [@ci_commit].compact
closes_issues
end
......
class Projects::ProjectMembersController < Projects::ApplicationController
# Authorize
before_action :authorize_admin_project_member!, except: :leave
before_action :authorize_admin_project_member!, except: [:leave, :index]
def index
@project_members = @project.project_members
......
......@@ -31,11 +31,11 @@ class RegistrationsController < Devise::RegistrationsController
end
def after_sign_up_path_for(_resource)
new_user_session_path
users_almost_there_path
end
def after_inactive_sign_up_path_for(_resource)
new_user_session_path
users_almost_there_path
end
private
......
......@@ -272,16 +272,13 @@ class IssuableFinder
items = items.without_label
else
items = items.with_label(label_names)
if projects
items = items.where(labels: { project_id: projects })
end
end
end
# When filtering by multiple labels we may end up duplicating issues (if one
# has multiple labels). This ensures we only return unique issues.
items.distinct
items
end
def by_due_date(items)
......@@ -321,7 +318,7 @@ class IssuableFinder
end
def label_names
params[:label_name].split(',')
params[:label_name].is_a?(String) ? params[:label_name].split(',') : params[:label_name]
end
def current_user_related?
......
......@@ -4,14 +4,6 @@ module CiStatusHelper
builds_namespace_project_commit_path(project.namespace, project, ci_commit.sha)
end
def ci_status_icon(ci_commit)
ci_icon_for_status(ci_commit.status)
end
def ci_status_label(ci_commit)
ci_label_for_status(ci_commit.status)
end
def ci_status_with_icon(status, target = nil)
content = ci_icon_for_status(status) + '&nbsp;'.html_safe + ci_label_for_status(status)
klass = "ci-status ci-#{status}"
......@@ -47,10 +39,13 @@ module CiStatusHelper
end
def render_ci_status(ci_commit, tooltip_placement: 'auto left')
link_to ci_status_icon(ci_commit),
# TODO: split this method into
# - render_commit_status
# - render_pipeline_status
link_to ci_icon_for_status(ci_commit.status),
ci_status_path(ci_commit),
class: "ci-status-link ci-status-icon-#{ci_commit.status.dasherize}",
title: "Build #{ci_status_label(ci_commit)}",
title: "Build #{ci_label_for_status(ci_commit.status)}",
data: { toggle: 'tooltip', placement: tooltip_placement }
end
......
......@@ -9,7 +9,13 @@ module DiffHelper
end
def diff_view
params[:view] == 'parallel' ? 'parallel' : 'inline'
diff_views = %w(inline parallel)
if diff_views.include?(cookies[:diff_view])
cookies[:diff_view]
else
diff_views.first
end
end
def diff_hard_limit_enabled?
......
......@@ -25,6 +25,10 @@ module GitlabRoutingHelper
namespace_project_commits_path(project.namespace, project, @ref || project.repository.root_ref)
end
def project_pipelines_path(project, *args)
namespace_project_pipelines_path(project.namespace, project, *args)
end
def project_builds_path(project, *args)
namespace_project_builds_path(project.namespace, project, *args)
end
......
......@@ -84,6 +84,14 @@ module PageLayoutHelper
end
end
def nav(name = nil)
if name
@nav = name
else
@nav
end
end
def fluid_layout(enabled = false)
if @fluid_layout.nil?
@fluid_layout = (current_user && current_user.layout == "fluid") || enabled
......
......@@ -144,6 +144,10 @@ module ProjectsHelper
nav_tabs << :settings
end
if can?(current_user, :read_project_member, project)
nav_tabs << :team
end
if can?(current_user, :read_issue, project)
nav_tabs << :issues
end
......
......@@ -110,4 +110,12 @@ module TabHelper
'active'
end
end
def profile_tab_class
if controller.controller_path =~ /\Aprofiles/
return 'active'
end
'active' if current_controller?('oauth/applications')
end
end
......@@ -37,8 +37,6 @@
module Ci
class Build < CommitStatus
LAZY_ATTRIBUTES = ['trace']
belongs_to :runner, class_name: 'Ci::Runner'
belongs_to :trigger_request, class_name: 'Ci::TriggerRequest'
belongs_to :erased_by, class_name: 'User'
......@@ -50,25 +48,17 @@ module Ci
scope :unstarted, ->() { where(runner_id: nil) }
scope :ignore_failures, ->() { where(allow_failure: false) }
scope :similar, ->(build) { where(ref: build.ref, tag: build.tag, trigger_request_id: build.trigger_request_id) }
mount_uploader :artifacts_file, ArtifactUploader
mount_uploader :artifacts_metadata, ArtifactUploader
acts_as_taggable
# To prevent db load megabytes of data from trace
default_scope -> { select(Ci::Build.columns_without_lazy) }
before_destroy { project }
class << self
def columns_without_lazy
(column_names - LAZY_ATTRIBUTES).map do |column_name|
"#{table_name}.#{column_name}"
end
end
after_create :execute_hooks
class << self
def last_month
where('created_at > ?', Date.today - 1.month)
end
......@@ -126,12 +116,16 @@ module Ci
end
def retried?
!self.commit.latest_statuses_for_ref(self.ref).include?(self)
!self.commit.statuses.latest.include?(self)
end
def retry
Ci::Build.retry(self)
end
def depends_on_builds
# Get builds of the same type
latest_builds = self.commit.builds.similar(self).latest
latest_builds = self.commit.builds.latest
# Return builds from previous stages
latest_builds.where('stage_idx < ?', stage_idx)
......
......@@ -19,21 +19,28 @@
module Ci
class Commit < ActiveRecord::Base
extend Ci::Model
include Statuseable
belongs_to :project, class_name: '::Project', foreign_key: :gl_project_id
has_many :statuses, class_name: 'CommitStatus'
has_many :builds, class_name: 'Ci::Build'
has_many :trigger_requests, dependent: :destroy, class_name: 'Ci::TriggerRequest'
delegate :stages, to: :statuses
validates_presence_of :sha
validates_presence_of :status
validate :valid_commit_sha
# Invalidate object and save when touched
after_touch :update_state
def self.truncate_sha(sha)
sha[0...8]
end
def to_param
sha
def self.stages
CommitStatus.where(commit: all).stages
end
def project_id
......@@ -68,15 +75,20 @@ module Ci
nil
end
def stage
running_or_pending = statuses.latest.running_or_pending.ordered
running_or_pending.first.try(:stage)
def branch?
!tag?
end
def retryable?
builds.latest.any? do |build|
build.failed? && build.retryable?
end
end
def create_builds(ref, tag, user, trigger_request = nil)
def create_builds(user, trigger_request = nil)
return unless config_processor
config_processor.stages.any? do |stage|
CreateBuildsService.new.execute(self, stage, ref, tag, user, trigger_request, 'success').present?
CreateBuildsService.new(self).execute(stage, user, 'success', trigger_request).present?
end
end
......@@ -84,7 +96,7 @@ module Ci
return unless config_processor
# don't create other builds if this one is retried
latest_builds = builds.similar(build).latest
latest_builds = builds.latest
return unless latest_builds.exists?(build.id)
# get list of stages after this build
......@@ -92,88 +104,21 @@ module Ci
next_stages.delete(build.stage)
# get status for all prior builds
prior_builds = latest_builds.reject { |other_build| next_stages.include?(other_build.stage) }
status = Ci::Status.get_status(prior_builds)
prior_builds = latest_builds.where.not(stage: next_stages)
prior_status = prior_builds.status
# create builds for next stages based
next_stages.any? do |stage|
CreateBuildsService.new.execute(self, stage, build.ref, build.tag, build.user, build.trigger_request, status).present?
end
end
def refs
statuses.order(:ref).pluck(:ref).uniq
end
def latest_statuses
@latest_statuses ||= statuses.latest.to_a
end
def latest_statuses_for_ref(ref)
latest_statuses.select { |status| status.ref == ref }
CreateBuildsService.new(self).execute(stage, build.user, prior_status, build.trigger_request).present?
end
def matrix_builds(build = nil)
matrix_builds = builds.latest.ordered
matrix_builds = matrix_builds.similar(build) if build
matrix_builds.to_a
end
def retried
@retried ||= (statuses.order(id: :desc) - statuses.latest)
end
def status
if yaml_errors.present?
return 'failed'
end
@status ||= Ci::Status.get_status(latest_statuses)
end
def pending?
status == 'pending'
end
def running?
status == 'running'
end
def success?
status == 'success'
end
def failed?
status == 'failed'
end
def canceled?
status == 'canceled'
end
def active?
running? || pending?
end
def complete?
canceled? || success? || failed?
end
def duration
duration_array = statuses.map(&:duration).compact
duration_array.reduce(:+).to_i
end
def started_at
@started_at ||= statuses.order('started_at ASC').first.try(:started_at)
end
def finished_at
@finished_at ||= statuses.order('finished_at DESC').first.try(:finished_at)
end
def coverage
coverage_array = latest_statuses.map(&:coverage).compact
coverage_array = statuses.latest.map(&:coverage).compact
if coverage_array.size >= 1
'%.2f' % (coverage_array.reduce(:+) / coverage_array.size)
end
......@@ -181,7 +126,10 @@ module Ci
def config_processor
return nil unless ci_yaml_file
@config_processor ||= Ci::GitlabCiYamlProcessor.new(ci_yaml_file, project.path_with_namespace)
return @config_processor if defined?(@config_processor)
@config_processor ||= begin
Ci::GitlabCiYamlProcessor.new(ci_yaml_file, project.path_with_namespace)
rescue Ci::GitlabCiYamlProcessor::ValidationError, Psych::SyntaxError => e
save_yaml_error(e.message)
nil
......@@ -189,16 +137,19 @@ module Ci
save_yaml_error("Undefined error")
nil
end
end
def ci_yaml_file
return @ci_yaml_file if defined?(@ci_yaml_file)
@ci_yaml_file ||= begin
blob = project.repository.blob_at(sha, '.gitlab-ci.yml')
blob.load_all_data!(project.repository)
blob.data
end
rescue
nil
end
end
def skip_ci?
git_commit_message =~ /(\[ci skip\])/ if git_commit_message
......@@ -206,10 +157,23 @@ module Ci
private
def update_state
statuses.reload
self.status = if yaml_errors.blank?
statuses.latest.status || 'skipped'
else
'failed'
end
self.started_at = statuses.started_at
self.finished_at = statuses.finished_at
self.duration = statuses.latest.duration
save
end
def save_yaml_error(error)
return if self.yaml_errors?
self.yaml_errors = error
save
update_state
end
end
end
......@@ -207,12 +207,13 @@ class Commit
@raw.short_id(7)
end
def ci_commit
project.ci_commit(sha)
def ci_commits
@ci_commits ||= project.ci_commits.where(sha: sha)
end
def status
ci_commit.try(:status) || :not_found
return @status if defined?(@status)
@status ||= ci_commits.status
end
def revert_branch_name
......
......@@ -33,30 +33,23 @@
#
class CommitStatus < ActiveRecord::Base
include Statuseable
self.table_name = 'ci_builds'
belongs_to :project, class_name: '::Project', foreign_key: :gl_project_id
belongs_to :commit, class_name: 'Ci::Commit'
belongs_to :commit, class_name: 'Ci::Commit', touch: true
belongs_to :user
validates :commit, presence: true
validates :status, inclusion: { in: %w(pending running failed success canceled) }
validates_presence_of :name
alias_attribute :author, :user
scope :running, -> { where(status: 'running') }
scope :pending, -> { where(status: 'pending') }
scope :success, -> { where(status: 'success') }
scope :failed, -> { where(status: 'failed') }
scope :running_or_pending, -> { where(status: [:running, :pending]) }
scope :finished, -> { where(status: [:success, :failed, :canceled]) }
scope :latest, -> { where(id: unscope(:select).select('max(id)').group(:name, :ref)) }
scope :latest, -> { where(id: unscope(:select).select('max(id)').group(:name, :commit_id)) }
scope :ordered, -> { order(:ref, :stage_idx, :name) }
scope :for_ref, ->(ref) { where(ref: ref) }
AVAILABLE_STATUSES = ['pending', 'running', 'success', 'failed', 'canceled']
scope :ignored, -> { where(allow_failure: true, status: [:failed, :canceled]) }
state_machine :status, initial: :pending do
event :run do
......@@ -86,31 +79,24 @@ class CommitStatus < ActiveRecord::Base
after_transition [:pending, :running] => :success do |commit_status|
MergeRequests::MergeWhenBuildSucceedsService.new(commit_status.commit.project, nil).trigger(commit_status)
end
state :pending, value: 'pending'
state :running, value: 'running'
state :failed, value: 'failed'
state :success, value: 'success'
state :canceled, value: 'canceled'
end
delegate :sha, :short_sha, to: :commit, prefix: false
delegate :sha, :short_sha, to: :commit
# TODO: this should be removed with all references
def before_sha
Gitlab::Git::BLANK_SHA
commit.before_sha || Gitlab::Git::BLANK_SHA
end
def started?
!pending? && !canceled? && started_at
def self.stages
order_by = 'max(stage_idx)'
group('stage').order(order_by).pluck(:stage, order_by).map(&:first).compact
end
def active?
running? || pending?
def self.stages_status
all.stages.inject({}) do |h, stage|
h[stage] = all.where(stage: stage).status
h
end
def complete?
canceled? || success? || failed?
end
def ignored?
......@@ -118,11 +104,13 @@ class CommitStatus < ActiveRecord::Base
end
def duration
duration =
if started_at && finished_at
finished_at - started_at
elsif started_at
Time.now - started_at
end
duration
end
def stuck?
......
......@@ -37,7 +37,6 @@ module Issuable
scope :closed, -> { with_state(:closed) }
scope :order_milestone_due_desc, -> { joins(:milestone).reorder('milestones.due_date DESC, milestones.id DESC') }
scope :order_milestone_due_asc, -> { joins(:milestone).reorder('milestones.due_date ASC, milestones.id ASC') }
scope :with_label, ->(title) { joins(:labels).where(labels: { title: title }) }
scope :without_label, -> { joins("LEFT OUTER JOIN label_links ON label_links.target_type = '#{name}' AND label_links.target_id = #{table_name}.id").where(label_links: { id: nil }) }
scope :join_project, -> { joins(:project) }
......@@ -122,6 +121,14 @@ module Issuable
joins(join_clause).group(issuable_table[:id]).reorder("COUNT(notes.id) DESC")
end
def with_label(title)
if title.is_a?(Array) && title.count > 1
joins(:labels).where(labels: { title: title }).group('issues.id').having("count(distinct labels.title) = #{title.count}")
else
joins(:labels).where(labels: { title: title })
end
end
end
def today?
......
module Statuseable
extend ActiveSupport::Concern
AVAILABLE_STATUSES = %w(pending running success failed canceled skipped)
class_methods do
def status_sql
builds = all.select('count(*)').to_sql
success = all.success.select('count(*)').to_sql
ignored = all.ignored.select('count(*)').to_sql if all.respond_to?(:ignored)
ignored ||= '0'
pending = all.pending.select('count(*)').to_sql
running = all.running.select('count(*)').to_sql
canceled = all.canceled.select('count(*)').to_sql
skipped = all.skipped.select('count(*)').to_sql
deduce_status = "(CASE
WHEN (#{builds})=0 THEN NULL
WHEN (#{builds})=(#{success})+(#{ignored}) THEN 'success'
WHEN (#{builds})=(#{pending}) THEN 'pending'
WHEN (#{builds})=(#{canceled}) THEN 'canceled'
WHEN (#{builds})=(#{skipped}) THEN 'skipped'
WHEN (#{running})+(#{pending})>0 THEN 'running'
ELSE 'failed'
END)"
deduce_status
end
def status
all.pluck(self.status_sql).first
end
def duration
duration_array = all.map(&:duration).compact
duration_array.reduce(:+)
end
def started_at
all.minimum(:started_at)
end
def finished_at
all.maximum(:finished_at)
end
end
included do
validates :status, inclusion: { in: AVAILABLE_STATUSES }
state_machine :status, initial: :pending do
state :pending, value: 'pending'
state :running, value: 'running'
state :failed, value: 'failed'
state :success, value: 'success'
state :canceled, value: 'canceled'
state :skipped, value: 'skipped'
end
scope :running, -> { where(status: 'running') }
scope :pending, -> { where(status: 'pending') }
scope :success, -> { where(status: 'success') }
scope :failed, -> { where(status: 'failed') }
scope :canceled, -> { where(status: 'canceled') }
scope :skipped, -> { where(status: 'skipped') }
scope :running_or_pending, -> { where(status: [:running, :pending]) }
scope :finished, -> { where(status: [:success, :failed, :canceled]) }
end
def started?
!pending? && !canceled? && started_at
end
def active?
running? || pending?
end
def complete?
canceled? || success? || failed?
end
end
......@@ -586,7 +586,7 @@ class MergeRequest < ActiveRecord::Base
end
def ci_commit
@ci_commit ||= source_project.ci_commit(last_commit.id) if last_commit && source_project
@ci_commit ||= source_project.ci_commit(last_commit.id, source_branch) if last_commit && source_project
end
def diff_refs
......
......@@ -963,12 +963,12 @@ class Project < ActiveRecord::Base
!namespace.share_with_group_lock
end
def ci_commit(sha)
ci_commits.find_by(sha: sha)
def ci_commit(sha, ref)
ci_commits.order(id: :desc).find_by(sha: sha, ref: ref)
end
def ensure_ci_commit(sha)
ci_commit(sha) || ci_commits.create(sha: sha)
def ensure_ci_commit(sha, ref)
ci_commit(sha, ref) || ci_commits.create(sha: sha, ref: ref)
end
def enable_ci
......
module Ci
class CreateBuildsService
def execute(commit, stage, ref, tag, user, trigger_request, status)
builds_attrs = commit.config_processor.builds_for_stage_and_ref(stage, ref, tag, trigger_request)
def initialize(commit)
@commit = commit
end
def execute(stage, user, status, trigger_request = nil)
builds_attrs = config_processor.builds_for_stage_and_ref(stage, @commit.ref, @commit.tag, trigger_request)
# check when to create next build
builds_attrs = builds_attrs.select do |build_attrs|
......@@ -17,7 +21,8 @@ module Ci
builds_attrs.map do |build_attrs|
# don't create the same build twice
unless commit.builds.find_by(ref: ref, tag: tag, trigger_request: trigger_request, name: build_attrs[:name])
unless @commit.builds.find_by(ref: @commit.ref, tag: @commit.tag,
trigger_request: trigger_request, name: build_attrs[:name])
build_attrs.slice!(:name,
:commands,
:tag_list,
......@@ -26,17 +31,21 @@ module Ci
:stage,
:stage_idx)
build_attrs.merge!(ref: ref,
tag: tag,
build_attrs.merge!(ref: @commit.ref,
tag: @commit.tag,
trigger_request: trigger_request,
user: user,
project: commit.project)
project: @commit.project)
build = commit.builds.create!(build_attrs)
build.execute_hooks
build
@commit.builds.create!(build_attrs)
end
end
end
private
def config_processor
@config_processor ||= @commit.config_processor
end
end
end
......@@ -7,14 +7,14 @@ module Ci
# check if ref is tag
tag = project.repository.find_tag(ref).present?
ci_commit = project.ensure_ci_commit(commit.sha)
ci_commit = project.ci_commits.create(sha: commit.sha, ref: ref, tag: tag)
trigger_request = trigger.trigger_requests.create!(
variables: variables,
commit: ci_commit,
)
if ci_commit.create_builds(ref, tag, nil, trigger_request)
if ci_commit.create_builds(nil, trigger_request)
trigger_request
end
end
......
......@@ -3,8 +3,9 @@ module Ci
def execute(project, opts)
sha = opts[:sha] || ref_sha(project, opts[:ref])
commit = project.ci_commits.find_by(sha: sha)
image_name = image_for_commit(commit)
ci_commits = project.ci_commits.where(sha: sha)
ci_commits = ci_commits.where(ref: opts[:ref]) if opts[:ref]
image_name = image_for_status(ci_commits.status)
image_path = Rails.root.join('public/ci', image_name)
OpenStruct.new(path: image_path, name: image_name)
......@@ -16,9 +17,9 @@ module Ci
project.commit(ref).try(:sha) if ref
end
def image_for_commit(commit)
return 'build-unknown.svg' unless commit
'build-' + commit.status + ".svg"
def image_for_status(status)
status ||= 'unknown'
'build-' + status + ".svg"
end
end
end
......@@ -2,6 +2,7 @@ class CreateCommitBuildsService
def execute(project, user, params)
return false unless project.builds_enabled?
before_sha = params[:checkout_sha] || params[:before]
sha = params[:checkout_sha] || params[:after]
origin_ref = params[:ref]
......@@ -10,15 +11,16 @@ class CreateCommitBuildsService
end
ref = Gitlab::Git.ref_name(origin_ref)
tag = Gitlab::Git.tag_ref?(origin_ref)
# Skip branch removal
if sha == Gitlab::Git::BLANK_SHA
return false
end
commit = project.ci_commit(sha)
commit = project.ci_commit(sha, ref)
unless commit
commit = project.ci_commits.new(sha: sha)
commit = project.ci_commits.new(sha: sha, ref: ref, before_sha: before_sha, tag: tag)
# Skip creating ci_commit when no gitlab-ci.yml is found
unless commit.ci_yaml_file
......@@ -32,10 +34,10 @@ class CreateCommitBuildsService
# Skip creating builds for commits that have [ci skip]
unless commit.skip_ci?
# Create builds for commit
tag = Gitlab::Git.tag_ref?(origin_ref)
commit.create_builds(ref, tag, user)
commit.create_builds(user)
end
commit.touch
commit
end
end
......@@ -37,8 +37,9 @@ class IssuableBaseService < BaseService
end
def filter_params(issuable_ability_name = :issue)
params[:assignee_id] = "" if params[:assignee_id] == IssuableFinder::NONE
params[:milestone_id] = "" if params[:milestone_id] == IssuableFinder::NONE
filter_assignee
filter_milestone
filter_labels
ability = :"admin_#{issuable_ability_name}"
......@@ -49,6 +50,29 @@ class IssuableBaseService < BaseService
end
end
def filter_assignee
if params[:assignee_id] == IssuableFinder::NONE
params[:assignee_id] = ''
end
end
def filter_milestone
milestone_id = params[:milestone_id]
return unless milestone_id
if milestone_id == IssuableFinder::NONE ||
project.milestones.find_by(id: milestone_id).nil?
params[:milestone_id] = ''
end
end
def filter_labels
return if params[:label_ids].to_a.empty?
params[:label_ids] =
project.labels.where(id: params[:label_ids]).pluck(:id)
end
def update(issuable)
change_state(issuable)
filter_params
......
.well-confirmation.text-center
%h1.prepend-top-0
Almost there...
%p.lead
Please check your email to confirm your account
%p.confirmation-content.text-center
No confirmation email received? Please check your spam folder or
.append-bottom-20.prepend-top-20.text-center
%a.btn.btn-lg.btn-success{ href: new_user_confirmation_path }
Request new confirmation email
- page_title "Applications"
- header_title page_title, applications_profile_path
.row.prepend-top-default
.col-lg-3.profile-settings-sidebar
......
......@@ -25,6 +25,10 @@
.content-wrapper
= render "layouts/flash"
= yield :flash_message
- if defined?(nav) && nav
.layout-nav
%div{ class: container_class }
= render "layouts/nav/#{nav}"
%div{ class: (container_class unless @no_container) }
.content
.clearfix
......
......@@ -6,6 +6,6 @@
= yield :scripts_body_top
= render "layouts/header/default", title: header_title
= render 'layouts/page', sidebar: sidebar
= render 'layouts/page', sidebar: sidebar, nav: nav
= yield :scripts_body
!!! 5
%html{ lang: "en"}
= render "layouts/head"
%body.ui_charcoal.login-page.application.navless
= render "layouts/header/empty"
= render "layouts/broadcast"
.container.navless-container
.content
= render "layouts/flash"
= yield
%hr
.container
.footer-links
= link_to "Explore", explore_root_path
= link_to "Help", help_path
= link_to "About GitLab", "https://about.gitlab.com/"
......@@ -48,8 +48,7 @@
%span
Help
%li.separate-item
= nav_link(controller: :profile) do
= nav_link(html_options: {class: profile_tab_class}) do
= link_to profile_path, title: 'Profile Settings', data: {placement: 'bottom'} do
= icon('user fw')
%span
......
%ul.nav.nav-sidebar
= nav_link do
= link_to root_path, title: 'Go to dashboard', class: 'back-link' do
= icon('caret-square-o-left fw')
%span
Go to dashboard
%li.separate-item
%ul.nav-links
= nav_link(path: 'profiles#show', html_options: {class: 'home'}) do
= link_to profile_path, title: 'Profile Settings' do
= icon('user fw')
%span
Profile Settings
Profile
= nav_link(controller: [:accounts, :two_factor_auths]) do
= link_to profile_account_path, title: 'Account' do
= icon('gear fw')
......@@ -27,7 +19,6 @@
= icon('envelope-o fw')
%span
Emails
%span.count= number_with_delimiter(current_user.emails.count + 1)
- unless current_user.ldap_user?
= nav_link(controller: :passwords) do
= link_to edit_profile_password_path, title: 'Password' do
......@@ -45,7 +36,6 @@
= icon('key fw')
%span
SSH Keys
%span.count= number_with_delimiter(current_user.keys.count)
= nav_link(controller: :preferences) do
= link_to profile_preferences_path, title: 'Preferences' do
-# TODO (rspeicher): Better icon?
......
......@@ -77,7 +77,7 @@
Merge Requests
%span.count.merge_counter= number_with_delimiter(@project.merge_requests.opened.count)
- if project_nav_tab? :settings
- if project_nav_tab? :team
= nav_link(controller: [:project_members, :teams]) do
= link_to namespace_project_project_members_path(@project.namespace, @project), title: 'Members', class: 'team-tab tab' do
= icon('users fw')
......
- page_title "Profile Settings"
- header_title "Profile Settings", profile_path unless header_title
- sidebar "profile"
- sidebar "dashboard"
- nav "profile"
= render template: "layouts/application"
- page_title "Account"
- header_title page_title, profile_account_path
- if current_user.ldap_user?
.alert.alert-info
......
- page_title "Audit Log"
- header_title page_title, audit_log_profile_path
.row.prepend-top-default
.col-lg-3.profile-settings-sidebar
......
- page_title "Emails"
- header_title page_title, profile_emails_path
.row.prepend-top-default
.col-lg-3.profile-settings-sidebar
......
- page_title "SSH Keys"
- header_title page_title, profile_keys_path
.row.prepend-top-default
.col-lg-3.profile-settings-sidebar
......
- page_title "Notifications"
- header_title page_title, profile_notifications_path
%div
- if @user.errors.any?
......
- page_title "Password"
- header_title page_title, edit_profile_password_path
.row.prepend-top-default
.col-lg-3.profile-settings-sidebar
......
- page_title 'Preferences'
- header_title page_title, profile_preferences_path
= form_for @user, url: profile_preferences_path, remote: true, method: :put, html: {class: 'row prepend-top-default js-preferences-form'} do |f|
.col-lg-3.profile-settings-sidebar
......
......@@ -55,6 +55,9 @@
%li
gcovr (C/C++) -
%code ^TOTAL.*\s+(\d+\%)$
%li
tap --coverage-report=text-summary (Node.js) -
%code ^Statements\s*:\s*([^%]+)
.form-group
.col-sm-offset-2.col-sm-10
......
.project-last-commit
- ci_commit = project.ci_commit(commit.sha)
- if ci_commit
= link_to ci_status_path(ci_commit), class: "ci-status ci-#{ci_commit.status}" do
= ci_status_icon(ci_commit)
= ci_status_label(ci_commit)
- if commit.status
= link_to builds_namespace_project_commit_path(commit.project.namespace, commit.project, commit), class: "ci-status ci-#{commit.status}" do
= ci_icon_for_status(commit.status)
= ci_label_for_status(commit.status)
= link_to commit.short_id, namespace_project_commit_path(project.namespace, project, commit), class: "commit_short_id"
= link_to_gfm commit.title, namespace_project_commit_path(project.namespace, project, commit), class: "commit-row-message"
......
- if @lines.present?
- if @form.unfold? && @form.since != 1 && !@form.bottom?
%tr.line_holder{ id: @form.since }
%tr.line_holder
= render "projects/diffs/match_line", { line: @match_line,
line_old: @form.since, line_new: @form.since, bottom: false, new_file: false }
- @lines.each_with_index do |line, index|
- line_new = index + @form.since
- line_old = line_new - @form.offset
%tr.line_holder
%tr.line_holder{ id: line_old }
%td.old_line.diff-line-num{ data: { linenumber: line_old } }
= link_to raw(line_old), "#"
= link_to raw(line_old), "##{line_old}"
%td.new_line.diff-line-num{ data: { linenumber: line_old } }
= link_to raw(line_new) , "#"
= link_to raw(line_new) , "##{line_old}"
%td.line_content.noteable_line==#{' ' * @form.indent}#{line}
- if @form.unfold? && @form.bottom? && @form.to < @blob.loc
......
......@@ -58,6 +58,6 @@
%th Coverage
%th
= render @builds, commit_sha: true, stage: true, allow_retry: true, coverage: @project.build_coverage_enabled?
= render @builds, commit_sha: true, ref: true, stage: true, allow_retry: true, coverage: @project.build_coverage_enabled?
= paginate @builds, theme: 'gitlab'
......@@ -13,7 +13,7 @@
= link_to "merge request #{merge_request.to_reference}", merge_request_path(merge_request)
#up-build-trace
- builds = @build.commit.matrix_builds(@build)
- builds = @build.commit.builds.latest.to_a
- if builds.size > 1
%ul.nav-links.no-top.no-bottom
- builds.each do |build|
......
......@@ -19,6 +19,7 @@
%td
= link_to build.short_sha, namespace_project_commit_path(build.project.namespace, build.project, build.sha), class: "monospace"
- if defined?(ref) && ref
%td
- if build.ref
= link_to build.ref, namespace_project_commits_path(build.project.namespace, build.project, build.ref)
......@@ -48,6 +49,8 @@
%span.label.label-info triggered
- if build.try(:allow_failure)
%span.label.label-danger allowed to fail
- if defined?(retried) && retried
%span.label.label-warning retried
%td.duration
- if build.duration
......
.gray-content-block.middle-block
.pull-right
- if can?(current_user, :update_build, @ci_commit.project)
- if @ci_commit.builds.latest.failed.any?(&:retryable?)
= link_to "Retry failed", retry_builds_namespace_project_commit_path(@ci_commit.project.namespace, @ci_commit.project, @ci_commit.sha), class: 'btn btn-grouped btn-primary', method: :post
- if @ci_commit.builds.running_or_pending.any?
= link_to "Cancel running", cancel_builds_namespace_project_commit_path(@ci_commit.project.namespace, @ci_commit.project, @ci_commit.sha), data: { confirm: 'Are you sure?' }, class: 'btn btn-grouped btn-danger', method: :post
.oneline
= pluralize @statuses.count(:id), "build"
- if defined?(link_to_commit) && link_to_commit
for commit
= link_to @ci_commit.short_sha, namespace_project_commit_path(@ci_commit.project.namespace, @ci_commit.project, @ci_commit.sha), class: "monospace"
- if @ci_commit.duration > 0
in
= time_interval_in_words @ci_commit.duration
- if @ci_commit.yaml_errors.present?
.bs-callout.bs-callout-danger
%h4 Found errors in your .gitlab-ci.yml:
%ul
- @ci_commit.yaml_errors.split(",").each do |error|
%li= error
You can also test your .gitlab-ci.yml in the #{link_to "Lint", ci_lint_path}
- if @ci_commit.project.builds_enabled? && !@ci_commit.ci_yaml_file
.bs-callout.bs-callout-warning
\.gitlab-ci.yml not found in this commit
.table-holder
%table.table.builds
%thead
%tr
%th Status
%th Build ID
%th Ref
%th Stage
%th Name
%th Duration
%th Finished at
- if @ci_commit.project.build_coverage_enabled?
%th Coverage
%th
- @ci_commit.refs.each do |ref|
- builds = @ci_commit.statuses.for_ref(ref).latest.ordered
= render builds, coverage: @ci_commit.project.build_coverage_enabled?, stage: true, allow_retry: true
- if @ci_commit.retried.any?
.gray-content-block.second-block
Retried builds
.table-holder
%table.table.builds
%thead
%tr
%th Status
%th Build ID
%th Ref
%th Stage
%th Name
%th Duration
%th Finished at
- if @ci_commit.project.build_coverage_enabled?
%th Coverage
%th
= render @ci_commit.retried, coverage: @ci_commit.project.build_coverage_enabled?, stage: true
- @ci_commits.each do |ci_commit|
= render "ci_commit", ci_commit: ci_commit
.gray-content-block.middle-block
.pull-right
- if can?(current_user, :update_build, @project)
- if ci_commit.builds.latest.failed.any?(&:retryable?)
= link_to "Retry failed", retry_builds_namespace_project_commit_path(@project.namespace, @project, ci_commit.sha), class: 'btn btn-grouped btn-primary', method: :post
- if ci_commit.builds.running_or_pending.any?
= link_to "Cancel running", cancel_builds_namespace_project_commit_path(@project.namespace, @project, ci_commit.sha), data: { confirm: 'Are you sure?' }, class: 'btn btn-grouped btn-danger', method: :post
.oneline
= pluralize ci_commit.statuses.count(:id), "build"
- if ci_commit.ref
for
%span.label.label-info
= ci_commit.ref
- if defined?(link_to_commit) && link_to_commit
for commit
= link_to ci_commit.short_sha, namespace_project_commit_path(@project.namespace, @project, ci_commit.sha), class: "monospace"
- if ci_commit.duration > 0
in
= time_interval_in_words ci_commit.duration
- if ci_commit.yaml_errors.present?
.bs-callout.bs-callout-danger
%h4 Found errors in your .gitlab-ci.yml:
%ul
- ci_commit.yaml_errors.split(",").each do |error|
%li= error
You can also test your .gitlab-ci.yml in the #{link_to "Lint", ci_lint_path}
- if @project.builds_enabled? && !ci_commit.ci_yaml_file
.bs-callout.bs-callout-warning
\.gitlab-ci.yml not found in this commit
.table-holder
%table.table.builds
%thead
%tr
%th Status
%th Build ID
%th Stage
%th Name
%th Duration
%th Finished at
- if @project.build_coverage_enabled?
%th Coverage
%th
- builds = ci_commit.statuses.latest.ordered
= render builds, coverage: @project.build_coverage_enabled?, stage: true, ref: false, allow_retry: true
- if ci_commit.retried.any?
.gray-content-block.second-block
Retried builds
.table-holder
%table.table.builds
%thead
%tr
%th Status
%th Build ID
%th Ref
%th Stage
%th Name
%th Duration
%th Finished at
- if @project.build_coverage_enabled?
%th Coverage
%th
= render ci_commit.retried, coverage: @project.build_coverage_enabled?, stage: true, ref: false
......@@ -43,12 +43,12 @@
- @commit.parents.each do |parent|
= link_to parent.short_id, namespace_project_commit_path(@project.namespace, @project, parent), class: "monospace"
- if @ci_commit
- if @commit.status
.pull-right
= link_to ci_status_path(@ci_commit), class: "ci-status ci-#{@ci_commit.status}" do
= ci_status_icon(@ci_commit)
= link_to builds_namespace_project_commit_path(@project.namespace, @project, @commit.id), class: "ci-status ci-#{@commit.status}" do
= ci_icon_for_status(@commit.status)
build:
= ci_status_label(@ci_commit)
= ci_label_for_status(@commit.status)
.commit-info-row.branches
%i.fa.fa-spinner.fa-spin
......
......@@ -5,7 +5,7 @@
.prepend-top-default
= render "commit_box"
- if @ci_commit
- if @commit.status
= render "ci_menu"
- else
%div.block-connector
......
......@@ -4,9 +4,8 @@
- notes = commit.notes
- note_count = notes.user.count
- ci_commit = project.ci_commit(commit.sha)
- cache_key = [project.path_with_namespace, commit.id, current_application_settings, note_count]
- cache_key.push(ci_commit.status) if ci_commit
- cache_key.push(commit.status) if commit.status
= cache(cache_key) do
%li.commit.js-toggle-container{ id: "commit-#{commit.short_id}" }
......@@ -17,8 +16,8 @@
%a.text-expander.js-toggle-button ...
.pull-right
- if ci_commit
= render_ci_status(ci_commit)
- if commit.status
= render_ci_status(commit)
= clipboard_button(clipboard_text: commit.id)
= link_to commit.short_id, namespace_project_commit_path(project.namespace, project, commit), class: "commit_short_id"
......
......@@ -39,4 +39,4 @@
= spinner
:javascript
CommitsList.init("#{@ref}", #{@limit});
CommitsList.init(#{@limit});
......@@ -16,6 +16,7 @@
%td
= link_to generic_commit_status.short_sha, namespace_project_commit_path(generic_commit_status.project.namespace, generic_commit_status.project, generic_commit_status.sha), class: "monospace"
- if defined?(ref) && ref
%td
- if generic_commit_status.ref
= link_to generic_commit_status.ref, namespace_project_commits_path(generic_commit_status.project.namespace, generic_commit_status.project, generic_commit_status.ref)
......
......@@ -5,7 +5,7 @@
- @related_branches.each do |branch|
%li
- sha = @project.repository.find_branch(branch).target
- ci_commit = @project.ci_commit(sha) if sha
- ci_commit = @project.ci_commit(sha, branch) if sha
- if ci_commit
%span.related-branch-ci-status
= render_ci_status(ci_commit)
......
......@@ -3,7 +3,7 @@
- page_card_attributes @merge_request.card_attributes
- header_title project_title(@project, "Merge Requests", namespace_project_merge_requests_path(@project.namespace, @project))
- if params[:view] == 'parallel'
- if diff_view == 'parallel'
- fluid_layout true
.merge-request{'data-url' => merge_request_path(@merge_request)}
......
- page_title "#{@merge_request.title} (#{merge_request.to_reference}", "Merge Requests"
- page_title "#{@merge_request.title} (#{@merge_request.to_reference}", "Merge Requests"
= render "header_title"
.merge-request
......
= render "projects/commit/builds", link_to_commit: true
= render "projects/commit/ci_commit", ci_commit: @ci_commit, link_to_commit: true
......@@ -8,7 +8,7 @@
group, members with
%strong #{group_links.human_access}
role (#{shared_group_users_count})
- if current_user.can?(:admin_group, shared_group)
- if can?(current_user, :admin_group, shared_group)
.panel-head-actions
= link_to group_group_members_path(shared_group), class: 'btn btn-sm' do
%i.fa.fa-pencil-square-o
......
......@@ -8,39 +8,7 @@
= h(multi_label_name(params[:label_name], "Label"))
= icon('chevron-down')
.dropdown-menu.dropdown-select.dropdown-menu-paging.dropdown-menu-labels.dropdown-menu-selectable
.dropdown-page-one
= dropdown_title("Filter by label")
= dropdown_filter("Search labels")
= dropdown_content
- if @project
= dropdown_footer do
%ul.dropdown-footer-list
- if can? current_user, :admin_label, @project
%li
%a.dropdown-toggle-page{href: "#"}
Create new
%li
= link_to namespace_project_labels_path(@project.namespace, @project) do
- if can? current_user, :admin_label, @project
Manage labels
- else
View labels
= render partial: "shared/issuable/label_page_default", locals: { title: "Filter by label" }
- if can? current_user, :admin_label, @project and @project
.dropdown-page-two.dropdown-new-label
= dropdown_title("Create new label", back: true)
= dropdown_content do
.dropdown-labels-error.js-label-error
%input#new_label_name.dropdown-input-field{type: "text", placeholder: "Name new label"}
.suggest-colors.suggest-colors-dropdown
- suggested_colors.each do |color|
= link_to '#', style: "background-color: #{color}", data: { color: color } do
&nbsp
.dropdown-label-color-input
.dropdown-label-color-preview.js-dropdown-label-color-preview
%input#new_label_color.dropdown-input-field{ type: "text" }
.clearfix
%button.btn.btn-primary.pull-left.js-new-label-btn{type: "button"}
Create
%button.btn.btn-default.pull-right.js-cancel-label-btn{type: "button"}
Cancel
= render partial: "shared/issuable/label_page_create"
= dropdown_loading
.dropdown-page-two.dropdown-new-label
= dropdown_title("Create new label", back: true)
= dropdown_content do
.dropdown-labels-error.js-label-error
%input#new_label_name.default-dropdown-input{ type: "text", placeholder: "Name new label" }
.suggest-colors.suggest-colors-dropdown
- suggested_colors.each do |color|
= link_to '#', style: "background-color: #{color}", data: { color: color } do
&nbsp
.dropdown-label-color-input
.dropdown-label-color-preview.js-dropdown-label-color-preview
%input#new_label_color.default-dropdown-input{ type: "text" }
.clearfix
%button.btn.btn-primary.pull-left.js-new-label-btn{ type: "button" }
Create
%button.btn.btn-default.pull-right.js-cancel-label-btn{ type: "button" }
Cancel
- title = local_assigns.fetch(:title, 'Assign labels')
- filter_placeholder = local_assigns.fetch(:filter_placeholder, 'Search labels')
.dropdown-page-one
= dropdown_title(title)
= dropdown_filter(filter_placeholder)
= dropdown_content
- if @project
= dropdown_footer do
%ul.dropdown-footer-list
- if can? current_user, :admin_label, @project
%li
%a.dropdown-toggle-page{href: "#"}
Create new
%li
= link_to namespace_project_labels_path(@project.namespace, @project), :"data-is-link" => true do
- if can? current_user, :admin_label, @project
Manage labels
- else
View labels
= dropdown_loading
\ No newline at end of file
......@@ -20,7 +20,7 @@
%a.btn.btn-default.issuable-pager.disabled{href: '#'}
Next
= form_for [@project.namespace.becomes(Namespace), @project, issuable], remote: true, html: {class: 'issuable-context-form inline-update js-issuable-update'} do |f|
= form_for [@project.namespace.becomes(Namespace), @project, issuable], remote: true, format: :json, html: {class: 'issuable-context-form inline-update js-issuable-update'} do |f|
.block.assignee
.sidebar-collapsed-icon.sidebar-collapsed-user{data: {toggle: "tooltip", placement: "left", container: "body"}, title: (issuable.assignee.to_reference if issuable.assignee)}
- if issuable.assignee
......@@ -129,24 +129,9 @@
Label
= icon('chevron-down')
.dropdown-menu.dropdown-select.dropdown-menu-paging.dropdown-menu-labels.dropdown-menu-selectable
.dropdown-page-one
= dropdown_title("Assign labels")
= dropdown_filter("Search labels")
= dropdown_content
- if @project
= dropdown_footer do
%ul.dropdown-footer-list
- if can? current_user, :admin_label, @project
%li
%a.dropdown-toggle-page{href: "#"}
Create new
%li
= link_to namespace_project_labels_path(@project.namespace, @project) do
- if can? current_user, :admin_label, @project
Manage labels
- else
View labels
= dropdown_loading
= render partial: "shared/issuable/label_page_default"
- if can? current_user, :admin_label, @project and @project
= render partial: "shared/issuable/label_page_create"
= render "shared/issuable/participants", participants: issuable.participants(current_user)
- if current_user
......
- @sort ||= sort_value_recently_updated
- personal = params[:personal]
- archived = params[:archived]
.dropdown.inline
%button.dropdown-toggle.btn{type: 'button', 'data-toggle' => 'dropdown'}
......@@ -10,7 +11,7 @@
Sort by
- projects_sort_options_hash.each do |value, title|
%li
= link_to filter_projects_path(sort: value, archived: archived), class: ("is-active" if @sort == value) do
= link_to filter_projects_path(sort: value, archived: archived, personal: personal), class: ("is-active" if @sort == value) do
= title
%li.divider
......@@ -20,3 +21,11 @@
%li
= link_to filter_projects_path(sort: @sort, archived: true), class: ("is-active" if params[:archived].present?) do
Show archived projects
- if current_user
%li.divider
%li
= link_to filter_projects_path(sort: @sort, personal: nil), class: ("is-active" unless personal) do
Owned by anyone
%li
= link_to filter_projects_path(sort: @sort, personal: true), class: ("is-active" if personal) do
Owned by me
......@@ -6,9 +6,8 @@
- css_class = '' unless local_assigns[:css_class]
- show_last_commit_as_description = false unless local_assigns[:show_last_commit_as_description] == true && project.commit
- css_class += " no-description" if project.description.blank? && !show_last_commit_as_description
- ci_commit = project.ci_commit(project.commit.sha) if ci && !project.empty_repo? && project.commit
- cache_key = [project.namespace, project, controller.controller_name, controller.action_name, current_application_settings, 'v2.3']
- cache_key.push(ci_commit.status) if ci_commit
- cache_key.push(project.commit.status) if project.commit.try(:status)
%li.project-row{ class: css_class }
= cache(cache_key) do
......@@ -16,9 +15,9 @@
- if project.main_language
%span
= project.main_language
- if ci_commit
- if project.commit.try(:status)
%span
= render_ci_status(ci_commit)
= render_ci_status(project.commit)
- if forks
%span
= icon('code-fork')
......
#!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
APP_PATH = File.expand_path('../../config/application', __FILE__)
require_relative '../config/boot'
require 'rails/commands'
#!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require_relative '../config/boot'
require 'rake'
Rake.application.run
#!/usr/bin/env ruby
begin
load File.expand_path("../spring", __FILE__)
rescue LoadError
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require 'bundler/setup'
load Gem.bin_path('rspec-core', 'rspec')
#!/usr/bin/env ruby
begin
load File.expand_path("../spring", __FILE__)
rescue LoadError
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require 'bundler/setup'
load Gem.bin_path('spinach', 'spinach')
......@@ -4,12 +4,12 @@
# It gets overwritten when you run the `spring binstub` command.
unless defined?(Spring)
require "rubygems"
require "bundler"
require 'rubygems'
require 'bundler'
if match = Bundler.default_lockfile.read.match(/^GEM$.*?^ (?: )*spring \((.*?)\)$.*?^$/m)
Gem.paths = { "GEM_PATH" => [Bundler.bundle_path.to_s, *Gem.path].uniq }
gem "spring", match[1]
require "spring/binstub"
if (match = Bundler.default_lockfile.read.match(/^GEM$.*?^ (?: )*spring \((.*?)\)$.*?^$/m))
Gem.paths = { 'GEM_PATH' => [Bundler.bundle_path.to_s, *Gem.path].uniq.join(Gem.path_separator) }
gem 'spring', match[1]
require 'spring/binstub'
end
end
#!/usr/bin/env ruby
begin
load File.expand_path('../spring', __FILE__)
rescue LoadError => e
raise unless e.message.include?('spring')
end
require 'bundler/setup'
load Gem.bin_path('teaspoon', 'teaspoon')
......@@ -107,6 +107,10 @@ if Gitlab::Metrics.enabled?
config.instrument_methods(const)
config.instrument_instance_methods(const)
end
# Instrument the classes used for checking if somebody has push access.
config.instrument_instance_methods(Gitlab::GitAccess)
config.instrument_instance_methods(Gitlab::GitAccessWiki)
end
GC::Profiler.enable
......
......@@ -418,6 +418,7 @@ Rails.application.routes.draw do
devise_scope :user do
get '/users/auth/:provider/omniauth_error' => 'omniauth_callbacks#omniauth_error', as: :omniauth_error
get '/users/almost_there' => 'confirmations#almost_there'
end
root to: "root#index"
......
......@@ -19,7 +19,7 @@ class Gitlab::Seeder::Builds
commits = @project.repository.commits('master', nil, 5)
commits_sha = commits.map { |commit| commit.raw.id }
commits_sha.map do |sha|
@project.ensure_ci_commit(sha)
@project.ensure_ci_commit(sha, 'master')
end
rescue
[]
......
......@@ -4,6 +4,8 @@ class AddTrigramIndexesForSearching < ActiveRecord::Migration
def up
return unless Gitlab::Database.postgresql?
create_trigrams_extension
unless trigrams_enabled?
raise 'You must enable the pg_trgm extension. You can do so by running ' \
'"CREATE EXTENSION pg_trgm;" as a PostgreSQL super user, this must be ' \
......@@ -37,6 +39,15 @@ class AddTrigramIndexesForSearching < ActiveRecord::Migration
row && row['enabled'] == 't' ? true : false
end
def create_trigrams_extension
# This may not work if the user doesn't have permission. We attempt in
# case we do have permission, particularly for test/dev environments.
begin
enable_extension 'pg_trgm'
rescue
end
end
def to_index
{
ci_runners: [:token, :description],
......
class AddFieldsToCiCommit < ActiveRecord::Migration
def change
add_column :ci_commits, :status, :string
add_column :ci_commits, :started_at, :timestamp
add_column :ci_commits, :finished_at, :timestamp
add_column :ci_commits, :duration, :integer
end
end
class UpdateCiCommit < ActiveRecord::Migration
# This migration can be run online, but needs to be executed for the second time after restarting Unicorn workers
# Otherwise Offline migration should be used.
def change
execute("UPDATE ci_commits SET status=#{status}, ref=#{ref}, tag=#{tag} WHERE status IS NULL")
end
private
def status
builds = '(SELECT COUNT(*) FROM ci_builds WHERE ci_builds.commit_id=ci_commits.id)'
success = "(SELECT COUNT(*) FROM ci_builds WHERE ci_builds.commit_id=ci_commits.id AND status='success')"
ignored = "(SELECT COUNT(*) FROM ci_builds WHERE ci_builds.commit_id=ci_commits.id AND (status='failed' OR status='canceled') AND allow_failure)"
pending = "(SELECT COUNT(*) FROM ci_builds WHERE ci_builds.commit_id=ci_commits.id AND status='pending')"
running = "(SELECT COUNT(*) FROM ci_builds WHERE ci_builds.commit_id=ci_commits.id AND status='running')"
canceled = "(SELECT COUNT(*) FROM ci_builds WHERE ci_builds.commit_id=ci_commits.id AND status='canceled')"
"(CASE
WHEN #{builds}=0 THEN 'skipped'
WHEN #{builds}=#{success}+#{ignored} THEN 'success'
WHEN #{builds}=#{pending} THEN 'pending'
WHEN #{builds}=#{canceled} THEN 'canceled'
WHEN #{running}+#{pending}>0 THEN 'running'
ELSE 'failed'
END)"
end
def ref
'(SELECT ref FROM ci_builds WHERE ci_builds.commit_id=ci_commits.id ORDER BY id DESC LIMIT 1)'
end
def tag
'(SELECT tag FROM ci_builds WHERE ci_builds.commit_id=ci_commits.id ORDER BY id DESC LIMIT 1)'
end
end
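The `CASE` expression above derives a commit status from per-commit build counts. A minimal Ruby sketch of the same precedence rules, for readability; the method name and keyword arguments are illustrative only and not part of the migration:
```ruby
# Illustrative only: mirrors the CASE expression in UpdateCiCommit#status.
# Each argument is a per-commit count over ci_builds ("ignored" = failed or
# canceled builds that have allow_failure set).
def derived_status(builds:, success:, ignored:, pending:, running:, canceled:)
  return 'skipped'  if builds.zero?
  return 'success'  if builds == success + ignored
  return 'pending'  if builds == pending
  return 'canceled' if builds == canceled
  return 'running'  if (running + pending) > 0
  'failed'
end

derived_status(builds: 3, success: 2, ignored: 1, pending: 0, running: 0, canceled: 0)
# => "success"
```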
class AddCiCommitIndexes < ActiveRecord::Migration
disable_ddl_transaction!
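# `algorithm: :concurrently` maps to PostgreSQL's CREATE INDEX CONCURRENTLY,
# which cannot run inside a transaction block, hence disable_ddl_transaction!
# above. MySQL does not support the option, so index_options below passes an
# empty hash and the indexes are built with a normal (locking) CREATE INDEX.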
def change
add_index :ci_commits, [:gl_project_id, :sha], index_options
add_index :ci_commits, [:gl_project_id, :status], index_options
add_index :ci_commits, [:status], index_options
end
private
def index_options
if Gitlab::Database.postgresql?
{ algorithm: :concurrently }
else
{ }
end
end
end
class DisableRepositoryChecks < ActiveRecord::Migration
def up
change_column_default :application_settings, :repository_checks_enabled, false
execute 'UPDATE application_settings SET repository_checks_enabled = false'
end
def down
change_column_default :application_settings, :repository_checks_enabled, true
execute 'UPDATE application_settings SET repository_checks_enabled = true'
end
end
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20160419120017) do
ActiveRecord::Schema.define(version: 20160421130527) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -77,7 +77,7 @@ ActiveRecord::Schema.define(version: 20160419120017) do
t.string "akismet_api_key"
t.boolean "email_author_in_body", default: false
t.integer "default_group_visibility"
t.boolean "repository_checks_enabled", default: true
t.boolean "repository_checks_enabled", default: false
t.integer "metrics_packet_size", default: 1
t.text "shared_runners_text"
end
......@@ -171,14 +171,21 @@ ActiveRecord::Schema.define(version: 20160419120017) do
t.text "yaml_errors"
t.datetime "committed_at"
t.integer "gl_project_id"
t.string "status"
t.datetime "started_at"
t.datetime "finished_at"
t.integer "duration"
end
add_index "ci_commits", ["gl_project_id", "sha"], name: "index_ci_commits_on_gl_project_id_and_sha", using: :btree
add_index "ci_commits", ["gl_project_id", "status"], name: "index_ci_commits_on_gl_project_id_and_status", using: :btree
add_index "ci_commits", ["gl_project_id"], name: "index_ci_commits_on_gl_project_id", using: :btree
add_index "ci_commits", ["project_id", "committed_at", "id"], name: "index_ci_commits_on_project_id_and_committed_at_and_id", using: :btree
add_index "ci_commits", ["project_id", "committed_at"], name: "index_ci_commits_on_project_id_and_committed_at", using: :btree
add_index "ci_commits", ["project_id", "sha"], name: "index_ci_commits_on_project_id_and_sha", using: :btree
add_index "ci_commits", ["project_id"], name: "index_ci_commits_on_project_id", using: :btree
add_index "ci_commits", ["sha"], name: "index_ci_commits_on_sha", using: :btree
add_index "ci_commits", ["status"], name: "index_ci_commits_on_status", using: :btree
create_table "ci_events", force: :cascade do |t|
t.integer "project_id"
......
......@@ -41,6 +41,8 @@
- [Git LFS configuration](workflow/lfs/lfs_administration.md)
- [Housekeeping](administration/housekeeping.md) Keep your Git repository tidy and fast.
- [GitLab Performance Monitoring](monitoring/performance/introduction.md) Configure GitLab and InfluxDB for measuring performance metrics
- [Sidekiq Troubleshooting](administration/troubleshooting/sidekiq.md) Debug when Sidekiq appears hung and is not processing jobs
- [High Availability](administration/high_availability/README.md) Configure multiple servers for scaling or high availability
## Contributor documentation
......
# High Availability
GitLab supports several different types of clustering and high availability.
The solution you choose will depend on the level of scalability and
availability you require. The easiest solutions are scalable, but not necessarily
highly available.
## Architecture
### Active/Passive
For pure high-availability/failover with no scaling you can use an
active/passive configuration. This utilizes DRBD (Distributed Replicated
Block Device) to keep all data in sync. DRBD requires a low latency link to
remain in sync. It is not advisable to attempt to run DRBD between data centers
or in different cloud availability zones.
Components/Servers Required:
- 2 servers/virtual machines (one active/one passive)
### Active/Active
This architecture scales easily because all application servers handle
user requests simultaneously. The database, Redis, and GitLab application are
all deployed on separate servers. The configuration is **only** highly available
if the database, Redis, and storage are also configured as such.
**Steps to configure active/active:**
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
# Configuring a Database for GitLab HA
You can choose to install and manage a database server (PostgreSQL/MySQL)
yourself, or you can use GitLab Omnibus packages to help. GitLab recommends
PostgreSQL. This is the database that will be installed if you use the
Omnibus package to manage your database.
## Configure your own database server
If you're hosting GitLab on a cloud provider, you can optionally use a
managed service for PostgreSQL. For example, AWS offers a managed Relational
Database Service (RDS) that runs PostgreSQL.
If you use a cloud-managed service, or provide your own PostgreSQL:
1. Set up a `gitlab` username with a password of your choice. The `gitlab` user
needs privileges to create the `gitlabhq_production` database.
1. Configure the GitLab application servers with the appropriate details.
This step is covered in [Configuring GitLab for HA](gitlab.md)
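If you go this route, the application servers read the connection details from `/etc/gitlab/gitlab.rb`. A minimal sketch, assuming the standard Omnibus key names; the host, user, and password below are placeholders for your own endpoint and credentials:
```ruby
# /etc/gitlab/gitlab.rb on each application server (placeholder values)
postgresql['enable'] = false                 # don't run a local PostgreSQL
gitlab_rails['db_adapter'] = 'postgresql'
gitlab_rails['db_encoding'] = 'unicode'
gitlab_rails['db_host'] = 'db.example.com'   # RDS endpoint or your own server
gitlab_rails['db_port'] = 5432
gitlab_rails['db_username'] = 'gitlab'
gitlab_rails['db_password'] = 'DB password'
```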
## Configure using Omnibus
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL.
```ruby
external_url 'https://gitlab.example.com'
# Disable all components except PostgreSQL
postgresql['enable'] = true
bootstrap['enable'] = false
nginx['enable'] = false
unicorn['enable'] = false
sidekiq['enable'] = false
redis['enable'] = false
gitlab_workhorse['enable'] = false
mailroom['enable'] = false
# PostgreSQL configuration
postgresql['sql_password'] = 'DB password'
postgresql['md5_auth_cidr_addresses'] = ['0.0.0.0/0']
postgresql['listen_address'] = '0.0.0.0'
```
1. Run `sudo gitlab-ctl reconfigure` to install and configure PostgreSQL.
> **Note**: This `reconfigure` step will result in some errors.
That's OK - don't be alarmed.
1. Open a database prompt:
```
su - gitlab-psql
/bin/bash
psql -h /var/opt/gitlab/postgresql -d template1
# Output:
psql (9.2.15)
Type "help" for help.
template1=#
```
1. Run the following command at the database prompt and you will be asked to
enter the new password for the PostgreSQL superuser.
```
\password
# Output:
Enter new password:
Enter it again:
```
1. Similarly, set the password for the `gitlab` database user. Use the same
password that you specified in the `/etc/gitlab/gitlab.rb` file for
`postgresql['sql_password']`.
```
\password gitlab
# Output:
Enter new password:
Enter it again:
```
1. Enable the `pg_trgm` extension:
```
CREATE EXTENSION pg_trgm;
# Output:
CREATE EXTENSION
```
1. Exit the database prompt by typing `\q` and Enter.
1. Exit the `gitlab-psql` user by running `exit` twice.
1. Run `sudo gitlab-ctl reconfigure` a final time.
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
---
Read more on high-availability configuration:
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
# Configuring GitLab for HA
Assuming you have already configured a database, Redis, and NFS, you can
configure the GitLab application server(s) now. Complete the steps below
for each GitLab application server in your environment.
> **Note:** There is some additional configuration near the bottom for
secondary GitLab application servers. It's important to read and understand
these additional steps before proceeding with GitLab installation.
1. If necessary, install the NFS client utility packages using the following
commands:
```
# Ubuntu/Debian
apt-get install nfs-common
# CentOS/Red Hat
yum install nfs-utils nfs-utils-lib
```
1. Specify the necessary NFS shares. Mounts are specified in
`/etc/fstab`. The exact contents of `/etc/fstab` will depend on how you chose
to configure your NFS server. See [NFS documentation](nfs.md) for the various
options. Here is an example snippet to add to `/etc/fstab`:
```
10.1.0.1:/var/opt/gitlab/.ssh /var/opt/gitlab/.ssh nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-rails/uploads /var/opt/gitlab/gitlab-rails/uploads nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-rails/shared /var/opt/gitlab/gitlab-rails/shared nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.0.1:/var/opt/gitlab/gitlab-ci/builds /var/opt/gitlab/gitlab-ci/builds nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
10.1.1.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
```
1. Create the shared directories. These may be different depending on your NFS
mount locations.
```
mkdir -p /var/opt/gitlab/.ssh /var/opt/gitlab/gitlab-rails/uploads /var/opt/gitlab/gitlab-rails/shared /var/opt/gitlab/gitlab-ci/builds /var/opt/gitlab/git-data
```
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL. Depending on your NFS configuration, you may need to change some GitLab
data locations. See [NFS documentation](nfs.md) for `/etc/gitlab/gitlab.rb`
configuration values for various scenarios. The example below assumes you've
added NFS mounts in the default data locations.
```ruby
external_url 'https://gitlab.example.com'
# Prevent GitLab from starting if NFS data mounts are not available
high_availability['mountpoint'] = '/var/opt/gitlab/git-data'
# Disable components that will not be on the GitLab application server
postgresql['enable'] = false
redis['enable'] = false
# PostgreSQL connection details
gitlab_rails['db_adapter'] = 'postgresql'
gitlab_rails['db_encoding'] = 'unicode'
gitlab_rails['db_host'] = '10.1.0.5' # IP/hostname of database server
gitlab_rails['db_password'] = 'DB password'
# Redis connection details
gitlab_rails['redis_port'] = '6379'
gitlab_rails['redis_host'] = '10.1.0.6' # IP/hostname of Redis server
gitlab_rails['redis_password'] = 'Redis Password'
```
1. Run `sudo gitlab-ctl reconfigure` to compile the configuration.
## Primary GitLab application server
As a final step, run the setup rake task on the first GitLab application server.
It is not necessary to run this on additional application servers.
1. Initialize the database by running `sudo gitlab-rake gitlab:setup`.
> **WARNING:** Only run this setup task on **NEW** GitLab instances because it
will wipe any existing data.
> **Note:** When you specify `https` in the `external_url`, as in the example
above, GitLab assumes you have SSL certificates in `/etc/gitlab/ssl/`. If
certificates are not present, Nginx will fail to start. See
[Nginx documentation](http://docs.gitlab.com/omnibus/settings/nginx.html#enable-https)
for more information.
## Additional configuration for secondary GitLab application servers
Secondary GitLab servers (servers configured **after** the first GitLab server)
need some additional configuration.
1. Configure shared secrets. These values can be obtained from the primary
GitLab server in `/etc/gitlab/gitlab-secrets.json`. Add these to
`/etc/gitlab/gitlab.rb` **prior to** running the first `reconfigure` in
the steps above.
```ruby
gitlab_shell['secret_token'] = 'fbfb19c355066a9afb030992231c4a363357f77345edd0f2e772359e5be59b02538e1fa6cae8f93f7d23355341cea2b93600dab6d6c3edcdced558fc6d739860'
gitlab_rails['secret_token'] = 'b719fe119132c7810908bba18315259ed12888d4f5ee5430c42a776d840a396799b0a5ef0a801348c8a357f07aa72bbd58e25a84b8f247a25c72f539c7a6c5fa'
gitlab_ci['secret_key_base'] = '6e657410d57c71b4fc3ed0d694e7842b1895a8b401d812c17fe61caf95b48a6d703cb53c112bc01ebd197a85da81b18e29682040e99b4f26594772a4a2c98c6d'
gitlab_ci['db_key_base'] = 'bf2e47b68d6cafaef1d767e628b619365becf27571e10f196f98dc85e7771042b9203199d39aff91fcb6837c8ed83f2a912b278da50999bb11a2fbc0fba52964'
```
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
## Troubleshooting
- `mount: wrong fs type, bad option, bad superblock on`
You have not installed the necessary NFS client utilities. See step 1 above.
- `mount: mount point /var/opt/gitlab/... does not exist`
This particular directory does not exist on the NFS server. Ensure
the share is exported and exists on the NFS server and try to remount.
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the load balancers](load_balancer.md)
# Load Balancer for GitLab HA
In an active/active GitLab configuration, you will need a load balancer to route
traffic to the application servers. The specifics of which load balancer to use
and how to configure it are beyond the scope of GitLab documentation. We hope
that if you're managing HA systems like GitLab you already have a load balancer
of choice. Some examples include HAProxy (open source), F5 BIG-IP LTM, and
Citrix NetScaler. This documentation outlines what ports and protocols you
need to use with GitLab.
## Basic ports
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 80 | 80 | HTTP |
| 443 | 443 | HTTPS [^1] |
| 22 | 22 | TCP |
## GitLab Pages Ports
If you're using GitLab Pages you will need some additional port configurations.
GitLab Pages requires a separate VIP. Configure DNS to point the
`pages_external_url` from `/etc/gitlab/gitlab.rb` at the new VIP. See the
[GitLab Pages documentation][gitlab-pages] for more information.
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 80 | Varies [^2] | HTTP |
| 443 | Varies [^2] | TCP [^3] |
## Alternate SSH Port
Some organizations have policies against opening SSH port 22. In this case,
it may be helpful to configure an alternate SSH hostname that allows users
to use SSH on port 443. An alternate SSH hostname will require a new VIP
compared to the other GitLab HTTP configuration above.
Configure DNS for an alternate SSH hostname such as altssh.gitlab.example.com.
| LB Port | Backend Port | Protocol |
| ------- | ------------ | -------- |
| 443 | 22 | TCP |
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
[^1]: When using HTTPS protocol for port 443, you will need to add an SSL
certificate to the load balancers. If you wish to terminate SSL at the
GitLab application server instead, use TCP protocol.
[^2]: The backend port for GitLab Pages depends on the
`gitlab_pages['external_http']` and `gitlab_pages['external_https']`
setting. See [GitLab Pages documentation][gitlab-pages] for more details.
[^3]: Port 443 for GitLab Pages should always use the TCP protocol. Users can
    configure custom domains with custom SSL, which would not be possible
    if SSL were terminated at the load balancer.
[gitlab-pages]: http://doc.gitlab.com/ee/pages/administration.html
# NFS
## Required NFS Server features
**File locking**: GitLab **requires** file locking which is only supported
natively in NFS version 4. NFSv3 also supports locking as long as
Linux Kernel 2.6.5+ is used. We recommend using version 4 and do not
specifically test NFSv3.
**no_root_squash**: NFS normally changes the `root` user to `nobody`. This is
a good security measure when NFS shares will be accessed by many different
users. However, in this case only GitLab will use the NFS share so it
is safe. GitLab requires the `no_root_squash` setting because we need to
manage file permissions automatically. Without the setting you will receive
errors when the Omnibus package tries to alter permissions. Note that GitLab
and other bundled components do **not** run as `root` but as non-privileged
users. The requirement for `no_root_squash` is to allow the Omnibus package to
set ownership and permissions on files, as needed.
### Recommended options
When you define your NFS exports, we recommend you also add the following
options:
- `sync` - Force synchronous behavior. Default is asynchronous and under certain
circumstances it could lead to data loss if a failure occurs before data has
synced.
## Client mount options
Below is an example of an NFS mount point we use on GitLab.com:
```
10.1.1.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nobootwait,lookupcache=positive 0 2
```
Notice several options that you should consider using:
| Setting | Description |
| ------- | ----------- |
| `nobootwait` | Don't halt boot process waiting for this mount to become available
| `lookupcache=positive` | Tells the NFS client to honor `positive` cache results but invalidates any `negative` cache results. Negative cache results cause problems with Git. Specifically, a `git push` can fail to register uniformly across all NFS clients. The negative cache causes the clients to 'remember' that the files did not exist previously.
## Mount locations
When using default Omnibus configuration you will need to share 5 data locations
between all GitLab cluster nodes. No other locations should be shared. The
following are the 5 locations you need to mount:
| Location | Description |
| -------- | ----------- |
| `/var/opt/gitlab/git-data` | Git repository data. This will account for a large portion of your data
| `/var/opt/gitlab/.ssh` | SSH `authorized_keys` file and keys used to import repositories from some other Git services
| `/var/opt/gitlab/gitlab-rails/uploads` | User uploaded attachments
| `/var/opt/gitlab/gitlab-rails/shared` | Build artifacts, GitLab Pages, LFS objects, temp files, etc. If you're using LFS this may also account for a large portion of your data
| `/var/opt/gitlab/gitlab-ci/builds` | GitLab CI build traces
Other GitLab directories should not be shared between nodes. They contain
node-specific files and GitLab code that does not need to be shared. To ship
logs to a central location consider using remote syslog. GitLab Omnibus packages
provide configuration for [UDP log shipping][udp-log-shipping].
### Consolidating mount points
If you don't want to configure 5-6 different NFS mount points, you have a few
alternative options.
#### Change default file locations
Omnibus allows you to configure the file locations. With custom configuration
you can specify just one main mountpoint and have all of these locations
as subdirectories. Mount `/gitlab-data` then use the following Omnibus
configuration to move each data location to a subdirectory:
```ruby
user['home'] = '/gitlab-data/home'
git_data_dir '/gitlab-data/git-data'
gitlab_rails['shared_path'] = '/gitlab-data/shared'
gitlab_rails['uploads_directory'] = "/gitlab-data/uploads"
gitlab_ci['builds_directory'] = '/gitlab-data/builds'
```
To move the `git` home directory, all GitLab services must be stopped. Run
`gitlab-ctl stop && initctl stop gitlab-runsvdir`. Then continue with the
reconfigure.
Run `sudo gitlab-ctl reconfigure` to start using the central location. Please
be aware that if you had existing data you will need to manually copy/rsync it
to these new locations and then restart GitLab.
#### Bind mounts
Bind mounts provide a way to specify just one NFS mount and then
bind the default GitLab data locations to the NFS mount. Start by defining your
single NFS mount point as you normally would in `/etc/fstab`. Let's assume your
NFS mount point is `/gitlab-data`. Then, add the following bind mounts in
`/etc/fstab`:
```bash
/gitlab-data/git-data /var/opt/gitlab/git-data none bind 0 0
/gitlab-data/.ssh /var/opt/gitlab/.ssh none bind 0 0
/gitlab-data/uploads /var/opt/gitlab/gitlab-rails/uploads none bind 0 0
/gitlab-data/shared /var/opt/gitlab/gitlab-rails/shared none bind 0 0
/gitlab-data/builds /var/opt/gitlab/gitlab-ci/builds none bind 0 0
```
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure Redis](redis.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
[udp-log-shipping]: http://doc.gitlab.com/omnibus/settings/logs.html#udp-log-shipping-gitlab-enterprise-edition-only "UDP log shipping"
# Configuring Redis for GitLab HA
You can choose to install and manage Redis yourself, or you can use GitLab
Omnibus packages to help.
## Configure your own Redis server
If you're hosting GitLab on a cloud provider, you can optionally use a
managed service for Redis. For example, AWS offers a managed ElastiCache service
that runs Redis.
> **Note:** Redis does not require authentication by default. See
[Redis Security](http://redis.io/topics/security) documentation for more
information. We recommend using a combination of a Redis password and tight
firewall rules to secure your Redis service.
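On the application servers, the connection to an external Redis is configured in `/etc/gitlab/gitlab.rb`. A minimal sketch, assuming the standard Omnibus key names; host and password below are placeholders:
```ruby
# /etc/gitlab/gitlab.rb on each application server (placeholder values)
redis['enable'] = false                            # don't run a local Redis
gitlab_rails['redis_host'] = 'redis.example.com'   # ElastiCache endpoint or your own server
gitlab_rails['redis_port'] = 6379
gitlab_rails['redis_password'] = 'Redis Password'
```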
## Configure using Omnibus
1. Download/install GitLab Omnibus using **steps 1 and 2** from
[GitLab downloads](https://about.gitlab.com/downloads). Do not complete other
steps on the download page.
1. Create/edit `/etc/gitlab/gitlab.rb` and use the following configuration.
Be sure to change the `external_url` to match your eventual GitLab front-end
URL.
```ruby
external_url 'https://gitlab.example.com'
# Disable all components except Redis
redis['enable'] = true
bootstrap['enable'] = false
nginx['enable'] = false
unicorn['enable'] = false
sidekiq['enable'] = false
postgresql['enable'] = false
gitlab_workhorse['enable'] = false
mailroom['enable'] = false
# Redis configuration
redis['port'] = 6379
redis['bind'] = '0.0.0.0'
# If you wish to use Redis authentication (recommended)
redis['password'] = 'Redis Password'
```
1. Run `sudo gitlab-ctl reconfigure` to install and configure Redis.
> **Note**: This `reconfigure` step will result in some errors.
That's OK - don't be alarmed.
1. Run `touch /etc/gitlab/skip-auto-migrations` to prevent database migrations
from running on upgrade. Only the primary GitLab application server should
handle migrations.
---
Read more on high-availability configuration:
1. [Configure the database](database.md)
1. [Configure NFS](nfs.md)
1. [Configure the GitLab application servers](gitlab.md)
1. [Configure the load balancers](load_balancer.md)
# Repository checks
>**Note:**
This feature was [introduced][ce-3232] in GitLab 8.7.
This feature was [introduced][ce-3232] in GitLab 8.7. It is OFF by
default because it still causes too many false alarms.
Git has a built-in mechanism, [git fsck][git-fsck], to verify the
integrity of all data committed to a repository. GitLab administrators
......
# Troubleshooting Sidekiq
Sidekiq is the background job processor GitLab uses to asynchronously run
tasks. When things go wrong it can be difficult to troubleshoot. These
situations also tend to be high-pressure because a production system job queue
may be filling up. Users will notice when this happens because new branches
may not show up and merge requests may not be updated. The following are some
troubleshooting steps that will help you diagnose the bottleneck.
> **Note:** GitLab administrators/users should consider working through these
debug steps with GitLab Support so the backtraces can be analyzed by our team.
It may reveal a bug or necessary improvement in GitLab.
> **Note:** In any of the backtraces, be wary of suspecting cases where every
thread appears to be waiting in the database, Redis, or waiting to acquire
a mutex. This **may** mean there's contention in the database, for example,
but look for one thread that is different from the rest. This other thread
may be using all available CPU, or holding the Ruby Global Interpreter Lock,
preventing other threads from continuing.
## Thread dump
Send the Sidekiq process ID the `TTIN` signal and it will output thread
backtraces in the log file.
```
kill -TTIN <sidekiq_pid>
```
Check in `/var/log/gitlab/sidekiq/current` or `$GITLAB_HOME/log/sidekiq.log` for
the backtrace output. The backtraces will be lengthy and generally start with
several `WARN` level messages. Here's an example of a single thread's backtrace:
```
2016-04-13T06:21:20.022Z 31517 TID-orn4urby0 WARN: ActiveRecord::RecordNotFound: Couldn't find Note with 'id'=3375386
2016-04-13T06:21:20.022Z 31517 TID-orn4urby0 WARN: /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/activerecord-4.2.5.2/lib/active_record/core.rb:155:in `find'
/opt/gitlab/embedded/service/gitlab-rails/app/workers/new_note_worker.rb:7:in `perform'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/processor.rb:150:in `execute_job'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/processor.rb:132:in `block (2 levels) in process'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/middleware/chain.rb:127:in `block in invoke'
/opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/sidekiq_middleware/memory_killer.rb:17:in `call'
/opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/sidekiq-4.0.1/lib/sidekiq/middleware/chain.rb:129:in `block in invoke'
/opt/gitlab/embedded/service/gitlab-rails/lib/gitlab/sidekiq_middleware/arguments_logger.rb:6:in `call'
...
```
In some cases Sidekiq may be hung and unable to respond to the `TTIN` signal.
Move on to other troubleshooting methods if this happens.
## Process profiling with `perf`
Linux has a process profiling tool called `perf` that is helpful when a certain
process is eating up a lot of CPU. If you see high CPU usage and Sidekiq won't
respond to the `TTIN` signal, this is a good next step.
If `perf` is not installed on your system, install it with `apt-get` or `yum`:
```
# Debian
sudo apt-get install linux-tools
# Ubuntu (may require these additional Kernel packages)
sudo apt-get install linux-tools-common linux-tools-generic linux-tools-`uname -r`
# Red Hat/CentOS
sudo yum install perf
```
Run perf against the Sidekiq PID:
```
sudo perf record -p <sidekiq_pid>
```
Let this run for 30-60 seconds and then press Ctrl-C. Then view the perf report:
```
sudo perf report
# Sample output
Samples: 348K of event 'cycles', Event count (approx.): 280908431073
97.69% ruby nokogiri.so [.] xmlXPathNodeSetMergeAndClear
0.18% ruby libruby.so.2.1.0 [.] objspace_malloc_increase
0.12% ruby libc-2.12.so [.] _int_malloc
0.10% ruby libc-2.12.so [.] _int_free
```
Above you see sample output from a perf report. It shows that 97% of the CPU is
being spent inside Nokogiri and `xmlXPathNodeSetMergeAndClear`. For something
this obvious you should then go investigate what job in GitLab would use
Nokogiri and XPath. Combine with `TTIN` or `gdb` output to show the
corresponding Ruby code where this is happening.
## The GNU Project Debugger (gdb)
`gdb` can be another effective tool for debugging Sidekiq. It gives you a more
interactive way to look at each thread and see what's causing problems.
> **Note:** Attaching to a process with `gdb` suspends the normal operation
of the process (Sidekiq will not process jobs while `gdb` is attached).
Start by attaching to the Sidekiq PID:
```
gdb -p <sidekiq_pid>
```
Then gather information on all the threads:
```
info threads
# Example output
30 Thread 0x7fe5fbd63700 (LWP 26060) 0x0000003f7cadf113 in poll () from /lib64/libc.so.6
29 Thread 0x7fe5f2b3b700 (LWP 26533) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
28 Thread 0x7fe5f2a3a700 (LWP 26534) 0x0000003f7ce0ba5e in pthread_cond_timedwait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
27 Thread 0x7fe5f2939700 (LWP 26535) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
26 Thread 0x7fe5f2838700 (LWP 26537) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
25 Thread 0x7fe5f2737700 (LWP 26538) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
24 Thread 0x7fe5f2535700 (LWP 26540) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
23 Thread 0x7fe5f2434700 (LWP 26541) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
22 Thread 0x7fe5f2232700 (LWP 26543) 0x0000003f7ce0b68c in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
21 Thread 0x7fe5f2131700 (LWP 26544) 0x00007fe5f7b570f0 in xmlXPathNodeSetMergeAndClear ()
from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
...
```
If you see a suspicious thread, like the Nokogiri one above, you may want
to get more information:
```
thread 21
bt
# Example output
#0 0x00007ff0d6afe111 in xmlXPathNodeSetMergeAndClear () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#1 0x00007ff0d6b0b836 in xmlXPathNodeCollectAndTest () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#2 0x00007ff0d6b09037 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#3 0x00007ff0d6b09017 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#4 0x00007ff0d6b092e0 in xmlXPathCompOpEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#5 0x00007ff0d6b0bc37 in xmlXPathRunEval () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#6 0x00007ff0d6b0be5f in xmlXPathEvalExpression () from /opt/gitlab/embedded/service/gem/ruby/2.1.0/gems/nokogiri-1.6.7.2/lib/nokogiri/nokogiri.so
#7 0x00007ff0d6a97dc3 in evaluate (argc=2, argv=0x1022d058, self=<value optimized out>) at xml_xpath_context.c:221
#8 0x00007ff0daeab0ea in vm_call_cfunc_with_frame (th=0x1022a4f0, reg_cfp=0x1032b810, ci=<value optimized out>) at vm_insnhelper.c:1510
```
To output a backtrace from all threads at once:
```
thread apply all bt
```
## Check for blocking queries
Sometimes Sidekiq processes jobs so quickly that it causes database
contention. Check for blocking queries when the backtraces above show that
many threads are stuck in the database adapter.
The PostgreSQL wiki has details on the query you can run to see blocking
queries. The query is different based on PostgreSQL version. See
[Lock Monitoring](https://wiki.postgresql.org/wiki/Lock_Monitoring) for
the query details.
# Downgrading from EE to CE
If you ever decide to downgrade your Enterprise Edition back to the Community
Edition, there are a few steps you need to take before installing the CE package
on top of the current EE package or, for installations from source, before you
change remotes and fetch the latest CE code.
## Disable Enterprise-only features
The first thing to do is to disable the following features.
### Authentication mechanisms
Kerberos and Atlassian Crowd are only available on the Enterprise Edition, so
you should disable these mechanisms before downgrading and you should provide
alternative authentication methods to your users.
### Git Annex
Git Annex is also only available on the Enterprise Edition. This means that if
you have repositories that use Git Annex to store large files, these files will
no longer be easily available via Git. You should consider migrating these
repositories to use Git LFS before downgrading to the Community Edition.
### Remove Jenkins CI Service entries from the database
The `JenkinsService` class is only available on the Enterprise Edition codebase,
so if you downgrade to the Community Edition, you'll come across the following
error:
```
Completed 500 Internal Server Error in 497ms (ActiveRecord: 32.2ms)
ActionView::Template::Error (The single-table inheritance mechanism failed to locate the subclass: 'JenkinsService'. This
error is raised because the column 'type' is reserved for storing the class in case of inheritance. Please rename this
column if you didn't intend it to be used for storing the inheritance class or overwrite Service.inheritance_column to
use another column for that information.)
```
All services are created automatically for every project you have, so in order
to avoid getting this error, you need to remove all instances of the
`JenkinsService` from your database:
**Omnibus Installation**
```
$ sudo gitlab-rails runner "Service.where(type: 'JenkinsService').delete_all"
```
**Source Installation**
```
$ bundle exec rails runner "Service.where(type: 'JenkinsService').delete_all" production
```
## Downgrade to CE
After performing the steps above, you are ready to downgrade your
GitLab installation to the Community Edition.
**Omnibus Installation**
To downgrade an Omnibus installation, it is sufficient to install the Community
Edition package on top of the currently installed one. You can do this manually,
by directly [downloading the package](https://packages.gitlab.com/gitlab/gitlab-ce)
you need, or by adding our CE package repository and following the
[CE installation instructions](https://about.gitlab.com/downloads/).
**Source Installation**
To downgrade a source installation, you need to replace the current remote of
your GitLab installation with the Community Edition's remote, fetch the latest
changes, and checkout the latest stable branch:
```
$ git remote set-url origin git@gitlab.com:gitlab-org/gitlab-ce.git
$ git fetch --all
$ git checkout 8-x-stable
```
Remember to follow the correct [update guides](../update/README.md) to make
sure all dependencies are up to date.
......@@ -45,8 +45,8 @@ sudo -u git -H git checkout 8-7-stable-ee
```bash
cd /home/git/gitlab-shell
sudo -u git -H git fetch --all
sudo -u git -H git checkout v2.7.0
sudo -u git -H git fetch --all --tags
sudo -u git -H git checkout v2.7.2
```
### 5. Update gitlab-workhorse
......
Depending on the installation method and your GitLab version, there are multiple update guides. Choose one that fits your needs.
# Updating GitLab
Depending on the installation method and your GitLab version, there are multiple
update guides.
There are currently 3 official ways to install GitLab:
- Omnibus packages
- Source installation
- Docker installation
Based on your installation, choose a section below that fits your needs.
---
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Omnibus Packages](#omnibus-packages)
- [Installation from source](#installation-from-source)
- [Installation using Docker](#installation-using-docker)
- [Upgrading between editions](#upgrading-between-editions)
- [Community to Enterprise Edition](#community-to-enterprise-edition)
- [Enterprise to Community Edition](#enterprise-to-community-edition)
- [Miscellaneous](#miscellaneous)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Omnibus Packages
- [Omnibus update guide](https://gitlab.com/gitlab-org/omnibus-gitlab/blob/master/doc/update.md) contains the steps needed to update a GitLab [package](https://about.gitlab.com/downloads/).
- The [Omnibus update guide](http://doc.gitlab.com/omnibus/update/README.html)
contains the steps needed to update an Omnibus GitLab package.
## Installation from source
- [The individual upgrade guides](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/update) are for those who have installed GitLab from source.
- [The CE to EE update guides](https://gitlab.com/subscribers/gitlab-ee/tree/master/doc/update) are for subscribers of the Enterprise Edition only. The steps are very similar to a version upgrade: stop the server, get the code, update config files for the new functionality, install libs and do migrations, update the init script, start the application and check the application status.
- [Upgrader](upgrader.md) is an automatic ruby script that performs the update for installations from source.
- [Patch versions](patch_versions.md) guide includes the steps needed for a patch version, eg. 6.2.0 to 6.2.1.
- [Upgrading Community Edition from source][source-ce] - The individual
upgrade guides are for those who have installed GitLab CE from source.
- [Upgrading Enterprise Edition from source][source-ee] - The individual
upgrade guides are for those who have installed GitLab EE from source.
- The [Patch versions](patch_versions.md) guide includes the steps needed for a
patch version, e.g. 6.2.0 to 6.2.1, and applies to both Community and Enterprise
Editions.
## Installation using Docker
GitLab provides official Docker images for both Community and Enterprise
editions. They are based on the Omnibus package, and instructions on how to
update them are in [a separate document][omnidocker].
## Upgrading between editions
GitLab comes in two flavors: [Community Edition][ce] which is MIT licensed,
and [Enterprise Edition][ee] which builds on top of the Community Edition and
includes extra features mainly aimed at organizations with more than 100 users.
Below you can find some guides to help you change editions easily.
### Community to Enterprise Edition
>**Note:**
The following guides are for subscribers of the Enterprise Edition only.
If you wish to upgrade your GitLab installation from Community to Enterprise
Edition, follow the guides below based on the installation method:
- [Source CE to EE update guides][source-ee] - Find your version, and follow the
`-ce-to-ee.md` guide. The steps are very similar to a version upgrade: stop
the server, get the code, update config files for the new functionality,
install libraries and do migrations, update the init script, start the
application and check its status.
- [Omnibus CE to EE][omni-ce-ee] - Follow this guide to update your Omnibus
GitLab Community Edition to the Enterprise Edition.
### Enterprise to Community Edition
If you need to downgrade your Enterprise Edition installation back to Community
Edition, you can follow [this guide][ee-ce] to make the process as smooth as
possible.
## Miscellaneous
- [MySQL to PostgreSQL](mysql_to_postgresql.md) guides you through migrating your database from MySQL to PostgreSQL.
- [MySQL installation guide](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/install/database_mysql.md) contains additional information about configuring GitLab to work with a MySQL database.
- [MySQL to PostgreSQL](mysql_to_postgresql.md) guides you through migrating
your database from MySQL to PostgreSQL.
- [MySQL installation guide](../install/database_mysql.md) contains additional
information about configuring GitLab to work with a MySQL database.
- [Restoring from backup after a failed upgrade](restore_after_failure.md)
[omnidocker]: http://doc.gitlab.com/omnibus/docker/README.html
[source-ee]: https://gitlab.com/gitlab-org/gitlab-ee/tree/master/doc/update
[source-ce]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/update
[ee-ce]: ../downgrade_ee_to_ce/README.md
[ce]: https://about.gitlab.com/features/#community
[ee]: https://about.gitlab.com/features/#enterprise
[omni-ce-ee]: http://doc.gitlab.com/omnibus/update/README.html#from-community-edition-to-enterprise-edition
# Cherry-pick changes
_**Note:** This feature was [introduced][ce-3514] in GitLab 8.7._
>**Note:**
This feature was [introduced][ce-3514] in GitLab 8.7.
---
......
# Import your project from GitHub to GitLab
_**Note:** In order to enable the GitHub import setting, you should first
enable the [GitHub integration][gh-import] in your GitLab instance._
>**Note:**
In order to enable the GitHub import setting, you should first
enable the [GitHub integration][gh-import] in your GitLab instance.
In its current state, the GitHub importer can import:
......@@ -10,10 +11,13 @@ At its current state, GitHub importer can import:
- the issues (introduced in GitLab 7.7)
- the pull requests (introduced in GitLab 8.4)
- the wiki pages (introduced in GitLab 8.4)
- the milestones (introduced in GitLab 8.7)
- the labels (introduced in GitLab 8.7)
It is not yet possible to import your labels, milestones and cross-repository
pull requests (those from forks). We are working on improving this in the near
future.
With GitLab 8.7+, references to pull requests and issues are preserved.
It is not yet possible to import your cross-repository pull requests (those from
forks). We are working on improving this in the near future.
The importer page is visible when you [create a new project][new-project].
Click on the **GitHub** link and you will be redirected to GitHub for
......
......@@ -22,4 +22,8 @@ class Spinach::Features::ProfileActiveTab < Spinach::FeatureSteps
step 'the active main tab should be Audit Log' do
ensure_active_main_tab('Audit Log')
end
def ensure_active_main_tab(content)
expect(find('.layout-nav li.active')).to have_content(content)
end
end
......@@ -519,7 +519,7 @@ class Spinach::Features::ProjectMergeRequests < Spinach::FeatureSteps
step '"Bug NS-05" has CI status' do
project = merge_request.source_project
project.enable_ci
ci_commit = create :ci_commit, project: project, sha: merge_request.last_commit.id
ci_commit = create :ci_commit, project: project, sha: merge_request.last_commit.id, ref: merge_request.source_branch
create :ci_build, commit: ci_commit
end
......
......@@ -10,16 +10,16 @@ module SharedBuilds
end
step 'project has a recent build' do
@ci_commit = create(:ci_commit, project: @project, sha: @project.commit.sha)
@ci_commit = create(:ci_commit, project: @project, sha: @project.commit.sha, ref: 'master')
@build = create(:ci_build_with_coverage, commit: @ci_commit)
end
step 'recent build is successful' do
@build.update_column(:status, 'success')
@build.update(status: 'success')
end
step 'recent build failed' do
@build.update_column(:status, 'failed')
@build.update(status: 'failed')
end
step 'project has another build that is running' do
......
......@@ -230,7 +230,7 @@ module SharedProject
step 'project "Shop" has CI build' do
project = Project.find_by(name: "Shop")
create :ci_commit, project: project, sha: project.commit.sha
create :ci_commit, project: project, sha: project.commit.sha, ref: 'master', status: 'skipped'
end
step 'I should see last commit with CI status' do
......
......@@ -21,10 +21,9 @@ module API
authorize!(:read_commit_status, user_project)
not_found!('Commit') unless user_project.commit(params[:sha])
ci_commit = user_project.ci_commit(params[:sha])
return [] unless ci_commit
statuses = ci_commit.statuses
ci_commits = user_project.ci_commits.where(sha: params[:sha])
statuses = ::CommitStatus.where(commit: ci_commits)
statuses = statuses.latest unless parse_boolean(params[:all])
statuses = statuses.where(ref: params[:ref]) if params[:ref].present?
statuses = statuses.where(stage: params[:stage]) if params[:stage].present?
......@@ -51,7 +50,21 @@ module API
commit = @project.commit(params[:sha])
not_found! 'Commit' unless commit
ci_commit = @project.ensure_ci_commit(commit.sha)
# Since a CommitStatus is attached to a Ci::Commit (Pipeline in the future),
# we always need a pipeline object that can be attached to a specific MR.
# External CI services therefore need to send `ref`; if we don't receive it,
# we attach the CommitStatus to the first branch that contains the commit.
ref = params[:ref]
unless ref
branches = @project.repository.branch_names_contains(commit.sha)
not_found! 'References for commit' if branches.none?
ref = branches.first
end
ci_commit = @project.ensure_ci_commit(commit.sha, ref)
name = params[:name] || params[:context]
status = GenericCommitStatus.running_or_pending.find_by(commit: ci_commit, name: name, ref: params[:ref])
......
module Banzai
module Filter
# HTML Filter to add a `rel="nofollow"` attribute to external links
#
# HTML Filter to modify the attributes of external links
class ExternalLinkFilter < HTML::Pipeline::Filter
def call
doc.search('a').each do |node|
......@@ -15,7 +14,7 @@ module Banzai
# Skip internal links
next if link.start_with?(internal_url)
node.set_attribute('rel', 'nofollow')
node.set_attribute('rel', 'nofollow noreferrer')
end
doc
......
......@@ -18,9 +18,7 @@ module Banzai
def references_in(text, pattern = Label.reference_pattern)
text.gsub(pattern) do |match|
project = project_from_ref($~[:project])
params = label_params($~[:label_id].to_i, $~[:label_name])
label = project.labels.find_by(params)
label = find_label($~[:project], $~[:label_id], $~[:label_name])
if label
yield match, label.id, $~[:project], $~
......@@ -30,18 +28,12 @@ module Banzai
end
end
def url_for_object(label, project)
h = Gitlab::Routing.url_helpers
h.namespace_project_issues_url(project.namespace, project, label_name: label.name,
only_path: context[:only_path])
end
def find_label(project_ref, label_id, label_name)
project = project_from_ref(project_ref)
return unless project
def object_link_text(object, matches)
if context[:project] == object.project
LabelsHelper.render_colored_label(object)
else
LabelsHelper.render_colored_cross_project_label(object)
end
label_params = label_params(label_id, label_name)
project.labels.find_by(label_params)
end
# Parameters to pass to `Label.find_by` based on the given arguments
......@@ -55,7 +47,21 @@ module Banzai
if name
{ name: name.tr('"', '') }
else
{ id: id }
{ id: id.to_i }
end
end
def url_for_object(label, project)
h = Gitlab::Routing.url_helpers
h.namespace_project_issues_url(project.namespace, project, label_name: label.name,
only_path: context[:only_path])
end
def object_link_text(object, matches)
if context[:project] == object.project
LabelsHelper.render_colored_label(object)
else
LabelsHelper.render_colored_cross_project_label(object)
end
end
end
......
module Ci
class Status
def self.get_status(statuses)
if statuses.none?
'skipped'
elsif statuses.all? { |status| status.success? || status.ignored? }
'success'
elsif statuses.all?(&:pending?)
'pending'
elsif statuses.any?(&:running?) || statuses.any?(&:pending?)
'running'
elsif statuses.all?(&:canceled?)
'canceled'
else
'failed'
end
end
end
end
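A minimal standalone sketch of how this helper derives an aggregate state, using stand-in status objects rather than the real `CommitStatus` records; the `FakeStatus` struct and its `ignored?` rule below are illustrative, mirroring the allow-failure handling in the UpdateCiCommit migration:
```ruby
# Assumes the Ci::Status class shown above is already loaded.
FakeStatus = Struct.new(:state, :allow_failure) do
  def success?;  state == 'success';  end
  def pending?;  state == 'pending';  end
  def running?;  state == 'running';  end
  def canceled?; state == 'canceled'; end
  # Failed or canceled builds that are allowed to fail don't break the result.
  def ignored?;  allow_failure && %w(failed canceled).include?(state); end
end

builds = [FakeStatus.new('success', false), FakeStatus.new('failed', true)]
Ci::Status.get_status(builds) # => "success"

builds << FakeStatus.new('running', false)
Ci::Status.get_status(builds) # => "running"
```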
......@@ -7,7 +7,6 @@ module Gitlab
gon.max_file_size = current_application_settings.max_attachment_size
gon.relative_url_root = Gitlab.config.gitlab.relative_url_root
gon.user_color_scheme = Gitlab::ColorSchemes.for_user(current_user).css_class
gon.sentry_dsn = ApplicationSetting.current.sentry_dsn if Rails.env.production?
if current_user
gon.current_user_id = current_user.id
......
......@@ -29,8 +29,8 @@ module Gitlab
"in #{GRACE_TIME} seconds"
sleep(GRACE_TIME)
Sidekiq.logger.warn "sending SIGUSR1 to PID #{Process.pid}"
Process.kill('SIGUSR1', Process.pid)
Sidekiq.logger.warn "sending SIGTERM to PID #{Process.pid}"
Process.kill('SIGTERM', Process.pid)
Sidekiq.logger.warn "waiting #{SHUTDOWN_WAIT} seconds before sending "\
"#{SHUTDOWN_SIGNAL} to PID #{Process.pid}"
......
......@@ -300,14 +300,6 @@ describe Projects::MergeRequestsController do
expect(response.cookies['diff_view']).to eq('parallel')
end
it 'assigns :view param based on cookie' do
request.cookies['diff_view'] = 'parallel'
go
expect(controller.params[:view]).to eq 'parallel'
end
end
describe 'GET commits' do
......
......@@ -46,4 +46,20 @@ describe Projects::ProjectMembersController do
end
end
end
describe '#index' do
let(:project) { create(:project, :private) }
context 'when user is member' do
let(:member) { create(:user) }
before do
project.team << [member, :guest]
sign_in(member)
get :index, namespace_id: project.namespace.to_param, project_id: project.to_param
end
it { expect(response.status).to eq(200) }
end
end
end
require 'spec_helper'
describe "Dashboard > User filters projects", feature: true do
describe 'filtering personal projects' do
before do
user = create(:user)
project = create(:project, name: "Victorialand", namespace: user.namespace)
project.team << [user, :master]
user2 = create(:user)
project2 = create(:project, name: "Treasure", namespace: user2.namespace)
project2.team << [user, :developer]
login_as(user)
visit dashboard_projects_path
end
it 'filters by projects "Owned by me"' do
click_link "Owned by me"
expect(page).to have_css('.is-active', text: 'Owned by me')
expect(page).to have_content('Victorialand')
expect(page).not_to have_content('Treasure')
end
end
end
require 'rails_helper'
feature 'Issue filtering by Labels', feature: true do
include WaitForAjax
let(:project) { create(:project, :public) }
let!(:user) { create(:user)}
let!(:label) { create(:label, project: project) }
before do
['bug', 'feature', 'enhancement'].each do |title|
create(:label,
project: project,
title: title)
end
bug = create(:label, project: project, title: 'bug')
feature = create(:label, project: project, title: 'feature')
enhancement = create(:label, project: project, title: 'enhancement')
issue1 = create(:issue, title: "Bugfix1", project: project)
issue1.labels << project.labels.find_by(title: 'bug')
issue1.labels << bug
issue2 = create(:issue, title: "Bugfix2", project: project)
issue2.labels << project.labels.find_by(title: 'bug')
issue2.labels << project.labels.find_by(title: 'enhancement')
issue2.labels << bug
issue2.labels << enhancement
issue3 = create(:issue, title: "Feature1", project: project)
issue3.labels << project.labels.find_by(title: 'feature')
issue3.labels << feature
project.team << [user, :master]
login_as(user)
......@@ -31,10 +31,10 @@ feature 'Issue filtering by Labels', feature: true do
context 'filter by label bug', js: true do
before do
page.find('.js-label-select').click
sleep 0.5
wait_for_ajax
execute_script("$('.dropdown-menu-labels li:contains(\"bug\") a').click()")
page.first('.labels-filter .dropdown-title .dropdown-menu-close-icon').click
sleep 2
wait_for_ajax
end
it 'should show issue "Bugfix1" and "Bugfix2" in issues list' do
......@@ -59,10 +59,10 @@ feature 'Issue filtering by Labels', feature: true do
context 'filter by label feature', js: true do
before do
page.find('.js-label-select').click
sleep 0.5
wait_for_ajax
execute_script("$('.dropdown-menu-labels li:contains(\"feature\") a').click()")
page.first('.labels-filter .dropdown-title .dropdown-menu-close-icon').click
sleep 2
wait_for_ajax
end
it 'should show issue "Feature1" in issues list' do
......@@ -87,10 +87,10 @@ feature 'Issue filtering by Labels', feature: true do
context 'filter by label enhancement', js: true do
before do
page.find('.js-label-select').click
sleep 0.5
wait_for_ajax
execute_script("$('.dropdown-menu-labels li:contains(\"enhancement\") a').click()")
page.first('.labels-filter .dropdown-title .dropdown-menu-close-icon').click
sleep 2
wait_for_ajax
end
it 'should show issue "Bugfix2" in issues list' do
......@@ -115,20 +115,16 @@ feature 'Issue filtering by Labels', feature: true do
context 'filter by label enhancement or feature', js: true do
before do
page.find('.js-label-select').click
sleep 0.5
wait_for_ajax
execute_script("$('.dropdown-menu-labels li:contains(\"enhancement\") a').click()")
execute_script("$('.dropdown-menu-labels li:contains(\"feature\") a').click()")
page.first('.labels-filter .dropdown-title .dropdown-menu-close-icon').click
sleep 2
wait_for_ajax
end
it 'should show issue "Bugfix2" or "Feature1" in issues list' do
expect(page).to have_content "Bugfix2"
expect(page).to have_content "Feature1"
end
it 'should not show "Bugfix1" in issues list' do
it 'should not show "Bugfix1" or "Feature1" in issues list' do
expect(page).not_to have_content "Bugfix1"
expect(page).not_to have_content "Feature1"
end
it 'should show label "enhancement" and "feature" in filtered-labels' do
......@@ -141,19 +137,18 @@ feature 'Issue filtering by Labels', feature: true do
end
end
context 'filter by label enhancement or bug in issues list', js: true do
context 'filter by label enhancement and bug in issues list', js: true do
before do
page.find('.js-label-select').click
sleep 0.5
wait_for_ajax
execute_script("$('.dropdown-menu-labels li:contains(\"enhancement\") a').click()")
execute_script("$('.dropdown-menu-labels li:contains(\"bug\") a').click()")
page.first('.labels-filter .dropdown-title .dropdown-menu-close-icon').click
sleep 2
wait_for_ajax
end
it 'should show issue "Bugfix2" or "Bugfix1" in issues list' do
it 'should show issue "Bugfix2" in issues list' do
expect(page).to have_content "Bugfix2"
expect(page).to have_content "Bugfix1"
end
it 'should not show "Feature1"' do
......
require 'rails_helper'
feature 'Issue Sidebar', feature: true do
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
let!(:user) { create(:user)}
before do
create(:label, project: project, title: 'bug')
login_as(user)
end
context 'as an allowed user' do
before do
project.team << [user, :developer]
visit_issue(project, issue)
end
describe 'when clicking on edit labels', js: true do
it 'dropdown has an option to create a new label' do
find('.block.labels .edit-link').click
page.within('.block.labels') do
expect(page).to have_content 'Create new'
end
end
end
context 'creating a new label', js: true do
it 'option to create a new label is present' do
page.within('.block.labels') do
find('.edit-link').click
expect(page).to have_content 'Create new'
end
end
it 'dropdown switches to "create label" section' do
page.within('.block.labels') do
find('.edit-link').click
click_link 'Create new'
expect(page).to have_content 'Create new label'
end
end
it 'new label is added' do
page.within('.block.labels') do
find('.edit-link').click
sleep 1
click_link 'Create new'
fill_in 'new_label_name', with: 'wontfix'
page.find(".suggest-colors a", match: :first).click
click_button 'Create'
page.within('.dropdown-page-one') do
expect(page).to have_content 'wontfix'
end
end
end
end
end
context 'as a guest' do
before do
project.team << [user, :guest]
visit_issue(project, issue)
end
it 'does not have an option to edit labels' do
expect(page).not_to have_selector('.block.labels .edit-link')
end
end
def visit_issue(project, issue)
visit namespace_project_issue_path(project.namespace, project, issue)
end
end
......@@ -178,6 +178,19 @@ describe 'Issues', feature: true do
expect(first_issue).to include('foo')
end
context 'with a filter on labels' do
let(:label) { create(:label, project: project) }
before { create(:label_link, label: label, target: foo) }
it 'sorts by least recently due date, excluding nil due dates' do
bar.update(due_date: nil)
visit namespace_project_issues_path(project.namespace, project, label_names: [label.name], sort: sort_value_due_date_later)
expect(first_issue).to include('foo')
end
end
end
describe 'filtering by due date' do
......@@ -304,6 +317,27 @@ describe 'Issues', feature: true do
expect(issue.reload.assignee).to be_nil
end
it 'allows user to select an assignee', js: true do
issue2 = create(:issue, project: project, author: @user)
visit namespace_project_issue_path(project.namespace, project, issue2)
page.within('.assignee') do
expect(page).to have_content "No assignee"
end
page.within '.assignee' do
click_link 'Edit'
end
page.within '.dropdown-menu-user' do
click_link @user.name
end
page.within('.assignee') do
expect(page).to have_content @user.name
end
end
end
context 'by unauthorized user' do
......
......@@ -165,7 +165,12 @@ describe 'GitLab Markdown', feature: true do
describe 'ExternalLinkFilter' do
it 'adds nofollow to external link' do
link = doc.at_css('a:contains("Google")')
expect(link.attr('rel')).to match 'nofollow'
expect(link.attr('rel')).to include('nofollow')
end
it 'adds noreferrer to external link' do
link = doc.at_css('a:contains("Google")')
expect(link.attr('rel')).to include('noreferrer')
end
it 'ignores internal link' do
......
require 'spec_helper'
feature 'Projects > Members > Anonymous user sees members', feature: true do
let(:user) { create(:user) }
let(:group) { create(:group, :public) }
let(:project) { create(:empty_project, :public) }
background do
project.team << [user, :master]
create(:project_group_link, project: project, group: group)
end
scenario "anonymous user visits the project's members page and sees the list of members" do
visit namespace_project_project_members_path(project.namespace, project)
expect(current_path).to eq(
namespace_project_project_members_path(project.namespace, project))
expect(page).to have_content(user.name)
end
end
......@@ -101,12 +101,12 @@ describe "Internal Project Access", feature: true do
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_denied_for developer }
it { is_expected.to be_denied_for reporter }
it { is_expected.to be_denied_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_denied_for :visitor }
it { is_expected.to be_denied_for :external }
end
describe "GET /:project_path/blob" do
......
......@@ -101,9 +101,9 @@ describe "Private Project Access", feature: true do
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_denied_for developer }
it { is_expected.to be_denied_for reporter }
it { is_expected.to be_denied_for guest }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
......
......@@ -101,12 +101,12 @@ describe "Public Project Access", feature: true do
it { is_expected.to be_allowed_for :admin }
it { is_expected.to be_allowed_for owner }
it { is_expected.to be_allowed_for master }
it { is_expected.to be_denied_for developer }
it { is_expected.to be_denied_for reporter }
it { is_expected.to be_denied_for guest }
it { is_expected.to be_denied_for :user }
it { is_expected.to be_denied_for :external }
it { is_expected.to be_denied_for :visitor }
it { is_expected.to be_allowed_for developer }
it { is_expected.to be_allowed_for reporter }
it { is_expected.to be_allowed_for guest }
it { is_expected.to be_allowed_for :user }
it { is_expected.to be_allowed_for :visitor }
it { is_expected.to be_allowed_for :external }
end
describe "GET /:project_path/builds" do
......
......@@ -13,8 +13,8 @@ feature 'Signup', feature: true do
fill_in 'user_password_sign_up', with: user.password
click_button "Sign up"
expect(current_path).to eq user_session_path
expect(page).to have_content("A message with a confirmation link has been sent to your email address.")
expect(current_path).to eq users_almost_there_path
expect(page).to have_content("Please check your email to confirm your account")
end
end
......
require 'spec_helper'
describe 'Dashboard Todos', feature: true do
let(:user){ create(:user) }
let(:author){ create(:user) }
let(:project){ create(:project) }
let(:issue){ create(:issue) }
let(:todos_per_page){ Todo.default_per_page }
let(:todos_total){ todos_per_page + 1 }
let(:user) { create(:user) }
let(:author) { create(:user) }
let(:project) { create(:project) }
let(:issue) { create(:issue) }
describe 'GET /dashboard/todos' do
context 'User does not have todos' do
......@@ -46,31 +44,35 @@ describe 'Dashboard Todos', feature: true do
end
context 'User has multiple pages of Todos' do
let(:todo_total_pages){ (todos_total.to_f/todos_per_page).ceil }
before do
todos_total.times do
create(:todo, :mentioned, user: user, project: project, target: issue, author: author)
end
allow(Todo).to receive(:default_per_page).and_return(1)
# Create just enough records to cause us to paginate
create_list(:todo, 2, :mentioned, user: user, project: project, target: issue, author: author)
login_as(user)
visit dashboard_todos_path
end
it 'is paginated' do
visit dashboard_todos_path
expect(page).to have_selector('.gl-pagination')
end
it 'has the right number of pages' do
expect(page).to have_selector('.gl-pagination .page', count: todo_total_pages)
visit dashboard_todos_path
expect(page).to have_selector('.gl-pagination .page', count: 2)
end
describe 'deleting last todo from last page', js: true do
describe 'completing last todo from last page', js: true do
it 'redirects to the previous page' do
page.within('.gl-pagination') do
click_link todo_total_pages.to_s
end
first('.done-todo').click
visit dashboard_todos_path(page: 2)
expect(page).to have_content(Todo.first.body)
click_link('Done')
expect(current_path).to eq dashboard_todos_path
expect(page).to have_content(Todo.last.body)
end
end
......
......@@ -6,8 +6,8 @@ describe CiStatusHelper do
let(:success_commit) { double("Ci::Commit", status: 'success') }
let(:failed_commit) { double("Ci::Commit", status: 'failed') }
describe 'ci_status_icon' do
it { expect(helper.ci_status_icon(success_commit)).to include('fa-check') }
it { expect(helper.ci_status_icon(failed_commit)).to include('fa-close') }
describe 'ci_icon_for_status' do
it { expect(helper.ci_icon_for_status(success_commit.status)).to include('fa-check') }
it { expect(helper.ci_icon_for_status(failed_commit.status)).to include('fa-close') }
end
end
......@@ -11,6 +11,26 @@ describe DiffHelper do
let(:diff_refs) { [commit.parent, commit] }
let(:diff_file) { Gitlab::Diff::File.new(diff, diff_refs) }
describe 'diff_view' do
it 'returns a valid value when cookie is set' do
helper.request.cookies[:diff_view] = 'parallel'
expect(helper.diff_view).to eq 'parallel'
end
it 'returns a default value when cookie is invalid' do
helper.request.cookies[:diff_view] = 'invalid'
expect(helper.diff_view).to eq 'inline'
end
it 'returns a default value when cookie is nil' do
expect(helper.request.cookies).to be_empty
expect(helper.diff_view).to eq 'inline'
end
end
describe 'diff_hard_limit_enabled?' do
it 'should return true if param is provided' do
allow(controller).to receive(:params) { { force_show_diff: true } }
......
......@@ -24,6 +24,14 @@ describe Banzai::Filter::ExternalLinkFilter, lib: true do
doc = filter(act)
expect(doc.at_css('a')).to have_attribute('rel')
expect(doc.at_css('a')['rel']).to eq 'nofollow'
expect(doc.at_css('a')['rel']).to include 'nofollow'
end
it 'adds rel="noreferrer" to external links' do
act = %q(<a href="https://google.com/">Google</a>)
doc = filter(act)
expect(doc.at_css('a')).to have_attribute('rel')
expect(doc.at_css('a')['rel']).to include 'noreferrer'
end
end
......@@ -178,6 +178,7 @@ describe Banzai::Filter::LabelReferenceFilter, lib: true do
end
describe 'cross project label references' do
context 'valid project referenced' do
let(:another_project) { create(:empty_project, :public) }
let(:project_name) { another_project.name_with_namespace }
let(:label) { create(:label, project: another_project, color: '#00ff00') }
......@@ -201,4 +202,13 @@ describe Banzai::Filter::LabelReferenceFilter, lib: true do
expect(result.css('a').first.text).to eq "#{label.name} in #{project_name}"
end
end
context 'project that does not exist referenced' do
let(:result) { reference_filter('aaa/bbb~ccc') }
it 'does not link reference' do
expect(result.to_html).to eq 'aaa/bbb~ccc'
end
end
end
end
......@@ -42,7 +42,7 @@ describe Gitlab::Badge::Build do
end
context 'build exists' do
let(:ci_commit) { create(:ci_commit, project: project, sha: sha) }
let(:ci_commit) { create(:ci_commit, project: project, sha: sha, ref: branch) }
let!(:build) { create(:ci_build, commit: ci_commit) }
......@@ -57,7 +57,7 @@ describe Gitlab::Badge::Build do
describe '#data' do
let(:data) { badge.data }
it 'contains infromation about success' do
it 'contains information about success' do
expect(status_node(data, 'success')).to be_truthy
end
end
......@@ -74,7 +74,7 @@ describe Gitlab::Badge::Build do
describe '#data' do
let(:data) { badge.data }
it 'contains infromation about failure' do
it 'contains information about failure' do
expect(status_node(data, 'failed')).to be_truthy
end
end
......
......@@ -27,6 +27,8 @@ describe Ci::Commit, models: true do
it { is_expected.to have_many(:trigger_requests) }
it { is_expected.to have_many(:builds) }
it { is_expected.to validate_presence_of :sha }
it { is_expected.to validate_presence_of :status }
it { is_expected.to delegate_method(:stages).to(:statuses) }
it { is_expected.to respond_to :git_author_name }
it { is_expected.to respond_to :git_author_email }
......@@ -52,57 +54,9 @@ describe Ci::Commit, models: true do
it { expect(commit.sha).to start_with(subject) }
end
describe :stage do
subject { commit.stage }
before do
@second = FactoryGirl.create :commit_status, commit: commit, name: 'deploy', stage: 'deploy', stage_idx: 1, status: 'pending'
@first = FactoryGirl.create :commit_status, commit: commit, name: 'test', stage: 'test', stage_idx: 0, status: 'pending'
end
it 'returns first running stage' do
is_expected.to eq('test')
end
context 'first build succeeded' do
before do
@first.success
end
it 'returns last running stage' do
is_expected.to eq('deploy')
end
end
context 'all builds succeeded' do
before do
@first.success
@second.success
end
it 'returns nil' do
is_expected.to be_nil
end
end
end
describe :create_next_builds do
end
describe :refs do
subject { commit.refs }
before do
FactoryGirl.create :commit_status, commit: commit, name: 'deploy'
FactoryGirl.create :commit_status, commit: commit, name: 'deploy', ref: 'develop'
FactoryGirl.create :commit_status, commit: commit, name: 'deploy', ref: 'master'
end
it 'returns all refs' do
is_expected.to contain_exactly('master', 'develop', nil)
end
end
describe :retried do
subject { commit.retried }
......@@ -117,10 +71,10 @@ describe Ci::Commit, models: true do
end
describe :create_builds do
let!(:commit) { FactoryGirl.create :ci_commit, project: project }
let!(:commit) { FactoryGirl.create :ci_commit, project: project, ref: 'master', tag: false }
def create_builds(trigger_request = nil)
commit.create_builds('master', false, nil, trigger_request)
commit.create_builds(nil, trigger_request)
end
def create_next_builds
......@@ -143,67 +97,6 @@ describe Ci::Commit, models: true do
expect(create_next_builds).to be_falsey
end
context 'for different ref' do
def create_develop_builds
commit.create_builds('develop', false, nil, nil)
end
it 'creates builds' do
expect(create_builds).to be_truthy
commit.builds.update_all(status: "success")
expect(commit.builds.count(:all)).to eq(2)
expect(create_develop_builds).to be_truthy
commit.builds.update_all(status: "success")
expect(commit.builds.count(:all)).to eq(4)
expect(commit.refs.size).to eq(2)
expect(commit.builds.pluck(:name).uniq.size).to eq(2)
end
end
context 'for build triggers' do
let(:trigger) { FactoryGirl.create :ci_trigger, project: project }
let(:trigger_request) { FactoryGirl.create :ci_trigger_request, commit: commit, trigger: trigger }
it 'creates builds' do
expect(create_builds(trigger_request)).to be_truthy
expect(commit.builds.count(:all)).to eq(2)
end
it 'rebuilds commit' do
expect(create_builds).to be_truthy
expect(commit.builds.count(:all)).to eq(2)
expect(create_builds(trigger_request)).to be_truthy
expect(commit.builds.count(:all)).to eq(4)
end
it 'creates next builds' do
expect(create_builds(trigger_request)).to be_truthy
expect(commit.builds.count(:all)).to eq(2)
commit.builds.update_all(status: "success")
expect(create_next_builds).to be_truthy
expect(commit.builds.count(:all)).to eq(4)
end
context 'for [ci skip]' do
before do
allow(commit).to receive(:git_commit_message) { 'message [ci skip]' }
end
it 'rebuilds commit' do
expect(commit.status).to eq('skipped')
expect(create_builds).to be_truthy
# since everything in Ci::Commit is cached we need to fetch a new object
new_commit = Ci::Commit.find_by_id(commit.id)
expect(new_commit.status).to eq('pending')
end
end
end
context 'custom stage with first job allowed to fail' do
let(:yaml) do
{
......@@ -284,6 +177,7 @@ describe Ci::Commit, models: true do
commit.builds.running_or_pending.each(&:success)
expect(commit.builds.pluck(:status)).to contain_exactly('success', 'success', 'success', 'success')
commit.reload
expect(commit.status).to eq('success')
end
......@@ -306,6 +200,7 @@ describe Ci::Commit, models: true do
commit.builds.running_or_pending.each(&:success)
expect(commit.builds.pluck(:status)).to contain_exactly('success', 'failed', 'success', 'success')
commit.reload
expect(commit.status).to eq('failed')
end
......@@ -329,6 +224,7 @@ describe Ci::Commit, models: true do
expect(commit.builds.pluck(:name)).to contain_exactly('build', 'test', 'test_failure', 'cleanup')
expect(commit.builds.pluck(:status)).to contain_exactly('success', 'failed', 'failed', 'success')
commit.reload
expect(commit.status).to eq('failed')
end
......@@ -351,6 +247,7 @@ describe Ci::Commit, models: true do
commit.builds.running_or_pending.each(&:success)
expect(commit.builds.pluck(:status)).to contain_exactly('success', 'success', 'failed', 'success')
commit.reload
expect(commit.status).to eq('failed')
end
end
......@@ -402,4 +299,98 @@ describe Ci::Commit, models: true do
expect(commit.coverage).to be_nil
end
end
describe '#retryable?' do
subject { commit.retryable? }
context 'no failed builds' do
before do
FactoryGirl.create :ci_build, name: "rspec", commit: commit, status: 'success'
end
it 'is not retryable' do
is_expected.to be_falsey
end
end
context 'with failed builds' do
before do
FactoryGirl.create :ci_build, name: "rspec", commit: commit, status: 'running'
FactoryGirl.create :ci_build, name: "rubocop", commit: commit, status: 'failed'
end
it 'is retryable' do
is_expected.to be_truthy
end
end
end
describe '#stages' do
let(:commit2) { FactoryGirl.create :ci_commit, project: project }
subject { CommitStatus.where(commit: [commit, commit2]).stages }
before do
FactoryGirl.create :ci_build, commit: commit2, stage: 'test', stage_idx: 1
FactoryGirl.create :ci_build, commit: commit, stage: 'build', stage_idx: 0
end
it 'returns all stages' do
is_expected.to eq(%w(build test))
end
end
describe '#update_state' do
it 'executes update_state after touching the object' do
expect(commit).to receive(:update_state).and_return(true)
commit.touch
end
context 'dependent objects' do
let(:commit_status) { build :commit_status, commit: commit }
it 'executes update_state after saving a dependent object' do
expect(commit).to receive(:update_state).and_return(true)
commit_status.save
end
end
context 'update state' do
let(:current) { Time.now.change(usec: 0) }
let(:build) { FactoryGirl.create :ci_build, :success, commit: commit, started_at: current - 120, finished_at: current - 60 }
before do
build
end
[:status, :started_at, :finished_at, :duration].each do |param|
it "update #{param}" do
expect(commit.send(param)).to eq(build.send(param))
end
end
end
end
describe '#branch?' do
subject { commit.branch? }
context 'is not a tag' do
before do
commit.tag = false
end
it 'returns true when tag is set to false' do
is_expected.to be_truthy
end
end
context 'is a tag' do
before do
commit.tag = true
end
it 'returns false when tag is set to true' do
is_expected.to be_falsey
end
end
end
end
......@@ -163,4 +163,12 @@ eos
it { expect(commit.reverts_commit?(another_commit)).to be_truthy }
end
end
describe '#ci_commits' do
# TODO: kamil
end
describe '#status' do
# TODO: kamil
end
end
......@@ -163,37 +163,73 @@ describe CommitStatus, models: true do
end
it 'returns unique statuses' do
is_expected.to eq([@commit2, @commit3, @commit4, @commit5])
is_expected.to eq([@commit4, @commit5])
end
end
describe :for_ref do
subject { CommitStatus.for_ref('bb').order(:id) }
describe :running_or_pending do
subject { CommitStatus.running_or_pending.order(:id) }
before do
@commit1 = FactoryGirl.create :commit_status, commit: commit, name: 'aa', ref: 'bb', status: 'running'
@commit2 = FactoryGirl.create :commit_status, commit: commit, name: 'cc', ref: 'cc', status: 'pending'
@commit3 = FactoryGirl.create :commit_status, commit: commit, name: 'aa', ref: nil, status: 'success'
@commit4 = FactoryGirl.create :commit_status, commit: commit, name: 'dd', ref: nil, status: 'failed'
@commit5 = FactoryGirl.create :commit_status, commit: commit, name: 'ee', ref: nil, status: 'canceled'
end
it 'return statuses with equal and nil ref set' do
is_expected.to eq([@commit1])
it 'return statuses that are running or pending' do
is_expected.to eq([@commit1, @commit2])
end
end
describe :running_or_pending do
subject { CommitStatus.running_or_pending.order(:id) }
describe '#before_sha' do
subject { commit_status.before_sha }
context 'when no before_sha is set for ci::commit' do
before { commit.before_sha = nil }
it 'returns a blank SHA' do
is_expected.to eq(Gitlab::Git::BLANK_SHA)
end
end
context 'for before_sha set for ci::commit' do
let(:value) { '1234' }
before { commit.before_sha = value }
it 'returns the set value' do
is_expected.to eq(value)
end
end
end
describe '#stages' do
before do
@commit1 = FactoryGirl.create :commit_status, commit: commit, name: 'aa', ref: 'bb', status: 'running'
@commit2 = FactoryGirl.create :commit_status, commit: commit, name: 'cc', ref: 'cc', status: 'pending'
@commit3 = FactoryGirl.create :commit_status, commit: commit, name: 'aa', ref: nil, status: 'success'
@commit4 = FactoryGirl.create :commit_status, commit: commit, name: 'dd', ref: nil, status: 'failed'
@commit5 = FactoryGirl.create :commit_status, commit: commit, name: 'ee', ref: nil, status: 'canceled'
FactoryGirl.create :commit_status, commit: commit, stage: 'build', stage_idx: 0, status: 'success'
FactoryGirl.create :commit_status, commit: commit, stage: 'build', stage_idx: 0, status: 'failed'
FactoryGirl.create :commit_status, commit: commit, stage: 'deploy', stage_idx: 2, status: 'running'
FactoryGirl.create :commit_status, commit: commit, stage: 'test', stage_idx: 1, status: 'success'
end
it 'return statuses that are running or pending' do
is_expected.to eq([@commit1, @commit2])
context 'stages list' do
subject { CommitStatus.where(commit: commit).stages }
it 'returns an ordered list of stages' do
is_expected.to eq(%w(build test deploy))
end
end
context 'stages with statuses' do
subject { CommitStatus.where(commit: commit).stages_status }
it 'returns a list of stages with statuses' do
is_expected.to eq({
'build' => 'failed',
'test' => 'success',
'deploy' => 'running'
})
end
end
end
end
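Taken together, the two contexts above pin down the expected shapes: `stages` returns an ordered list of stage names, while `stages_status` maps each stage to a single aggregate status. A rough sketch of that per-stage aggregation in plain Ruby, reusing Ci::Status.get_status from earlier in this diff; this is an illustration only, not the actual scope implementation:
# Illustration only: groups statuses by stage and folds each group through
# Ci::Status.get_status, matching the 'build' => 'failed', 'test' => 'success',
# 'deploy' => 'running' expectation above.
def stages_status_for(commit_statuses)
  commit_statuses
    .group_by(&:stage)
    .map { |stage, statuses| [stage, Ci::Status.get_status(statuses)] }
    .to_h
end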
......@@ -212,4 +212,34 @@ describe Issue, "Issuable" do
expect(issue.downvotes).to eq(1)
end
end
describe ".with_label" do
let(:project) { create(:project, :public) }
let(:bug) { create(:label, project: project, title: 'bug') }
let(:feature) { create(:label, project: project, title: 'feature') }
let(:enhancement) { create(:label, project: project, title: 'enhancement') }
let(:issue1) { create(:issue, title: "Bugfix1", project: project) }
let(:issue2) { create(:issue, title: "Bugfix2", project: project) }
let(:issue3) { create(:issue, title: "Feature1", project: project) }
before(:each) do
issue1.labels << bug
issue1.labels << feature
issue2.labels << bug
issue2.labels << enhancement
issue3.labels << feature
end
it 'finds the correct issue containing just the enhancement label' do
expect(Issue.with_label(enhancement.title)).to match_array([issue2])
end
it 'finds the correct issues containing the same label' do
expect(Issue.with_label(bug.title)).to match_array([issue1, issue2])
end
it 'finds only the issues containing both labels' do
expect(Issue.with_label([bug.title, enhancement.title])).to match_array([issue2])
end
end
end
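The `.with_label` examples above imply AND semantics when several titles are given: an issue must carry every requested label. One common way to write such a scope in ActiveRecord is sketched below as an assumption for illustration; it is not the actual GitLab scope:
# Assumed sketch of an AND-semantics label filter; not the real implementation.
def issues_with_all_labels(relation, titles)
  titles = Array(titles)
  relation.joins(:labels)
          .where(labels: { title: titles })
          .group('issues.id')
          .having('COUNT(DISTINCT labels.title) = ?', titles.size)
end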
require 'spec_helper'
describe Ci::Status do
describe '.get_status' do
subject { described_class.get_status(statuses) }
describe Statuseable do
before do
@object = Object.new
@object.extend(Statuseable::ClassMethods)
end
describe '.status' do
before do
allow(@object).to receive(:all).and_return(CommitStatus.where(id: statuses))
end
subject { @object.status }
shared_examples 'build status summary' do
context 'all successful' do
......
......@@ -404,12 +404,12 @@ describe MergeRequest, models: true do
describe 'when the source project exists' do
it 'returns the latest commit' do
commit = double(:commit, id: '123abc')
ci_commit = double(:ci_commit)
ci_commit = double(:ci_commit, ref: 'master')
allow(subject).to receive(:last_commit).and_return(commit)
expect(subject.source_project).to receive(:ci_commit).
with('123abc').
with('123abc', 'master').
and_return(ci_commit)
expect(subject.ci_commit).to eq(ci_commit)
......
......@@ -441,9 +441,22 @@ describe Project, models: true do
describe :ci_commit do
let(:project) { create :project }
let(:commit) { create :ci_commit, project: project }
let(:commit) { create :ci_commit, project: project, ref: 'master' }
it { expect(project.ci_commit(commit.sha)).to eq(commit) }
subject { project.ci_commit(commit.sha, 'master') }
it { is_expected.to eq(commit) }
context 'return latest' do
let(:commit2) { create :ci_commit, project: project, ref: 'master' }
before do
commit
commit2
end
it { is_expected.to eq(commit2) }
end
end
describe :builds_enabled do
......
......@@ -59,7 +59,7 @@ describe API::API, api: true do
describe 'GET /projects/:id/repository/commits/:sha/builds' do
before do
project.ensure_ci_commit(commit.sha)
project.ensure_ci_commit(commit.sha, 'master')
get api("/projects/#{project.id}/repository/commits/#{commit.sha}/builds", api_user)
end
......
......@@ -16,7 +16,8 @@ describe API::CommitStatus, api: true do
let(:get_url) { "/projects/#{project.id}/repository/commits/#{sha}/statuses" }
context 'ci commit exists' do
let!(:ci_commit) { project.ensure_ci_commit(commit.id) }
let!(:master) { project.ci_commits.create(sha: commit.id, ref: 'master') }
let!(:develop) { project.ci_commits.create(sha: commit.id, ref: 'develop') }
it_behaves_like 'a paginated resources' do
let(:request) { get api(get_url, reporter) }
......@@ -25,16 +26,16 @@ describe API::CommitStatus, api: true do
context "reporter user" do
let(:statuses_id) { json_response.map { |status| status['id'] } }
def create_status(opts = {})
create(:commit_status, { commit: ci_commit }.merge(opts))
def create_status(commit, opts = {})
create(:commit_status, { commit: commit, ref: commit.ref }.merge(opts))
end
let!(:status1) { create_status(status: 'running') }
let!(:status2) { create_status(name: 'coverage', status: 'pending') }
let!(:status3) { create_status(ref: 'develop', status: 'running', allow_failure: true) }
let!(:status4) { create_status(name: 'coverage', status: 'success') }
let!(:status5) { create_status(name: 'coverage', ref: 'develop', status: 'success') }
let!(:status6) { create_status(status: 'success') }
let!(:status1) { create_status(master, status: 'running') }
let!(:status2) { create_status(master, name: 'coverage', status: 'pending') }
let!(:status3) { create_status(develop, status: 'running', allow_failure: true) }
let!(:status4) { create_status(master, name: 'coverage', status: 'success') }
let!(:status5) { create_status(develop, name: 'coverage', status: 'success') }
let!(:status6) { create_status(master, status: 'success') }
context 'latest commit statuses' do
before { get api(get_url, reporter) }
......
......@@ -48,14 +48,14 @@ describe API::API, api: true do
expect(response.status).to eq(404)
end
it "should return not_found for CI status" do
it "should return nil for commit without CI" do
get api("/projects/#{project.id}/repository/commits/#{project.repository.commit.id}", user)
expect(response.status).to eq(200)
expect(json_response['status']).to eq('not_found')
expect(json_response['status']).to be_nil
end
it "should return status for CI" do
ci_commit = project.ensure_ci_commit(project.repository.commit.sha)
ci_commit = project.ensure_ci_commit(project.repository.commit.sha, 'master')
get api("/projects/#{project.id}/repository/commits/#{project.repository.commit.id}", user)
expect(response.status).to eq(200)
expect(json_response['status']).to eq(ci_commit.status)
......
......@@ -20,8 +20,8 @@ describe Ci::API::API do
describe "POST /builds/register" do
it "should start a build" do
commit = FactoryGirl.create(:ci_commit, project: project)
commit.create_builds('master', false, nil)
commit = FactoryGirl.create(:ci_commit, project: project, ref: 'master')
commit.create_builds(nil)
build = commit.builds.first
post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin }
......@@ -56,8 +56,8 @@ describe Ci::API::API do
end
it "returns options" do
commit = FactoryGirl.create(:ci_commit, project: project)
commit.create_builds('master', false, nil)
commit = FactoryGirl.create(:ci_commit, project: project, ref: 'master')
commit.create_builds(nil)
post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin }
......@@ -66,8 +66,8 @@ describe Ci::API::API do
end
it "returns variables" do
commit = FactoryGirl.create(:ci_commit, project: project)
commit.create_builds('master', false, nil)
commit = FactoryGirl.create(:ci_commit, project: project, ref: 'master')
commit.create_builds(nil)
project.variables << Ci::Variable.new(key: "SECRET_KEY", value: "secret_value")
post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin }
......@@ -83,10 +83,10 @@ describe Ci::API::API do
it "returns variables for triggers" do
trigger = FactoryGirl.create(:ci_trigger, project: project)
commit = FactoryGirl.create(:ci_commit, project: project)
commit = FactoryGirl.create(:ci_commit, project: project, ref: 'master')
trigger_request = FactoryGirl.create(:ci_trigger_request_with_variables, commit: commit, trigger: trigger)
commit.create_builds('master', false, nil, trigger_request)
commit.create_builds(nil, trigger_request)
project.variables << Ci::Variable.new(key: "SECRET_KEY", value: "secret_value")
post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin }
......@@ -103,8 +103,8 @@ describe Ci::API::API do
end
it "returns dependent builds" do
commit = FactoryGirl.create(:ci_commit, project: project)
commit.create_builds('master', false, nil, nil)
commit = FactoryGirl.create(:ci_commit, project: project, ref: 'master')
commit.create_builds(nil, nil)
commit.builds.where(stage: 'test').each(&:success)
post ci_api("/builds/register"), token: runner.token, info: { platform: :darwin }
......
require 'spec_helper'
describe Ci::CreateBuildsService, services: true do
let(:commit) { create(:ci_commit) }
let(:commit) { create(:ci_commit, ref: 'master') }
let(:user) { create(:user) }
describe '#execute' do
......@@ -9,7 +9,7 @@ describe Ci::CreateBuildsService, services: true do
#
subject do
described_class.new.execute(commit, 'test', 'master', nil, user, nil, status)
described_class.new(commit).execute(commit, nil, user, status)
end
context 'next builds available' do
......
......@@ -5,7 +5,7 @@ module Ci
let(:service) { ImageForBuildService.new }
let(:project) { FactoryGirl.create(:empty_project) }
let(:commit_sha) { '01234567890123456789' }
let(:commit) { project.ensure_ci_commit(commit_sha) }
let(:commit) { project.ensure_ci_commit(commit_sha, 'master') }
let(:build) { FactoryGirl.create(:ci_build, commit: commit) }
describe :execute do
......
......@@ -100,7 +100,7 @@ describe Issues::BulkUpdateService, services: true do
describe :update_milestone do
before do
@milestone = create :milestone
@milestone = create(:milestone, project: @project)
@params = {
issues_ids: [issue.id],
milestone_id: @milestone.id
......
......@@ -3,40 +3,75 @@ require 'spec_helper'
describe Issues::CreateService, services: true do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
describe '#execute' do
let(:issue) { described_class.new(project, user, opts).execute }
context 'when params are valid' do
let(:assignee) { create(:user) }
let(:milestone) { create(:milestone, project: project) }
let(:labels) { create_pair(:label, project: project) }
describe :execute do
context 'valid params' do
before do
project.team << [user, :master]
project.team << [assignee, :master]
end
opts = {
title: 'Awesome issue',
let(:opts) do
{ title: 'Awesome issue',
description: 'please fix',
assignee: assignee
}
@issue = Issues::CreateService.new(project, user, opts).execute
assignee: assignee,
label_ids: labels.map(&:id),
milestone_id: milestone.id }
end
it { expect(@issue).to be_valid }
it { expect(@issue.title).to eq('Awesome issue') }
it { expect(@issue.assignee).to eq assignee }
it { expect(issue).to be_valid }
it { expect(issue.title).to eq('Awesome issue') }
it { expect(issue.assignee).to eq assignee }
it { expect(issue.labels).to match_array labels }
it { expect(issue.milestone).to eq milestone }
it 'creates a pending todo for new assignee' do
attributes = {
project: project,
author: user,
user: assignee,
target_id: @issue.id,
target_type: @issue.class.name,
target_id: issue.id,
target_type: issue.class.name,
action: Todo::ASSIGNED,
state: :pending
}
expect(Todo.where(attributes).count).to eq 1
end
context 'when label belongs to different project' do
let(:label) { create(:label) }
let(:opts) do
{ title: 'Title',
description: 'Description',
label_ids: [label.id] }
end
it 'does not assign label' do
expect(issue.labels).to_not include label
end
end
context 'when milestone belongs to different project' do
let(:milestone) { create(:milestone) }
let(:opts) do
{ title: 'Title',
description: 'Description',
milestone_id: milestone.id }
end
it 'does not assign milestone' do
expect(issue.milestone).to_not eq milestone
end
end
end
end
end
......@@ -4,10 +4,15 @@ describe Issues::UpdateService, services: true do
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:user3) { create(:user) }
let(:issue) { create(:issue, title: 'Old title', assignee_id: user3.id) }
let(:label) { create(:label) }
let(:project) { create(:empty_project) }
let(:label) { create(:label, project: project) }
let(:label2) { create(:label) }
let(:project) { issue.project }
let(:issue) do
create(:issue, title: 'Old title',
assignee_id: user3.id,
project: project)
end
before do
project.team << [user, :master]
......
require 'spec_helper'
describe MergeRequests::UpdateService, services: true do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:user3) { create(:user) }
let(:merge_request) { create(:merge_request, :simple, title: 'Old title', assignee_id: user3.id) }
let(:project) { merge_request.project }
let(:label) { create(:label) }
let(:label) { create(:label, project: project) }
let(:label2) { create(:label) }
let(:merge_request) do
create(:merge_request, :simple, title: 'Old title',
assignee_id: user3.id,
source_project: project)
end
before do
project.team << [user, :master]
project.team << [user2, :developer]
......
/*! Raven.js 2.3.0 (b09d766) | github.com/getsentry/raven-js */
/*
* Includes TraceKit
* https://github.com/getsentry/TraceKit
*
* Copyright 2016 Matt Robenolt and other contributors
* Released under the BSD license
* https://github.com/getsentry/raven-js/blob/master/LICENSE
*
*/
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Raven = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
'use strict';
function RavenConfigError(message) {
this.name = 'RavenConfigError';
this.message = message;
}
RavenConfigError.prototype = new Error();
RavenConfigError.prototype.constructor = RavenConfigError;
module.exports = RavenConfigError;
},{}],2:[function(_dereq_,module,exports){
/*global XDomainRequest:false*/
'use strict';
var TraceKit = _dereq_(5);
var RavenConfigError = _dereq_(1);
var utils = _dereq_(4);
var isFunction = utils.isFunction;
var isUndefined = utils.isUndefined;
var isError = utils.isError;
var isEmptyObject = utils.isEmptyObject;
var hasKey = utils.hasKey;
var joinRegExp = utils.joinRegExp;
var each = utils.each;
var objectMerge = utils.objectMerge;
var truncate = utils.truncate;
var urlencode = utils.urlencode;
var uuid4 = utils.uuid4;
var dsnKeys = 'source protocol user pass host port path'.split(' '),
dsnPattern = /^(?:(\w+):)?\/\/(?:(\w+)(:\w+)?@)?([\w\.-]+)(?::(\d+))?(\/.*)/;
function now() {
return +new Date();
}
// First, check for JSON support
// If there is no JSON, we no-op the core features of Raven
// since JSON is required to encode the payload
function Raven() {
this._hasJSON = !!(typeof JSON === 'object' && JSON.stringify);
// Raven can run in contexts where there's no document (react-native)
this._hasDocument = typeof document !== 'undefined';
this._lastCapturedException = null;
this._lastEventId = null;
this._globalServer = null;
this._globalKey = null;
this._globalProject = null;
this._globalContext = {};
this._globalOptions = {
logger: 'javascript',
ignoreErrors: [],
ignoreUrls: [],
whitelistUrls: [],
includePaths: [],
crossOrigin: 'anonymous',
collectWindowErrors: true,
maxMessageLength: 0,
stackTraceLimit: 50
};
this._ignoreOnError = 0;
this._isRavenInstalled = false;
this._originalErrorStackTraceLimit = Error.stackTraceLimit;
// capture references to window.console *and* all its methods first
// before the console plugin has a chance to monkey patch
this._originalConsole = window.console || {};
this._originalConsoleMethods = {};
this._plugins = [];
this._startTime = now();
this._wrappedBuiltIns = [];
for (var method in this._originalConsole) { // eslint-disable-line guard-for-in
this._originalConsoleMethods[method] = this._originalConsole[method];
}
}
/*
* The core Raven singleton
*
* @this {Raven}
*/
Raven.prototype = {
// Hardcode version string so that raven source can be loaded directly via
// webpack (using a build step causes webpack #1617). Grunt verifies that
// this value matches package.json during build.
// See: https://github.com/getsentry/raven-js/issues/465
VERSION: '2.3.0',
debug: false,
TraceKit: TraceKit, // alias to TraceKit
/*
* Configure Raven with a DSN and extra options
*
* @param {string} dsn The public Sentry DSN
* @param {object} options Optional set of of global options [optional]
* @return {Raven}
*/
config: function(dsn, options) {
var self = this;
if (this._globalServer) {
this._logDebug('error', 'Error: Raven has already been configured');
return this;
}
if (!dsn) return this;
// merge in options
if (options) {
each(options, function(key, value){
// tags and extra are special and need to be put into context
if (key === 'tags' || key === 'extra') {
self._globalContext[key] = value;
} else {
self._globalOptions[key] = value;
}
});
}
var uri = this._parseDSN(dsn),
lastSlash = uri.path.lastIndexOf('/'),
path = uri.path.substr(1, lastSlash);
this._dsn = dsn;
// "Script error." is hard coded into browsers for errors that it can't read.
// this is the result of a script being pulled in from an external domain and CORS.
this._globalOptions.ignoreErrors.push(/^Script error\.?$/);
this._globalOptions.ignoreErrors.push(/^Javascript error: Script error\.? on line 0$/);
// join regexp rules into one big rule
this._globalOptions.ignoreErrors = joinRegExp(this._globalOptions.ignoreErrors);
this._globalOptions.ignoreUrls = this._globalOptions.ignoreUrls.length ? joinRegExp(this._globalOptions.ignoreUrls) : false;
this._globalOptions.whitelistUrls = this._globalOptions.whitelistUrls.length ? joinRegExp(this._globalOptions.whitelistUrls) : false;
this._globalOptions.includePaths = joinRegExp(this._globalOptions.includePaths);
this._globalKey = uri.user;
this._globalSecret = uri.pass && uri.pass.substr(1);
this._globalProject = uri.path.substr(lastSlash + 1);
this._globalServer = this._getGlobalServer(uri);
this._globalEndpoint = this._globalServer +
'/' + path + 'api/' + this._globalProject + '/store/';
if (this._globalOptions.fetchContext) {
TraceKit.remoteFetching = true;
}
if (this._globalOptions.linesOfContext) {
TraceKit.linesOfContext = this._globalOptions.linesOfContext;
}
TraceKit.collectWindowErrors = !!this._globalOptions.collectWindowErrors;
// return for chaining
return this;
},
/*
* Installs a global window.onerror error handler
* to capture and report uncaught exceptions.
* At this point, install() is required to be called due
* to the way TraceKit is set up.
*
* @return {Raven}
*/
install: function() {
var self = this;
if (this.isSetup() && !this._isRavenInstalled) {
TraceKit.report.subscribe(function () {
self._handleOnErrorStackInfo.apply(self, arguments);
});
this._wrapBuiltIns();
// Install all of the plugins
this._drainPlugins();
this._isRavenInstalled = true;
}
Error.stackTraceLimit = this._globalOptions.stackTraceLimit;
return this;
},
/*
* Wrap code within a context so Raven can capture errors
* reliably across domains that is executed immediately.
*
* @param {object} options A specific set of options for this context [optional]
* @param {function} func The callback to be immediately executed within the context
* @param {array} args An array of arguments to be called with the callback [optional]
*/
context: function(options, func, args) {
if (isFunction(options)) {
args = func || [];
func = options;
options = undefined;
}
return this.wrap(options, func).apply(this, args);
},
/*
* Wrap code within a context and returns back a new function to be executed
*
* @param {object} options A specific set of options for this context [optional]
* @param {function} func The function to be wrapped in a new context
* @return {function} The newly wrapped functions with a context
*/
wrap: function(options, func) {
var self = this;
// 1 argument has been passed, and it's not a function
// so just return it
if (isUndefined(func) && !isFunction(options)) {
return options;
}
// options is optional
if (isFunction(options)) {
func = options;
options = undefined;
}
// At this point, we've passed along 2 arguments, and the second one
// is not a function either, so we'll just return the second argument.
if (!isFunction(func)) {
return func;
}
// We don't wanna wrap it twice!
try {
if (func.__raven__) {
return func;
}
} catch (e) {
// Just accessing the __raven__ prop in some Selenium environments
// can cause a "Permission denied" exception (see raven-js#495).
// Bail on wrapping and return the function as-is (defers to window.onerror).
return func;
}
// If this has already been wrapped in the past, return that
if (func.__raven_wrapper__ ){
return func.__raven_wrapper__ ;
}
function wrapped() {
var args = [], i = arguments.length,
deep = !options || options && options.deep !== false;
// Recursively wrap all of a function's arguments that are
// functions themselves.
while(i--) args[i] = deep ? self.wrap(options, arguments[i]) : arguments[i];
try {
return func.apply(this, args);
} catch(e) {
self._ignoreNextOnError();
self.captureException(e, options);
throw e;
}
}
// copy over properties of the old function
for (var property in func) {
if (hasKey(func, property)) {
wrapped[property] = func[property];
}
}
func.__raven_wrapper__ = wrapped;
wrapped.prototype = func.prototype;
// Signal that this function has been wrapped already
// for both debugging and to prevent it to being wrapped twice
wrapped.__raven__ = true;
wrapped.__inner__ = func;
return wrapped;
},
/*
* Uninstalls the global error handler.
*
* @return {Raven}
*/
uninstall: function() {
TraceKit.report.uninstall();
this._restoreBuiltIns();
Error.stackTraceLimit = this._originalErrorStackTraceLimit;
this._isRavenInstalled = false;
return this;
},
/*
* Manually capture an exception and send it over to Sentry
*
* @param {error} ex An exception to be logged
* @param {object} options A specific set of options for this error [optional]
* @return {Raven}
*/
captureException: function(ex, options) {
// If not an Error is passed through, recall as a message instead
if (!isError(ex)) return this.captureMessage(ex, options);
// Store the raw exception object for potential debugging and introspection
this._lastCapturedException = ex;
// TraceKit.report will re-raise any exception passed to it,
// which means you have to wrap it in try/catch. Instead, we
// can wrap it here and only re-raise if TraceKit.report
// raises an exception different from the one we asked to
// report on.
try {
var stack = TraceKit.computeStackTrace(ex);
this._handleStackInfo(stack, options);
} catch(ex1) {
if(ex !== ex1) {
throw ex1;
}
}
return this;
},
/*
* Manually send a message to Sentry
*
* @param {string} msg A plain message to be captured in Sentry
* @param {object} options A specific set of options for this message [optional]
* @return {Raven}
*/
captureMessage: function(msg, options) {
// config() automagically converts ignoreErrors from a list to a RegExp so we need to test for an
// early call; we'll error on the side of logging anything called before configuration since it's
// probably something you should see:
if (!!this._globalOptions.ignoreErrors.test && this._globalOptions.ignoreErrors.test(msg)) {
return;
}
// Fire away!
this._send(
objectMerge({
message: msg + '' // Make sure it's actually a string
}, options)
);
return this;
},
addPlugin: function(plugin /*arg1, arg2, ... argN*/) {
var pluginArgs = Array.prototype.slice.call(arguments, 1);
this._plugins.push([plugin, pluginArgs]);
if (this._isRavenInstalled) {
this._drainPlugins();
}
return this;
},
/*
* Set/clear a user to be sent along with the payload.
*
* @param {object} user An object representing user data [optional]
* @return {Raven}
*/
setUserContext: function(user) {
// Intentionally do not merge here since that's an unexpected behavior.
this._globalContext.user = user;
return this;
},
/*
* Merge extra attributes to be sent along with the payload.
*
* @param {object} extra An object representing extra data [optional]
* @return {Raven}
*/
setExtraContext: function(extra) {
this._mergeContext('extra', extra);
return this;
},
/*
* Merge tags to be sent along with the payload.
*
* @param {object} tags An object representing tags [optional]
* @return {Raven}
*/
setTagsContext: function(tags) {
this._mergeContext('tags', tags);
return this;
},
/*
* Clear all of the context.
*
* @return {Raven}
*/
clearContext: function() {
this._globalContext = {};
return this;
},
/*
* Get a copy of the current context. This cannot be mutated.
*
* @return {object} copy of context
*/
getContext: function() {
// lol javascript
return JSON.parse(JSON.stringify(this._globalContext));
},
/*
* Set release version of application
*
* @param {string} release Typically something like a git SHA to identify version
* @return {Raven}
*/
setRelease: function(release) {
this._globalOptions.release = release;
return this;
},
/*
* Set the dataCallback option
*
* @param {function} callback The callback to run which allows the
* data blob to be mutated before sending
* @return {Raven}
*/
setDataCallback: function(callback) {
this._globalOptions.dataCallback = callback;
return this;
},
/*
* Set the shouldSendCallback option
*
* @param {function} callback The callback to run which allows
* introspecting the blob before sending
* @return {Raven}
*/
setShouldSendCallback: function(callback) {
this._globalOptions.shouldSendCallback = callback;
return this;
},
/**
* Override the default HTTP transport mechanism that transmits data
* to the Sentry server.
*
* @param {function} transport Function invoked instead of the default
* `makeRequest` handler.
*
* @return {Raven}
*/
setTransport: function(transport) {
this._globalOptions.transport = transport;
return this;
},
/*
* Get the latest raw exception that was captured by Raven.
*
* @return {error}
*/
lastException: function() {
return this._lastCapturedException;
},
/*
* Get the last event id
*
* @return {string}
*/
lastEventId: function() {
return this._lastEventId;
},
/*
* Determine if Raven is setup and ready to go.
*
* @return {boolean}
*/
isSetup: function() {
if (!this._hasJSON) return false; // needs JSON support
if (!this._globalServer) {
if (!this.ravenNotConfiguredError) {
this.ravenNotConfiguredError = true;
this._logDebug('error', 'Error: Raven has not been configured.');
}
return false;
}
return true;
},
afterLoad: function () {
// TODO: remove window dependence?
// Attempt to initialize Raven on load
var RavenConfig = window.RavenConfig;
if (RavenConfig) {
this.config(RavenConfig.dsn, RavenConfig.config).install();
}
},
showReportDialog: function (options) {
if (!window.document) // doesn't work without a document (React native)
return;
options = options || {};
var lastEventId = options.eventId || this.lastEventId();
if (!lastEventId) {
throw new RavenConfigError('Missing eventId');
}
var dsn = options.dsn || this._dsn;
if (!dsn) {
throw new RavenConfigError('Missing DSN');
}
var encode = encodeURIComponent;
var qs = '';
qs += '?eventId=' + encode(lastEventId);
qs += '&dsn=' + encode(dsn);
var user = options.user || this._globalContext.user;
if (user) {
if (user.name) qs += '&name=' + encode(user.name);
if (user.email) qs += '&email=' + encode(user.email);
}
var globalServer = this._getGlobalServer(this._parseDSN(dsn));
var script = document.createElement('script');
script.async = true;
script.src = globalServer + '/api/embed/error-page/' + qs;
(document.head || document.body).appendChild(script);
},
/**** Private functions ****/
_ignoreNextOnError: function () {
var self = this;
this._ignoreOnError += 1;
setTimeout(function () {
// onerror should trigger before setTimeout
self._ignoreOnError -= 1;
});
},
_triggerEvent: function(eventType, options) {
// NOTE: `event` is a native browser thing, so let's avoid conflicting wiht it
var evt, key;
if (!this._hasDocument)
return;
options = options || {};
eventType = 'raven' + eventType.substr(0,1).toUpperCase() + eventType.substr(1);
if (document.createEvent) {
evt = document.createEvent('HTMLEvents');
evt.initEvent(eventType, true, true);
} else {
evt = document.createEventObject();
evt.eventType = eventType;
}
for (key in options) if (hasKey(options, key)) {
evt[key] = options[key];
}
if (document.createEvent) {
// IE9 if standards
document.dispatchEvent(evt);
} else {
// IE8 regardless of Quirks or Standards
// IE9 if quirks
try {
document.fireEvent('on' + evt.eventType.toLowerCase(), evt);
} catch(e) {
// Do nothing
}
}
},
/**
* Install any queued plugins
*/
_wrapBuiltIns: function() {
var self = this;
function fill(obj, name, replacement, noUndo) {
var orig = obj[name];
obj[name] = replacement(orig);
if (!noUndo) {
self._wrappedBuiltIns.push([obj, name, orig]);
}
}
function wrapTimeFn(orig) {
return function (fn, t) { // preserve arity
// Make a copy of the arguments
var args = [].slice.call(arguments);
var originalCallback = args[0];
if (isFunction(originalCallback)) {
args[0] = self.wrap(originalCallback);
}
// IE < 9 doesn't support .call/.apply on setInterval/setTimeout, but it
// also supports only two arguments and doesn't care what this is, so we
// can just call the original function directly.
if (orig.apply) {
return orig.apply(this, args);
} else {
return orig(args[0], args[1]);
}
};
}
fill(window, 'setTimeout', wrapTimeFn);
fill(window, 'setInterval', wrapTimeFn);
if (window.requestAnimationFrame) {
fill(window, 'requestAnimationFrame', function (orig) {
return function (cb) {
return orig(self.wrap(cb));
};
});
}
// event targets borrowed from bugsnag-js:
// https://github.com/bugsnag/bugsnag-js/blob/master/src/bugsnag.js#L666
'EventTarget Window Node ApplicationCache AudioTrackList ChannelMergerNode CryptoOperation EventSource FileReader HTMLUnknownElement IDBDatabase IDBRequest IDBTransaction KeyOperation MediaController MessagePort ModalWindow Notification SVGElementInstance Screen TextTrack TextTrackCue TextTrackList WebSocket WebSocketWorker Worker XMLHttpRequest XMLHttpRequestEventTarget XMLHttpRequestUpload'.replace(/\w+/g, function (global) {
var proto = window[global] && window[global].prototype;
if (proto && proto.hasOwnProperty && proto.hasOwnProperty('addEventListener')) {
fill(proto, 'addEventListener', function(orig) {
return function (evt, fn, capture, secure) { // preserve arity
try {
if (fn && fn.handleEvent) {
fn.handleEvent = self.wrap(fn.handleEvent);
}
} catch (err) {
// can sometimes get 'Permission denied to access property "handle Event'
}
return orig.call(this, evt, self.wrap(fn), capture, secure);
};
});
fill(proto, 'removeEventListener', function (orig) {
return function (evt, fn, capture, secure) {
fn = fn && (fn.__raven_wrapper__ ? fn.__raven_wrapper__ : fn);
return orig.call(this, evt, fn, capture, secure);
};
});
}
});
if ('XMLHttpRequest' in window) {
fill(XMLHttpRequest.prototype, 'send', function(origSend) {
return function (data) { // preserve arity
var xhr = this;
'onreadystatechange onload onerror onprogress'.replace(/\w+/g, function (prop) {
if (prop in xhr && Object.prototype.toString.call(xhr[prop]) === '[object Function]') {
fill(xhr, prop, function (orig) {
return self.wrap(orig);
}, true /* noUndo */); // don't track filled methods on XHR instances
}
});
return origSend.apply(this, arguments);
};
});
}
var $ = window.jQuery || window.$;
if ($ && $.fn && $.fn.ready) {
fill($.fn, 'ready', function (orig) {
return function (fn) {
return orig.call(this, self.wrap(fn));
};
});
}
},
_restoreBuiltIns: function () {
// restore any wrapped builtins
var builtin;
while (this._wrappedBuiltIns.length) {
builtin = this._wrappedBuiltIns.shift();
var obj = builtin[0],
name = builtin[1],
orig = builtin[2];
obj[name] = orig;
}
},
_drainPlugins: function() {
var self = this;
// FIX ME TODO
each(this._plugins, function(_, plugin) {
var installer = plugin[0];
var args = plugin[1];
installer.apply(self, [self].concat(args));
});
},
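// _parseDSN splits a Sentry DSN string into its component pieces (protocol,
// keys, host, port, path) and rejects a DSN that embeds the secret key
// unless allowSecretKey is set.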
_parseDSN: function(str) {
var m = dsnPattern.exec(str),
dsn = {},
i = 7;
try {
while (i--) dsn[dsnKeys[i]] = m[i] || '';
} catch(e) {
throw new RavenConfigError('Invalid DSN: ' + str);
}
if (dsn.pass && !this._globalOptions.allowSecretKey) {
throw new RavenConfigError('Do not specify your secret key in the DSN. See: http://bit.ly/raven-secret-key');
}
return dsn;
},
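// Illustrative: given { protocol: 'https', host: 'example.com', port: '' },
// _getGlobalServer returns 'https://example.com'; without a protocol it
// returns a protocol-relative '//example.com'.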
_getGlobalServer: function(uri) {
// assemble the endpoint from the uri pieces
var globalServer = '//' + uri.host +
(uri.port ? ':' + uri.port : '');
if (uri.protocol) {
globalServer = uri.protocol + ':' + globalServer;
}
return globalServer;
},
_handleOnErrorStackInfo: function() {
// if we are intentionally ignoring errors via onerror, bail out
if (!this._ignoreOnError) {
this._handleStackInfo.apply(this, arguments);
}
},
_handleStackInfo: function(stackInfo, options) {
var self = this;
var frames = [];
if (stackInfo.stack && stackInfo.stack.length) {
each(stackInfo.stack, function(i, stack) {
var frame = self._normalizeFrame(stack);
if (frame) {
frames.push(frame);
}
});
}
this._triggerEvent('handle', {
stackInfo: stackInfo,
options: options
});
this._processException(
stackInfo.name,
stackInfo.message,
stackInfo.url,
stackInfo.lineno,
frames.slice(0, this._globalOptions.stackTraceLimit),
options
);
},
_normalizeFrame: function(frame) {
if (!frame.url) return;
// normalize the frames data
var normalized = {
filename: frame.url,
lineno: frame.line,
colno: frame.column,
'function': frame.func || '?'
}, context = this._extractContextFromFrame(frame), i;
if (context) {
var keys = ['pre_context', 'context_line', 'post_context'];
i = 3;
while (i--) normalized[keys[i]] = context[i];
}
normalized.in_app = !( // determine if an exception came from outside of our app
// first we check the global includePaths list.
!!this._globalOptions.includePaths.test && !this._globalOptions.includePaths.test(normalized.filename) ||
// next, check whether the function name is Raven or TraceKit
/(Raven|TraceKit)\./.test(normalized['function']) ||
// finally, we do a last ditch effort and check for raven.min.js
/raven\.(min\.)?js$/.test(normalized.filename)
);
return normalized;
},
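// _extractContextFromFrame returns [pre_context, context_line, post_context]
// for a frame, or, when the source looks minified, just a 50-character slice
// of the offending line (or nothing at all if no column is known).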
_extractContextFromFrame: function(frame) {
// immediately check if we should even attempt to parse a context
if (!frame.context || !this._globalOptions.fetchContext) return;
var context = frame.context,
pivot = ~~(context.length / 2),
i = context.length, isMinified = false;
while (i--) {
// Guess whether the source is minified: if *any* of the lines passed in
// is longer than 300 characters, assume the source is minified and fall
// back to a short slice of the offending line (or no context at all).
if (context[i].length > 300) {
isMinified = true;
break;
}
}
if (isMinified) {
// The source is minified; if we don't even know the column, give up.
if (isUndefined(frame.column)) return;
// If the source is minified and has a frame column
// we take a chunk of the offending line to hopefully shed some light
return [
[], // no pre_context
context[pivot].substr(frame.column, 50), // grab 50 characters, starting at the offending column
[] // no post_context
];
}
return [
context.slice(0, pivot), // pre_context
context[pivot], // context_line
context.slice(pivot + 1) // post_context
];
},
_processException: function(type, message, fileurl, lineno, frames, options) {
var stacktrace, fullMessage;
if (!!this._globalOptions.ignoreErrors.test && this._globalOptions.ignoreErrors.test(message)) return;
message += '';
message = truncate(message, this._globalOptions.maxMessageLength);
fullMessage = (type ? type + ': ' : '') + message;
fullMessage = truncate(fullMessage, this._globalOptions.maxMessageLength);
if (frames && frames.length) {
fileurl = frames[0].filename || fileurl;
// Sentry expects frames oldest to newest
// and JS sends them as newest to oldest
frames.reverse();
stacktrace = {frames: frames};
} else if (fileurl) {
stacktrace = {
frames: [{
filename: fileurl,
lineno: lineno,
in_app: true
}]
};
}
if (!!this._globalOptions.ignoreUrls.test && this._globalOptions.ignoreUrls.test(fileurl)) return;
if (!!this._globalOptions.whitelistUrls.test && !this._globalOptions.whitelistUrls.test(fileurl)) return;
var data = objectMerge({
// sentry.interfaces.Exception
exception: {
values: [{
type: type,
value: message,
stacktrace: stacktrace
}]
},
culprit: fileurl,
message: fullMessage
}, options);
// Fire away!
this._send(data);
},
_trimPacket: function(data) {
// For now, we only want to truncate the two different messages
// but this could/should be expanded to just trim everything
var max = this._globalOptions.maxMessageLength;
data.message = truncate(data.message, max);
if (data.exception) {
var exception = data.exception.values[0];
exception.value = truncate(exception.value, max);
}
return data;
},
_getHttpData: function() {
if (!this._hasDocument || !document.location || !document.location.href) {
return;
}
var httpData = {
headers: {
'User-Agent': navigator.userAgent
}
};
httpData.url = document.location.href;
if (document.referrer) {
httpData.headers.Referer = document.referrer;
}
return httpData;
},
_send: function(data) {
var self = this;
var globalOptions = this._globalOptions;
var baseData = {
project: this._globalProject,
logger: globalOptions.logger,
platform: 'javascript'
}, httpData = this._getHttpData();
if (httpData) {
baseData.request = httpData;
}
data = objectMerge(baseData, data);
// Merge in the tags and extra separately since objectMerge doesn't handle a deep merge
data.tags = objectMerge(objectMerge({}, this._globalContext.tags), data.tags);
data.extra = objectMerge(objectMerge({}, this._globalContext.extra), data.extra);
// Send along our own collected metadata with extra
data.extra['session:duration'] = now() - this._startTime;
// If there are no tags, strip the key from the payload altogether (extra always contains session:duration).
if (isEmptyObject(data.tags)) delete data.tags;
if (this._globalContext.user) {
// sentry.interfaces.User
data.user = this._globalContext.user;
}
// Include the release if it's defined in globalOptions
if (globalOptions.release) data.release = globalOptions.release;
// Include server_name if it's defined in globalOptions
if (globalOptions.serverName) data.server_name = globalOptions.serverName;
if (isFunction(globalOptions.dataCallback)) {
data = globalOptions.dataCallback(data) || data;
}
// dataCallback may have stripped the payload down to nothing; in that case there is nothing to send
if (!data || isEmptyObject(data)) {
return;
}
// Check if the request should be filtered or not
if (isFunction(globalOptions.shouldSendCallback) && !globalOptions.shouldSendCallback(data)) {
return;
}
// Send along an event_id if not explicitly passed.
// This event_id can be used to reference the error within Sentry itself.
// Set lastEventId after we know the error should actually be sent
this._lastEventId = data.event_id || (data.event_id = uuid4());
// Try and clean up the packet before sending by truncating long values
data = this._trimPacket(data);
this._logDebug('debug', 'Raven about to send:', data);
if (!this.isSetup()) return;
var auth = {
sentry_version: '7',
sentry_client: 'raven-js/' + this.VERSION,
sentry_key: this._globalKey
};
if (this._globalSecret) {
auth.sentry_secret = this._globalSecret;
}
var url = this._globalEndpoint;
(globalOptions.transport || this._makeRequest).call(this, {
url: url,
auth: auth,
data: data,
options: globalOptions,
onSuccess: function success() {
self._triggerEvent('success', {
data: data,
src: url
});
},
onError: function failure() {
self._triggerEvent('failure', {
data: data,
src: url
});
}
});
},
_makeImageRequest: function(opts) {
// Tack on sentry_data to auth options, which get urlencoded
opts.auth.sentry_data = JSON.stringify(opts.data);
var img = this._newImage(),
src = opts.url + '?' + urlencode(opts.auth),
crossOrigin = opts.options.crossOrigin;
if (crossOrigin || crossOrigin === '') {
img.crossOrigin = crossOrigin;
}
img.onload = opts.onSuccess;
img.onerror = img.onabort = opts.onError;
img.src = src;
},
_makeXhrRequest: function(opts) {
var request;
var url = opts.url;
function handler() {
if (request.status === 200) {
if (opts.onSuccess) {
opts.onSuccess();
}
} else if (opts.onError) {
opts.onError();
}
}
request = new XMLHttpRequest();
if ('withCredentials' in request) {
request.onreadystatechange = function () {
if (request.readyState !== 4) {
return;
}
handler();
};
} else {
request = new XDomainRequest();
// xdomainrequest cannot go http -> https (or vice versa),
// so always use protocol relative
url = url.replace(/^https?:/, '');
// onreadystatechange not supported by XDomainRequest
request.onload = handler;
}
// NOTE: auth is intentionally sent as part of query string (NOT as custom
// HTTP header) so as to avoid preflight CORS requests
request.open('POST', url + '?' + urlencode(opts.auth));
request.send(JSON.stringify(opts.data));
},
_makeRequest: function(opts) {
var hasCORS =
'withCredentials' in new XMLHttpRequest() ||
typeof XDomainRequest !== 'undefined';
return (hasCORS ? this._makeXhrRequest : this._makeImageRequest)(opts);
},
// Note: this wrapper exists only so that tests can stub image creation;
// stubbing document.createElement directly with sinon breaks too many
// other things.
_newImage: function() {
return document.createElement('img');
},
_logDebug: function(level) {
if (this._originalConsoleMethods[level] && this.debug) {
// In IE<10 console methods do not have their own 'apply' method
Function.prototype.apply.call(
this._originalConsoleMethods[level],
this._originalConsole,
[].slice.call(arguments, 1)
);
}
},
_mergeContext: function(key, context) {
if (isUndefined(context)) {
delete this._globalContext[key];
} else {
this._globalContext[key] = objectMerge(this._globalContext[key] || {}, context);
}
}
};
// Deprecations
Raven.prototype.setUser = Raven.prototype.setUserContext;
Raven.prototype.setReleaseContext = Raven.prototype.setRelease;
module.exports = Raven;
},{"1":1,"4":4,"5":5}],3:[function(_dereq_,module,exports){
/**
* Enforces a single instance of the Raven client, and the
* main entry point for Raven. If you are a consumer of the
* Raven library, you SHOULD load this file (vs raven.js).
**/
'use strict';
var RavenConstructor = _dereq_(2);
var _Raven = window.Raven;
var Raven = new RavenConstructor();
/*
* Allow multiple versions of Raven to be installed.
* Strips Raven from the global context and returns the instance.
*
* @return {Raven}
*/
Raven.noConflict = function () {
window.Raven = _Raven;
return Raven;
};
Raven.afterLoad();
module.exports = Raven;
},{"2":2}],4:[function(_dereq_,module,exports){
'use strict';
var objectPrototype = Object.prototype;
function isUndefined(what) {
return what === void 0;
}
function isFunction(what) {
return typeof what === 'function';
}
function isString(what) {
return objectPrototype.toString.call(what) === '[object String]';
}
function isObject(what) {
return typeof what === 'object' && what !== null;
}
function isEmptyObject(what) {
for (var _ in what) return false; // eslint-disable-line guard-for-in, no-unused-vars
return true;
}
// Adapted from https://github.com/joyent/node/blob/aa3b4b4/lib/util.js#L560
// with some tiny modifications
function isError(what) {
var toString = objectPrototype.toString.call(what);
return isObject(what) &&
toString === '[object Error]' ||
toString === '[object Exception]' || // Firefox NS_ERROR_FAILURE Exceptions
what instanceof Error;
}
function each(obj, callback) {
var i, j;
if (isUndefined(obj.length)) {
for (i in obj) {
if (hasKey(obj, i)) {
callback.call(null, i, obj[i]);
}
}
} else {
j = obj.length;
if (j) {
for (i = 0; i < j; i++) {
callback.call(null, i, obj[i]);
}
}
}
}
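// Illustrative: objectMerge({a: 1}, {a: 2, b: 3}) mutates and returns the
// first argument as {a: 2, b: 3}; a falsey second argument leaves it untouched.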
function objectMerge(obj1, obj2) {
if (!obj2) {
return obj1;
}
each(obj2, function(key, value){
obj1[key] = value;
});
return obj1;
}
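// Illustrative: truncate('abcdef', 3) === 'abc\u2026'; a falsey max returns
// the string unchanged.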
function truncate(str, max) {
return !max || str.length <= max ? str : str.substr(0, max) + '\u2026';
}
/**
* hasKey, a better form of hasOwnProperty
* Example: hasKey(MainHostObject, property) === true/false
*
* @param {Object} host object to check property
* @param {string} key to check
*/
function hasKey(object, key) {
return objectPrototype.hasOwnProperty.call(object, key);
}
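// Illustrative: joinRegExp(['fb_xd_fragment', /^Script error\.?$/]) produces
// /fb_xd_fragment|^Script error\.?$/i (strings are escaped, regexes
// contribute their source).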
function joinRegExp(patterns) {
// Combine an array of regular expressions and strings into one large,
// case-insensitive regexp.
var sources = [],
i = 0, len = patterns.length,
pattern;
for (; i < len; i++) {
pattern = patterns[i];
if (isString(pattern)) {
// If it's a string, we need to escape it
// Taken from: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions
sources.push(pattern.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1'));
} else if (pattern && pattern.source) {
// If it's a regexp already, we want to extract the source
sources.push(pattern.source);
}
// Intentionally skip other cases
}
return new RegExp(sources.join('|'), 'i');
}
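// Illustrative: urlencode({sentry_key: 'abc', sentry_version: '7'})
// === 'sentry_key=abc&sentry_version=7'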
function urlencode(o) {
var pairs = [];
each(o, function(key, value) {
pairs.push(encodeURIComponent(key) + '=' + encodeURIComponent(value));
});
return pairs.join('&');
}
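// uuid4 returns a random version-4 UUID as 32 hex characters without dashes
// (Raven uses it for event_id values).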
function uuid4() {
var crypto = window.crypto || window.msCrypto;
if (!isUndefined(crypto) && crypto.getRandomValues) {
// Use window.crypto API if available
var arr = new Uint16Array(8);
crypto.getRandomValues(arr);
// set 4 in byte 7
arr[3] = arr[3] & 0xFFF | 0x4000;
// set 2 most significant bits of byte 9 to '10'
arr[4] = arr[4] & 0x3FFF | 0x8000;
var pad = function(num) {
var v = num.toString(16);
while (v.length < 4) {
v = '0' + v;
}
return v;
};
return pad(arr[0]) + pad(arr[1]) + pad(arr[2]) + pad(arr[3]) + pad(arr[4]) +
pad(arr[5]) + pad(arr[6]) + pad(arr[7]);
} else {
// http://stackoverflow.com/questions/105034/how-to-create-a-guid-uuid-in-javascript/2117523#2117523
return 'xxxxxxxxxxxx4xxxyxxxxxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = Math.random()*16|0,
v = c === 'x' ? r : r&0x3|0x8;
return v.toString(16);
});
}
}
module.exports = {
isUndefined: isUndefined,
isFunction: isFunction,
isString: isString,
isObject: isObject,
isEmptyObject: isEmptyObject,
isError: isError,
each: each,
objectMerge: objectMerge,
truncate: truncate,
hasKey: hasKey,
joinRegExp: joinRegExp,
urlencode: urlencode,
uuid4: uuid4
};
},{}],5:[function(_dereq_,module,exports){
'use strict';
var utils = _dereq_(4);
var hasKey = utils.hasKey;
var isString = utils.isString;
var isUndefined = utils.isUndefined;
/*
TraceKit - Cross-browser stack traces - github.com/occ/TraceKit
MIT license
*/
var TraceKit = {
remoteFetching: false,
collectWindowErrors: true,
// 3 lines before, the offending line, 3 lines after
linesOfContext: 7,
debug: false
};
// global reference to slice
var _slice = [].slice;
var UNKNOWN_FUNCTION = '?';
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error#Error_types
var ERROR_TYPES_RE = /^(?:Uncaught )?((?:Eval|Internal|Range|Reference|Syntax|Type|URI)Error)\: ?(.*)$/;
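// e.g. 'Uncaught TypeError: foo is not a function' captures
// ['TypeError', 'foo is not a function']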
function getLocationHref() {
if (typeof document === 'undefined')
return '';
return document.location.href;
}
/**
* TraceKit.report: cross-browser processing of unhandled exceptions
*
* Syntax:
* TraceKit.report.subscribe(function(stackInfo) { ... })
* TraceKit.report.unsubscribe(function(stackInfo) { ... })
* TraceKit.report(exception)
* try { ...code... } catch(ex) { TraceKit.report(ex); }
*
* Supports:
* - Firefox: full stack trace with line numbers, plus column number
* on top frame; column number is not guaranteed
* - Opera: full stack trace with line and column numbers
* - Chrome: full stack trace with line and column numbers
* - Safari: line and column number for the top frame only; some frames
* may be missing, and column number is not guaranteed
* - IE: line and column number for the top frame only; some frames
* may be missing, and column number is not guaranteed
*
* In theory, TraceKit should work on all of the following versions:
* - IE5.5+ (only 8.0 tested)
* - Firefox 0.9+ (only 3.5+ tested)
* - Opera 7+ (only 10.50 tested; versions 9 and earlier may require
* Exceptions Have Stacktrace to be enabled in opera:config)
* - Safari 3+ (only 4+ tested)
* - Chrome 1+ (only 5+ tested)
* - Konqueror 3.5+ (untested)
*
* Requires TraceKit.computeStackTrace.
*
* Tries to catch all unhandled exceptions and report them to the
* subscribed handlers. Please note that TraceKit.report will rethrow the
* exception. This is REQUIRED in order to get a useful stack trace in IE.
* If the exception does not reach the top of the browser, you will only
* get a stack trace from the point where TraceKit.report was called.
*
* Handlers receive a stackInfo object as described in the
* TraceKit.computeStackTrace docs.
*/
TraceKit.report = (function reportModuleWrapper() {
var handlers = [],
lastArgs = null,
lastException = null,
lastExceptionStack = null;
/**
* Add a crash handler.
* @param {Function} handler
*/
function subscribe(handler) {
installGlobalHandler();
handlers.push(handler);
}
/**
* Remove a crash handler.
* @param {Function} handler
*/
function unsubscribe(handler) {
for (var i = handlers.length - 1; i >= 0; --i) {
if (handlers[i] === handler) {
handlers.splice(i, 1);
}
}
}
/**
* Remove all crash handlers.
*/
function unsubscribeAll() {
uninstallGlobalHandler();
handlers = [];
}
/**
* Dispatch stack information to all handlers.
* @param {Object.<string, *>} stack
*/
function notifyHandlers(stack, isWindowError) {
var exception = null;
if (isWindowError && !TraceKit.collectWindowErrors) {
return;
}
for (var i in handlers) {
if (hasKey(handlers, i)) {
try {
handlers[i].apply(null, [stack].concat(_slice.call(arguments, 2)));
} catch (inner) {
exception = inner;
}
}
}
if (exception) {
throw exception;
}
}
var _oldOnerrorHandler, _onErrorHandlerInstalled;
/**
* Ensures all global unhandled exceptions are recorded.
* Supported by Gecko and IE.
* @param {string} message Error message.
* @param {string} url URL of script that generated the exception.
* @param {(number|string)} lineNo The line number at which the error
* occurred.
* @param {?(number|string)} colNo The column number at which the error
* occurred.
* @param {?Error} ex The actual Error object.
*/
function traceKitWindowOnError(message, url, lineNo, colNo, ex) {
var stack = null;
if (lastExceptionStack) {
TraceKit.computeStackTrace.augmentStackTraceWithInitialElement(lastExceptionStack, url, lineNo, message);
processLastException();
} else if (ex) {
// New chrome and blink send along a real error object
// Let's just report that like a normal error.
// See: https://mikewest.org/2013/08/debugging-runtime-errors-with-window-onerror
stack = TraceKit.computeStackTrace(ex);
notifyHandlers(stack, true);
} else {
var location = {
'url': url,
'line': lineNo,
'column': colNo
};
location.func = TraceKit.computeStackTrace.guessFunctionName(location.url, location.line);
location.context = TraceKit.computeStackTrace.gatherContext(location.url, location.line);
var name = undefined;
var msg = message; // must be new var or will modify original `arguments`
var groups;
if (isString(message)) {
groups = message.match(ERROR_TYPES_RE);
if (groups) {
name = groups[1];
msg = groups[2];
}
}
stack = {
'name': name,
'message': msg,
'url': getLocationHref(),
'stack': [location]
};
notifyHandlers(stack, true);
}
if (_oldOnerrorHandler) {
return _oldOnerrorHandler.apply(this, arguments);
}
return false;
}
function installGlobalHandler() {
if (_onErrorHandlerInstalled) {
return;
}
_oldOnerrorHandler = window.onerror;
window.onerror = traceKitWindowOnError;
_onErrorHandlerInstalled = true;
}
function uninstallGlobalHandler() {
if (!_onErrorHandlerInstalled) {
return;
}
window.onerror = _oldOnerrorHandler;
_onErrorHandlerInstalled = false;
_oldOnerrorHandler = undefined;
}
function processLastException() {
var _lastExceptionStack = lastExceptionStack,
_lastArgs = lastArgs;
lastArgs = null;
lastExceptionStack = null;
lastException = null;
notifyHandlers.apply(null, [_lastExceptionStack, false].concat(_lastArgs));
}
/**
* Reports an unhandled Error to TraceKit.
* @param {Error} ex
* @param {?boolean} rethrow If false, do not re-throw the exception.
* Only used for window.onerror to not cause an infinite loop of
* rethrowing.
*/
function report(ex, rethrow) {
var args = _slice.call(arguments, 1);
if (lastExceptionStack) {
if (lastException === ex) {
return; // already caught by an inner catch block, ignore
} else {
processLastException();
}
}
var stack = TraceKit.computeStackTrace(ex);
lastExceptionStack = stack;
lastException = ex;
lastArgs = args;
// If the stack trace is incomplete, wait up to 2 seconds for slow IE to
// fire window.onerror before reporting this exception; otherwise we
// would end up with an incomplete stack trace
window.setTimeout(function () {
if (lastException === ex) {
processLastException();
}
}, (stack.incomplete ? 2000 : 0));
if (rethrow !== false) {
throw ex; // re-throw to propagate to the top level (and cause window.onerror)
}
}
report.subscribe = subscribe;
report.unsubscribe = unsubscribe;
report.uninstall = unsubscribeAll;
return report;
}());
/**
* TraceKit.computeStackTrace: cross-browser stack traces in JavaScript
*
* Syntax:
* s = TraceKit.computeStackTrace(exception) // consider using TraceKit.report instead (see below)
* Returns:
* s.name - exception name
* s.message - exception message
* s.stack[i].url - JavaScript or HTML file URL
* s.stack[i].func - function name, or empty for anonymous functions (if guessing did not work)
* s.stack[i].args - arguments passed to the function, if known
* s.stack[i].line - line number, if known
* s.stack[i].column - column number, if known
* s.stack[i].context - an array of source code lines; the middle element corresponds to the correct line#
*
* Supports:
* - Firefox: full stack trace with line numbers and unreliable column
* number on top frame
* - Opera 10: full stack trace with line and column numbers
* - Opera 9-: full stack trace with line numbers
* - Chrome: full stack trace with line and column numbers
* - Safari: line and column number for the topmost stacktrace element
* only
* - IE: no line numbers whatsoever
*
* Tries to guess names of anonymous functions by looking for assignments
* in the source code. In IE and Safari, we have to guess source file names
* by searching for function bodies inside all page scripts. This will not
* work for scripts that are loaded cross-domain.
* Here be dragons: some function names may be guessed incorrectly, and
* duplicate functions may be mismatched.
*
* TraceKit.computeStackTrace should only be used for tracing purposes.
* Logging of unhandled exceptions should be done with TraceKit.report,
* which builds on top of TraceKit.computeStackTrace and provides better
* IE support by utilizing the window.onerror event to retrieve information
* about the top of the stack.
*
* Note: In IE and Safari, no stack trace is recorded on the Error object,
* so computeStackTrace instead walks its *own* chain of callers.
* This means that:
* * in Safari, some methods may be missing from the stack trace;
* * in IE, the topmost function in the stack trace will always be the
* caller of computeStackTrace.
*
* This is okay for tracing (because you are likely to be calling
* computeStackTrace from the function you want to be the topmost element
* of the stack trace anyway), but not okay for logging unhandled
* exceptions (because your catch block will likely be far away from the
* inner function that actually caused the exception).
*
*/
TraceKit.computeStackTrace = (function computeStackTraceWrapper() {
var sourceCache = {};
/**
* Attempts to retrieve source code via XMLHttpRequest, which is used
* to look up anonymous function names.
* @param {string} url URL of source code.
* @return {string} Source contents.
*/
function loadSource(url) {
if (!TraceKit.remoteFetching) { //Only attempt request if remoteFetching is on.
return '';
}
try {
var getXHR = function() {
try {
return new window.XMLHttpRequest();
} catch (e) {
// explicitly bubble up the exception if not found
return new window.ActiveXObject('Microsoft.XMLHTTP');
}
};
var request = getXHR();
request.open('GET', url, false);
request.send('');
return request.responseText;
} catch (e) {
return '';
}
}
/**
* Retrieves source code from the source code cache.
* @param {string} url URL of source code.
* @return {Array.<string>} Source contents.
*/
function getSource(url) {
if (!isString(url)) return [];
if (!hasKey(sourceCache, url)) {
// The URL needs to be fetchable within the current domain; otherwise,
// cross-domain errors will be triggered.
var source = '';
var domain = '';
try { domain = document.domain; } catch (e) {}
if (url.indexOf(domain) !== -1) {
source = loadSource(url);
}
sourceCache[url] = source ? source.split('\n') : [];
}
return sourceCache[url];
}
/**
* Tries to use an externally loaded copy of source code to determine
* the name of a function by looking at the name of the variable it was
* assigned to, if any.
* @param {string} url URL of source code.
* @param {(string|number)} lineNo Line number in source code.
* @return {string} The function name, if discoverable.
*/
function guessFunctionName(url, lineNo) {
var reFunctionArgNames = /function ([^(]*)\(([^)]*)\)/,
reGuessFunction = /['"]?([0-9A-Za-z$_]+)['"]?\s*[:=]\s*(function|eval|new Function)/,
line = '',
maxLines = 10,
source = getSource(url),
m;
if (!source.length) {
return UNKNOWN_FUNCTION;
}
// Walk backwards from the first line in the function until we find the line which
// matches the pattern above, which is the function definition
for (var i = 0; i < maxLines; ++i) {
// stop once we have walked past the beginning of the available source
if (isUndefined(source[lineNo - i])) {
break;
}
line = source[lineNo - i] + line;
if ((m = reGuessFunction.exec(line))) {
return m[1];
} else if ((m = reFunctionArgNames.exec(line))) {
return m[1];
}
}
return UNKNOWN_FUNCTION;
}
/**
* Retrieves the surrounding lines from where an exception occurred.
* @param {string} url URL of source code.
* @param {(string|number)} line Line number in source code to centre
* around for context.
* @return {?Array.<string>} Lines of source code.
*/
function gatherContext(url, line) {
var source = getSource(url);
if (!source.length) {
return null;
}
var context = [],
// linesBefore & linesAfter are inclusive with the offending line.
// if linesOfContext is even, there will be one extra line
// *before* the offending line.
linesBefore = Math.floor(TraceKit.linesOfContext / 2),
// Add one extra line if linesOfContext is odd
linesAfter = linesBefore + (TraceKit.linesOfContext % 2),
start = Math.max(0, line - linesBefore - 1),
end = Math.min(source.length, line + linesAfter - 1);
line -= 1; // convert to 0-based index
for (var i = start; i < end; ++i) {
if (!isUndefined(source[i])) {
context.push(source[i]);
}
}
return context.length > 0 ? context : null;
}
/**
* Escapes special characters, except for whitespace, in a string to be
* used inside a regular expression as a string literal.
* @param {string} text The string.
* @return {string} The escaped string literal.
*/
function escapeRegExp(text) {
return text.replace(/[\-\[\]{}()*+?.,\\\^$|#]/g, '\\$&');
}
/**
* Escapes special characters in a string to be used inside a regular
* expression as a string literal. Also ensures that HTML entities will
* be matched the same as their literal friends.
* @param {string} body The string.
* @return {string} The escaped string.
*/
function escapeCodeAsRegExpForMatchingInsideHTML(body) {
return escapeRegExp(body).replace('<', '(?:<|&lt;)').replace('>', '(?:>|&gt;)').replace('&', '(?:&|&amp;)').replace('"', '(?:"|&quot;)').replace(/\s+/g, '\\s+');
}
/**
* Determines where a code fragment occurs in the source code.
* @param {RegExp} re The function definition.
* @param {Array.<string>} urls A list of URLs to search.
* @return {?Object.<string, (string|number)>} An object containing
* the url, line, and column number of the defined function.
*/
function findSourceInUrls(re, urls) {
var source, m;
for (var i = 0, j = urls.length; i < j; ++i) {
// console.log('searching', urls[i]);
if ((source = getSource(urls[i])).length) {
source = source.join('\n');
if ((m = re.exec(source))) {
// console.log('Found function in ' + urls[i]);
return {
'url': urls[i],
'line': source.substring(0, m.index).split('\n').length,
'column': m.index - source.lastIndexOf('\n', m.index) - 1
};
}
}
}
// console.log('no match');
return null;
}
/**
* Determines at which column a code fragment occurs on a line of the
* source code.
* @param {string} fragment The code fragment.
* @param {string} url The URL to search.
* @param {(string|number)} line The line number to examine.
* @return {?number} The column number.
*/
function findSourceInLine(fragment, url, line) {
var source = getSource(url),
re = new RegExp('\\b' + escapeRegExp(fragment) + '\\b'),
m;
line -= 1;
if (source && source.length > line && (m = re.exec(source[line]))) {
return m.index;
}
return null;
}
/**
* Determines where a function was defined within the source code.
* @param {(Function|string)} func A function reference or serialized
* function definition.
* @return {?Object.<string, (string|number)>} An object containing
* the url, line, and column number of the defined function.
*/
function findSourceByFunctionBody(func) {
if (typeof document === 'undefined')
return;
var urls = [window.location.href],
scripts = document.getElementsByTagName('script'),
body,
code = '' + func,
codeRE = /^function(?:\s+([\w$]+))?\s*\(([\w\s,]*)\)\s*\{\s*(\S[\s\S]*\S)\s*\}\s*$/,
eventRE = /^function on([\w$]+)\s*\(event\)\s*\{\s*(\S[\s\S]*\S)\s*\}\s*$/,
re,
parts,
result;
for (var i = 0; i < scripts.length; ++i) {
var script = scripts[i];
if (script.src) {
urls.push(script.src);
}
}
if (!(parts = codeRE.exec(code))) {
re = new RegExp(escapeRegExp(code).replace(/\s+/g, '\\s+'));
}
// not sure if this is really necessary, but I don’t have a test
// corpus large enough to confirm that and it was in the original.
else {
var name = parts[1] ? '\\s+' + parts[1] : '',
args = parts[2].split(',').join('\\s*,\\s*');
body = escapeRegExp(parts[3]).replace(/;$/, ';?').replace(/\s+/g, '\\s+'); // semicolon is inserted if the function ends with a comment
re = new RegExp('function' + name + '\\s*\\(\\s*' + args + '\\s*\\)\\s*{\\s*' + body + '\\s*}');
}
// look for a normal function definition
if ((result = findSourceInUrls(re, urls))) {
return result;
}
// look for an old-school event handler function
if ((parts = eventRE.exec(code))) {
var event = parts[1];
body = escapeCodeAsRegExpForMatchingInsideHTML(parts[2]);
// look for a function defined in HTML as an onXXX handler
re = new RegExp('on' + event + '=[\\\'"]\\s*' + body + '\\s*[\\\'"]', 'i');
if ((result = findSourceInUrls(re, urls[0]))) {
return result;
}
// fall back to searching for just the handler body across the scripts
re = new RegExp(body);
if ((result = findSourceInUrls(re, urls))) {
return result;
}
}
return null;
}
// Contents of Exception in various browsers.
//
// SAFARI:
// ex.message = Can't find variable: qq
// ex.line = 59
// ex.sourceId = 580238192
// ex.sourceURL = http://...
// ex.expressionBeginOffset = 96
// ex.expressionCaretOffset = 98
// ex.expressionEndOffset = 98
// ex.name = ReferenceError
//
// FIREFOX:
// ex.message = qq is not defined
// ex.fileName = http://...
// ex.lineNumber = 59
// ex.columnNumber = 69
// ex.stack = ...stack trace... (see the example below)
// ex.name = ReferenceError
//
// CHROME:
// ex.message = qq is not defined
// ex.name = ReferenceError
// ex.type = not_defined
// ex.arguments = ['aa']
// ex.stack = ...stack trace...
//
// INTERNET EXPLORER:
// ex.message = ...
// ex.name = ReferenceError
//
// OPERA:
// ex.message = ...message... (see the example below)
// ex.name = ReferenceError
// ex.opera#sourceloc = 11 (pretty much useless, duplicates the info in ex.message)
// ex.stacktrace = n/a; see 'opera:config#UserPrefs|Exceptions Have Stacktrace'
/**
* Computes stack trace information from the stack property.
* Chrome and Gecko use this property.
* @param {Error} ex
* @return {?Object.<string, *>} Stack trace information.
*/
function computeStackTraceFromStackProp(ex) {
if (isUndefined(ex.stack) || !ex.stack) return;
var chrome = /^\s*at (.*?) ?\(((?:file|https?|blob|chrome-extension|native|eval|<anonymous>).*?)(?::(\d+))?(?::(\d+))?\)?\s*$/i,
gecko = /^\s*(.*?)(?:\((.*?)\))?(?:^|@)((?:file|https?|blob|chrome|\[native).*?)(?::(\d+))?(?::(\d+))?\s*$/i,
winjs = /^\s*at (?:((?:\[object object\])?.+) )?\(?((?:ms-appx|https?|blob):.*?):(\d+)(?::(\d+))?\)?\s*$/i,
lines = ex.stack.split('\n'),
stack = [],
parts,
element,
reference = /^(.*) is undefined$/.exec(ex.message);
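// Illustrative: a Chrome frame such as
//   at foo (http://example.com/app.js:10:20)
// parses to { url: 'http://example.com/app.js', func: 'foo', line: 10, column: 20 }.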
for (var i = 0, j = lines.length; i < j; ++i) {
if ((parts = chrome.exec(lines[i]))) {
var isNative = parts[2] && parts[2].indexOf('native') !== -1;
element = {
'url': !isNative ? parts[2] : null,
'func': parts[1] || UNKNOWN_FUNCTION,
'args': isNative ? [parts[2]] : [],
'line': parts[3] ? +parts[3] : null,
'column': parts[4] ? +parts[4] : null
};
} else if ((parts = winjs.exec(lines[i]))) {
element = {
'url': parts[2],
'func': parts[1] || UNKNOWN_FUNCTION,
'args': [],
'line': +parts[3],
'column': parts[4] ? +parts[4] : null
};
} else if ((parts = gecko.exec(lines[i]))) {
element = {
'url': parts[3],
'func': parts[1] || UNKNOWN_FUNCTION,
'args': parts[2] ? parts[2].split(',') : [],
'line': parts[4] ? +parts[4] : null,
'column': parts[5] ? +parts[5] : null
};
} else {
continue;
}
if (!element.func && element.line) {
element.func = guessFunctionName(element.url, element.line);
}
if (element.line) {
element.context = gatherContext(element.url, element.line);
}
stack.push(element);
}
if (!stack.length) {
return null;
}
if (stack[0].line && !stack[0].column && reference) {
stack[0].column = findSourceInLine(reference[1], stack[0].url, stack[0].line);
} else if (!stack[0].column && !isUndefined(ex.columnNumber)) {
// Firefox exposes a columnNumber property on the error for its top frame.
// Firefox's column number is 0-based while everything else expects 1-based,
// so add 1.
stack[0].column = ex.columnNumber + 1;
}
return {
'name': ex.name,
'message': ex.message,
'url': getLocationHref(),
'stack': stack
};
}
/**
* Computes stack trace information from the stacktrace property.
* Opera 10 uses this property.
* @param {Error} ex
* @return {?Object.<string, *>} Stack trace information.
*/
function computeStackTraceFromStacktraceProp(ex) {
// Access and store the stacktrace property before doing ANYTHING
// else to it because Opera is not very good at providing it
// reliably in other circumstances.
var stacktrace = ex.stacktrace;
if (isUndefined(ex.stacktrace) || !ex.stacktrace) return;
var opera10Regex = / line (\d+).*script (?:in )?(\S+)(?:: in function (\S+))?$/i,
opera11Regex = / line (\d+), column (\d+)\s*(?:in (?:<anonymous function: ([^>]+)>|([^\)]+))\((.*)\))? in (.*):\s*$/i,
lines = stacktrace.split('\n'),
stack = [],
parts;
for (var line = 0; line < lines.length; line += 2) {
var element = null;
if ((parts = opera10Regex.exec(lines[line]))) {
element = {
'url': parts[2],
'line': +parts[1],
'column': null,
'func': parts[3],
'args':[]
};
} else if ((parts = opera11Regex.exec(lines[line]))) {
element = {
'url': parts[6],
'line': +parts[1],
'column': +parts[2],
'func': parts[3] || parts[4],
'args': parts[5] ? parts[5].split(',') : []
};
}
if (element) {
if (!element.func && element.line) {
element.func = guessFunctionName(element.url, element.line);
}
if (element.line) {
try {
element.context = gatherContext(element.url, element.line);
} catch (exc) {}
}
if (!element.context) {
element.context = [lines[line + 1]];
}
stack.push(element);
}
}
if (!stack.length) {
return null;
}
return {
'name': ex.name,
'message': ex.message,
'url': getLocationHref(),
'stack': stack
};
}
/**
* NOT TESTED.
* Computes stack trace information from an error message that includes
* the stack trace.
* Opera 9 and earlier use this method if the option to show stack
* traces is turned on in opera:config.
* @param {Error} ex
* @return {?Object.<string, *>} Stack information.
*/
function computeStackTraceFromOperaMultiLineMessage(ex) {
// Opera includes a stack trace in the exception message. An example is:
//
// Statement on line 3: Undefined variable: undefinedFunc
// Backtrace:
// Line 3 of linked script file://localhost/Users/andreyvit/Projects/TraceKit/javascript-client/sample.js: In function zzz
// undefinedFunc(a);
// Line 7 of inline#1 script in file://localhost/Users/andreyvit/Projects/TraceKit/javascript-client/sample.html: In function yyy
// zzz(x, y, z);
// Line 3 of inline#1 script in file://localhost/Users/andreyvit/Projects/TraceKit/javascript-client/sample.html: In function xxx
// yyy(a, a, a);
// Line 1 of function script
// try { xxx('hi'); return false; } catch(ex) { TraceKit.report(ex); }
// ...
var lines = ex.message.split('\n');
if (lines.length < 4) {
return null;
}
var lineRE1 = /^\s*Line (\d+) of linked script ((?:file|https?|blob)\S+)(?:: in function (\S+))?\s*$/i,
lineRE2 = /^\s*Line (\d+) of inline#(\d+) script in ((?:file|https?|blob)\S+)(?:: in function (\S+))?\s*$/i,
lineRE3 = /^\s*Line (\d+) of function script\s*$/i,
stack = [],
scripts = document.getElementsByTagName('script'),
inlineScriptBlocks = [],
parts;
for (var s in scripts) {
if (hasKey(scripts, s) && !scripts[s].src) {
inlineScriptBlocks.push(scripts[s]);
}
}
for (var line = 2; line < lines.length; line += 2) {
var item = null;
if ((parts = lineRE1.exec(lines[line]))) {
item = {
'url': parts[2],
'func': parts[3],
'args': [],
'line': +parts[1],
'column': null
};
} else if ((parts = lineRE2.exec(lines[line]))) {
item = {
'url': parts[3],
'func': parts[4],
'args': [],
'line': +parts[1],
'column': null // TODO: Check to see if inline#1 (+parts[2]) points to the script number or column number.
};
var relativeLine = (+parts[1]); // relative to the start of the <SCRIPT> block
var script = inlineScriptBlocks[parts[2] - 1];
if (script) {
var source = getSource(item.url);
if (source) {
source = source.join('\n');
var pos = source.indexOf(script.innerText);
if (pos >= 0) {
item.line = relativeLine + source.substring(0, pos).split('\n').length;
}
}
}
} else if ((parts = lineRE3.exec(lines[line]))) {
var url = window.location.href.replace(/#.*$/, '');
var re = new RegExp(escapeCodeAsRegExpForMatchingInsideHTML(lines[line + 1]));
var src = findSourceInUrls(re, [url]);
item = {
'url': url,
'func': '',
'args': [],
'line': src ? src.line : parts[1],
'column': null
};
}
if (item) {
if (!item.func) {
item.func = guessFunctionName(item.url, item.line);
}
var context = gatherContext(item.url, item.line);
var midline = (context ? context[Math.floor(context.length / 2)] : null);
if (context && midline.replace(/^\s*/, '') === lines[line + 1].replace(/^\s*/, '')) {
item.context = context;
} else {
// if (context) alert("Context mismatch. Correct midline:\n" + lines[i+1] + "\n\nMidline:\n" + midline + "\n\nContext:\n" + context.join("\n") + "\n\nURL:\n" + item.url);
item.context = [lines[line + 1]];
}
stack.push(item);
}
}
if (!stack.length) {
return null; // could not parse multiline exception message as Opera stack trace
}
return {
'name': ex.name,
'message': lines[0],
'url': getLocationHref(),
'stack': stack
};
}
/**
* Adds information about the first frame to incomplete stack traces.
* Safari and IE require this to get complete data on the first frame.
* @param {Object.<string, *>} stackInfo Stack trace information from
* one of the compute* methods.
* @param {string} url The URL of the script that caused an error.
* @param {(number|string)} lineNo The line number of the script that
* caused an error.
* @param {string=} message The error generated by the browser, which
* hopefully contains the name of the object that caused the error.
* @return {boolean} Whether or not the stack information was
* augmented.
*/
function augmentStackTraceWithInitialElement(stackInfo, url, lineNo, message) {
var initial = {
'url': url,
'line': lineNo
};
if (initial.url && initial.line) {
stackInfo.incomplete = false;
if (!initial.func) {
initial.func = guessFunctionName(initial.url, initial.line);
}
if (!initial.context) {
initial.context = gatherContext(initial.url, initial.line);
}
var reference = / '([^']+)' /.exec(message);
if (reference) {
initial.column = findSourceInLine(reference[1], initial.url, initial.line);
}
if (stackInfo.stack.length > 0) {
if (stackInfo.stack[0].url === initial.url) {
if (stackInfo.stack[0].line === initial.line) {
return false; // already in stack trace
} else if (!stackInfo.stack[0].line && stackInfo.stack[0].func === initial.func) {
stackInfo.stack[0].line = initial.line;
stackInfo.stack[0].context = initial.context;
return false;
}
}
}
stackInfo.stack.unshift(initial);
stackInfo.partial = true;
return true;
} else {
stackInfo.incomplete = true;
}
return false;
}
/**
* Computes stack trace information by walking the arguments.caller
* chain at the time the exception occurred. This will cause earlier
* frames to be missed but is the only way to get any stack trace in
* Safari and IE. The top frame is restored by
* {@link augmentStackTraceWithInitialElement}.
* @param {Error} ex
* @return {?Object.<string, *>} Stack trace information.
*/
function computeStackTraceByWalkingCallerChain(ex, depth) {
var functionName = /function\s+([_$a-zA-Z\xA0-\uFFFF][_$a-zA-Z0-9\xA0-\uFFFF]*)?\s*\(/i,
stack = [],
funcs = {},
recursion = false,
parts,
item,
source;
for (var curr = computeStackTraceByWalkingCallerChain.caller; curr && !recursion; curr = curr.caller) {
if (curr === computeStackTrace || curr === TraceKit.report) {
// console.log('skipping internal function');
continue;
}
item = {
'url': null,
'func': UNKNOWN_FUNCTION,
'line': null,
'column': null
};
if (curr.name) {
item.func = curr.name;
} else if ((parts = functionName.exec(curr.toString()))) {
item.func = parts[1];
}
if (typeof item.func === 'undefined') {
try {
item.func = parts.input.substring(0, parts.input.indexOf('{'));
} catch (e) { }
}
if ((source = findSourceByFunctionBody(curr))) {
item.url = source.url;
item.line = source.line;
if (item.func === UNKNOWN_FUNCTION) {
item.func = guessFunctionName(item.url, item.line);
}
var reference = / '([^']+)' /.exec(ex.message || ex.description);
if (reference) {
item.column = findSourceInLine(reference[1], source.url, source.line);
}
}
if (funcs['' + curr]) {
recursion = true;
} else {
funcs['' + curr] = true;
}
stack.push(item);
}
if (depth) {
// console.log('depth is ' + depth);
// console.log('stack is ' + stack.length);
stack.splice(0, depth);
}
var result = {
'name': ex.name,
'message': ex.message,
'url': getLocationHref(),
'stack': stack
};
augmentStackTraceWithInitialElement(result, ex.sourceURL || ex.fileName, ex.line || ex.lineNumber, ex.message || ex.description);
return result;
}
/**
* Computes a stack trace for an exception.
* @param {Error} ex
* @param {(string|number)=} depth
*/
function computeStackTrace(ex, depth) {
var stack = null;
depth = (depth == null ? 0 : +depth);
try {
// This must be tried first because Opera 10 *destroys*
// its stacktrace property if you try to access the stack
// property first!!
stack = computeStackTraceFromStacktraceProp(ex);
if (stack) {
return stack;
}
} catch (e) {
if (TraceKit.debug) {
throw e;
}
}
try {
stack = computeStackTraceFromStackProp(ex);
if (stack) {
return stack;
}
} catch (e) {
if (TraceKit.debug) {
throw e;
}
}
try {
stack = computeStackTraceFromOperaMultiLineMessage(ex);
if (stack) {
return stack;
}
} catch (e) {
if (TraceKit.debug) {
throw e;
}
}
try {
stack = computeStackTraceByWalkingCallerChain(ex, depth + 1);
if (stack) {
return stack;
}
} catch (e) {
if (TraceKit.debug) {
throw e;
}
}
return {
'name': ex.name,
'message': ex.message,
'url': getLocationHref()
};
}
computeStackTrace.augmentStackTraceWithInitialElement = augmentStackTraceWithInitialElement;
computeStackTrace.computeStackTraceFromStackProp = computeStackTraceFromStackProp;
computeStackTrace.guessFunctionName = guessFunctionName;
computeStackTrace.gatherContext = gatherContext;
return computeStackTrace;
}());
module.exports = TraceKit;
},{"4":4}]},{},[3])(3)
});
\ No newline at end of file