Commit d8f52c4f authored by Ruben Davila

Merge remote-tracking branch 'ce/8-12-stable' into 8-12-stable-ee

Conflicts:
	VERSION
	app/assets/stylesheets/pages/groups.scss
	app/models/project.rb
	app/models/snippet.rb
	config/routes.rb
	db/schema.rb
parents a2c1856e 3f55188d
...@@ -3,6 +3,8 @@ group: git
services:
- postgres
before_precompile: ./bin/pkgr_before_precompile.sh
env:
- SKIP_STORAGE_VALIDATION=true
targets:
debian-7: &wheezy
build_dependencies:
...@@ -25,6 +27,16 @@ targets:
- libicu52
- libpcre3
- git
ubuntu-16.04:
build_dependencies:
- libkrb5-dev
- libicu-dev
- cmake
- pkg-config
dependencies:
- libicu55
- libpcre3
- git
centos-6:
build_dependencies:
- krb5-devel
......
...@@ -770,33 +770,26 @@ Rails/ScopeArgs:
RSpec/AnyInstance:
Enabled: false
-# Check for expectations where `be(...)` can replace `eql(...)`.
-RSpec/BeEql:
-Enabled: false
-# Check that the first argument to the top level describe is a constant.
+# Check that the first argument to the top level describe is the tested class or
+# module.
RSpec/DescribeClass:
Enabled: false
-# Checks that tests use `described_class`.
-RSpec/DescribedClass:
-Enabled: false
-# Checks that the second argument to `describe` specifies a method.
+# Use `described_class` for tested class / module.
RSpec/DescribeMethod:
Enabled: false
-# Checks if an example group does not include any tests.
-RSpec/EmptyExampleGroup:
+# Checks that the second argument to top level describe is the tested method
+# name.
+RSpec/DescribedClass:
Enabled: false
-CustomIncludeMethods: []
-# Checks for long examples.
+# Checks for long example.
RSpec/ExampleLength:
Enabled: false
Max: 5
-# Checks that example descriptions do not start with "should".
+# Do not use should when describing your tests.
RSpec/ExampleWording:
Enabled: false
CustomTransform:
...@@ -805,10 +798,6 @@ RSpec/ExampleWording:
not: does not
IgnoredWords: []
-# Checks for `expect(...)` calls containing literal values.
-RSpec/ExpectActual:
-Enabled: false
# Checks the file and folder naming of the spec file.
RSpec/FilePath:
Enabled: false
...@@ -820,65 +809,19 @@ RSpec/FilePath:
RSpec/Focus:
Enabled: true
-# Checks the arguments passed to `before`, `around`, and `after`.
-RSpec/HookArgument:
-Enabled: false
-EnforcedStyle: implicit
-# Check that a consistent implict expectation style is used.
-# TODO (rspeicher): Available in rubocop-rspec 1.8.0
-# RSpec/ImplicitExpect:
-# Enabled: true
-# EnforcedStyle: is_expected
# Checks for the usage of instance variables.
RSpec/InstanceVariable:
Enabled: false
-# Checks for `subject` definitions that come after `let` definitions.
-RSpec/LeadingSubject:
-Enabled: false
-# Checks unreferenced `let!` calls being used for test setup.
-RSpec/LetSetup:
-Enabled: false
-# Check that chains of messages are not being stubbed.
-RSpec/MessageChain:
-Enabled: false
-# Checks for consistent message expectation style.
-RSpec/MessageExpectation:
-Enabled: false
-EnforcedStyle: allow
-# Checks for multiple top level describes.
+# Checks for multiple top-level describes.
RSpec/MultipleDescribes:
Enabled: false
-# Checks if examples contain too many `expect` calls.
-RSpec/MultipleExpectations:
-Enabled: false
-Max: 1
-# Checks for explicitly referenced test subjects.
-RSpec/NamedSubject:
-Enabled: false
-# Checks for nested example groups.
-RSpec/NestedGroups:
-Enabled: false
-MaxNesting: 2
-# Checks for consistent method usage for negating expectations.
+# Enforces the usage of the same method on all negative message expectations.
RSpec/NotToNot:
EnforcedStyle: not_to
Enabled: true
-# Checks for stubbed test subjects.
-RSpec/SubjectStub:
-Enabled: false
# Prefer using verifying doubles over normal doubles.
RSpec/VerifiedDoubles:
Enabled: false
...@@ -11,6 +11,7 @@ v 8.12.0 (unreleased)
- Prune events older than 12 months. (ritave)
- Prepend blank line to `Closes` message on merge request linked to issue (lukehowell)
- Fix issues/merge-request templates dropdown for forked projects
- Amends the packager.io configuration file to create a build for Ubuntu 16.04. !6247 (Jon "The Nice Guy" Spriggs)
- Filter tags by name !6121
- Update gitlab shell secret file also when it is empty. !3774 (glensc)
- Give project selection dropdowns responsive width, make non-wrapping.
...@@ -21,13 +22,16 @@ v 8.12.0 (unreleased)
- Pass the "Remember me" value to the U2F authentication form
- Display stages in valid order in stages dropdown on build page
- Only update projects.last_activity_at once per hour when creating a new event
- Cycle analytics (first iteration) !5986
- Remove vendor prefixes for linear-gradient CSS (ClemMakesApps)
- Move pushes_since_gc from the database to Redis
- Limit number of shown environments on Merge Request: show only environments for target_branch, source_branch and tags
- Add font color contrast to external label in admin area (ClemMakesApps)
- Change logo animation to CSS (ClemMakesApps)
- Instructions for enabling Git packfile bitmaps !6104
- Use Search::GlobalService.new in the `GET /projects/search/:query` endpoint
- Fix long comments in diffs messing with table width
- Add spec covering 'Gitlab::Git::committer_hash' !6433 (dandunckelman)
- Fix pagination on user snippets page
- Run CI builds with the permissions of users !5735
- Fix sorting of issues in API
...@@ -39,16 +43,20 @@ v 8.12.0 (unreleased)
- Escape search term before passing it to Regexp.new !6241 (winniehell)
- Fix pinned sidebar behavior in smaller viewports !6169
- Fix file permissions change when updating a file on the Gitlab UI !5979
- Added horizontal padding on build page sidebar on code coverage block. !6196 (Vitaly Baev)
- Change merge_error column from string to text type
- Reduce contributions calendar data payload (ClemMakesApps)
- Show all pipelines for merge requests even from discarded commits !6414
- Replace contributions calendar timezone payload with dates (ClemMakesApps)
- Add `web_url` field to issue, merge request, and snippet API objects (Ben Boeckel)
- Enable pipeline events by default !6278
- Move parsing of sidekiq ps into helper !6245 (pascalbetz)
- Added go to issue boards keyboard shortcut
- Expose `sha` and `merge_commit_sha` in merge request API (Ben Boeckel)
- Emoji can be awarded on Snippets !4456
- Set path for all JavaScript cookies to honor GitLab's subdirectory setting !5627 (Mike Greiling)
- Fix blame table layout width
- Spec testing if issue authors can read issues on private projects
- Fix bug where pagination is still displayed despite all todos marked as done (ClemMakesApps)
- Request only the LDAP attributes we need !6187
- Center build stage columns in pipeline overview (ClemMakesApps)
...@@ -75,6 +83,8 @@ v 8.12.0 (unreleased)
- Require confirmation when not logged in for unsubscribe links !6223 (Maximiliano Perez Coto)
- Add `wiki_page_events` to project hook APIs (Ben Boeckel)
- Remove Gitorious import
- Loads GFM autocomplete source only when required
- Fix issue with slash commands not loading on new issue page
- Fix inconsistent background color for filter input field (ClemMakesApps)
- Remove prefixes from transition CSS property (ClemMakesApps)
- Add Sentry logging to API calls
...@@ -94,6 +104,7 @@ v 8.12.0 (unreleased)
- Add hover state to todos !5361 (winniehell)
- Fix icon alignment of star and fork buttons !5451 (winniehell)
- Fix alignment of icon buttons !5887 (winniehell)
- Added Ubuntu 16.04 support for packager.io (JonTheNiceGuy)
- Fix markdown help references (ClemMakesApps)
- Add last commit time to repo view (ClemMakesApps)
- Fix accessibility and visibility of project list dropdown button !6140
......
...@@ -310,7 +310,7 @@ group :development, :test do
gem 'spring-commands-teaspoon', '~> 0.0.2'
gem 'rubocop', '~> 0.42.0', require: false
-gem 'rubocop-rspec', '~> 1.7.0', require: false
+gem 'rubocop-rspec', '~> 1.5.0', require: false
gem 'scss_lint', '~> 0.47.0', require: false
gem 'haml_lint', '~> 0.18.2', require: false
gem 'simplecov', '0.12.0', require: false
...@@ -331,6 +331,7 @@ group :test do
gem 'webmock', '~> 1.21.0'
gem 'test_after_commit', '~> 0.4.2'
gem 'sham_rack', '~> 1.3.6'
gem 'timecop', '~> 0.8.0'
end
group :production do
......
...@@ -650,8 +650,8 @@ GEM
rainbow (>= 1.99.1, < 3.0)
ruby-progressbar (~> 1.7)
unicode-display_width (~> 1.0, >= 1.0.1)
-rubocop-rspec (1.7.0)
-rubocop (>= 0.42.0)
+rubocop-rspec (1.5.0)
+rubocop (>= 0.40.0)
ruby-fogbugz (0.2.1)
crack (~> 0.4)
ruby-prof (0.15.9)
...@@ -980,7 +980,7 @@ DEPENDENCIES
rspec-rails (~> 3.5.0)
rspec-retry (~> 0.4.5)
rubocop (~> 0.42.0)
-rubocop-rspec (~> 1.7.0)
+rubocop-rspec (~> 1.5.0)
ruby-fogbugz (~> 0.2.1)
ruby-prof (~> 0.15.9)
sanitize (~> 2.0)
...@@ -1013,6 +1013,7 @@ DEPENDENCIES
teaspoon-jasmine (~> 2.2.0)
test_after_commit (~> 0.4.2)
thin (~> 1.7.0)
timecop (~> 0.8.0)
turbolinks (~> 2.5.0)
u2f (~> 0.2.1)
uglifier (~> 2.7.2)
......
((global) => {
const COOKIE_NAME = 'cycle_analytics_help_dismissed';
gl.CycleAnalytics = class CycleAnalytics {
constructor() {
const that = this;
this.isHelpDismissed = $.cookie(COOKIE_NAME);
this.vue = new Vue({
el: '#cycle-analytics',
name: 'CycleAnalytics',
created: this.fetchData(),
data: this.decorateData({ isLoading: true }),
methods: {
dismissLanding() {
that.dismissLanding();
}
}
});
}
fetchData(options) {
options = options || { startDate: 30 };
$.ajax({
url: $('#cycle-analytics').data('request-path'),
method: 'GET',
dataType: 'json',
contentType: 'application/json',
data: { start_date: options.startDate }
}).done((data) => {
this.vue.$data = this.decorateData(data);
this.initDropdown();
})
.error((data) => {
this.handleError(data);
})
.always(() => {
this.vue.isLoading = false;
})
}
decorateData(data) {
data.summary = data.summary || [];
data.stats = data.stats || [];
data.isHelpDismissed = this.isHelpDismissed;
data.isLoading = data.isLoading || false;
data.summary.forEach((item) => {
item.value = item.value || '-';
});
data.stats.forEach((item) => {
item.value = item.value || '- - -';
})
return data;
}
handleError(data) {
this.vue.$data = {
hasError: true,
isHelpDismissed: this.isHelpDismissed
};
new Flash('There was an error while fetching cycle analytics data.', 'alert');
}
dismissLanding() {
this.vue.isHelpDismissed = true;
$.cookie(COOKIE_NAME, true);
}
initDropdown() {
const $dropdown = $('.js-ca-dropdown');
const $label = $dropdown.find('.dropdown-label');
$dropdown.find('li a').off('click').on('click', (e) => {
e.preventDefault();
const $target = $(e.currentTarget);
const value = $target.data('value');
$label.text($target.text().trim());
this.vue.isLoading = true;
this.fetchData({ startDate: value });
})
}
}
})(window.gl || (window.gl = {}));
...@@ -94,6 +94,11 @@
break;
case "projects:merge_requests:conflicts":
window.mcui = new MergeConflictResolver()
break;
case 'projects:merge_requests:index':
shortcut_handler = new ShortcutsNavigation();
Issuable.init();
break;
case 'dashboard:activity':
new Activities();
break;
...@@ -192,6 +197,9 @@
new gl.ProtectedBranchCreate();
new gl.ProtectedBranchEditList();
break;
case 'projects:cycle_analytics:show':
new gl.CycleAnalytics();
break;
}
switch (path.first()) {
case 'admin':
......
...@@ -99,8 +99,7 @@
.top-area {
@include clearfix;
-border-bottom: 1px solid #eee;
+border-bottom: 1px solid $btn-gray-hover;
.nav-text {
padding-top: 16px;
......
...@@ -109,6 +109,10 @@
width: 100%;
}
.block-first {
padding: 5px 16px 11px;
}
.js-build-variable {
color: $code-color;
}
......
#cycle-analytics {
margin: 24px auto 0;
width: 800px;
position: relative;
.panel {
.content-block {
padding: 24px 0;
border-bottom: none;
position: relative;
}
.column {
text-align: center;
.header {
font-size: 30px;
line-height: 38px;
font-weight: normal;
margin: 0;
}
.text {
color: $layout-link-gray;
margin: 0;
}
&:last-child {
text-align: right;
}
}
.dropdown {
position: relative;
top: 13px;
}
}
.bordered-box {
border: 1px solid $border-color;
@include border-radius($border-radius-default);
position: relative;
}
.content-list {
li {
padding: 18px $gl-padding $gl-padding;
.container-fluid {
padding: 0;
}
}
.title-col {
p {
margin: 0;
&.title {
line-height: 19px;
font-size: 15px;
font-weight: 600;
}
&:text {
color: #8c8c8c;
}
}
}
.value-col {
text-align: right;
span {
line-height: 42px;
}
}
}
.landing {
margin-bottom: $gl-padding;
overflow: hidden;
.dismiss-icon {
position: absolute;
right: $gl-padding;
cursor: pointer;
color: #b2b2b2;
}
svg {
margin: 0 20px;
float: left;
width: 136px;
height: 136px;
}
.inner-content {
width: 480px;
float: left;
h4 {
color: $gl-text-color;
font-size: 17px;
}
p {
color: #8c8c8c;
margin-bottom: $gl-padding;
}
}
}
.fa-spinner {
font-size: 28px;
position: relative;
margin-left: -20px;
left: 50%;
margin-top: 36px;
}
}
...@@ -66,3 +66,16 @@
margin-bottom: $gl-padding;
}
}
.groups-header {
@media (min-width: $screen-sm-min) {
.nav-links {
width: 35%;
}
.nav-controls {
width: 65%;
}
}
}
...@@ -12,11 +12,18 @@
.snippet-file-content {
border-radius: 3px;
margin-bottom: $gl-padding;
.btn-clipboard {
@extend .btn;
}
}
.project-snippets .awards {
border-bottom: 1px solid $table-border-color;
padding-bottom: $gl-padding;
}
.snippet-title {
font-size: 24px;
font-weight: 600;
......
...@@ -10,7 +10,9 @@ module ToggleAwardEmoji
if awardable.user_can_award?(current_user, name)
awardable.toggle_award_emoji(name, current_user)
-TodoService.new.new_award_emoji(to_todoable(awardable), current_user)
todoable = to_todoable(awardable)
TodoService.new.new_award_emoji(todoable, current_user) if todoable
render json: { ok: true }
else
...@@ -24,8 +26,10 @@ module ToggleAwardEmoji
case awardable
when Note
awardable.noteable
-else
+when MergeRequest, Issue
awardable
when Snippet
nil
end
end
......
...@@ -11,10 +11,8 @@ class JwtController < ApplicationController
service = SERVICES[params[:service]]
return head :not_found unless service
-@authentication_result ||= Gitlab::Auth::Result.new
result = service.new(@authentication_result.project, @authentication_result.actor, auth_params).
-execute(authentication_abilities: @authentication_result.authentication_abilities)
+execute(authentication_abilities: @authentication_result.authentication_abilities || [])
render json: result, status: result[:http_status]
end
...@@ -22,6 +20,8 @@ class JwtController < ApplicationController
private
def authenticate_project_or_user
@authentication_result = Gitlab::Auth::Result.new
authenticate_with_http_basic do |login, password|
@authentication_result = Gitlab::Auth.find_for_git_client(login, password, project: nil, ip: request.ip)
......
class Projects::CycleAnalyticsController < Projects::ApplicationController
include ActionView::Helpers::DateHelper
include ActionView::Helpers::TextHelper
before_action :authorize_read_cycle_analytics!
def show
@cycle_analytics = CycleAnalytics.new(@project, from: parse_start_date)
respond_to do |format|
format.html
format.json { render json: cycle_analytics_json }
end
end
private
def parse_start_date
case cycle_analytics_params[:start_date]
when '30' then 30.days.ago
when '90' then 90.days.ago
else 90.days.ago
end
end
def cycle_analytics_params
return {} unless params[:cycle_analytics].present?
{ start_date: params[:cycle_analytics][:start_date] }
end
def cycle_analytics_json
cycle_analytics_view_data = [[:issue, "Issue", "Time before an issue gets scheduled"],
[:plan, "Plan", "Time before an issue starts implementation"],
[:code, "Code", "Time until first merge request"],
[:test, "Test", "Total test time for all commits/merges"],
[:review, "Review", "Time between merge request creation and merge/close"],
[:staging, "Staging", "From merge request merge until deploy to production"],
[:production, "Production", "From issue creation until deploy to production"]]
stats = cycle_analytics_view_data.reduce([]) do |stats, (stage_method, stage_text, stage_description)|
value = @cycle_analytics.send(stage_method).presence
stats << {
title: stage_text,
description: stage_description,
value: value && !value.zero? ? distance_of_time_in_words(value) : nil
}
stats
end
issues = @cycle_analytics.summary.new_issues
commits = @cycle_analytics.summary.commits
deploys = @cycle_analytics.summary.deploys
summary = [
{ title: "New Issue".pluralize(issues), value: issues },
{ title: "Commit".pluralize(commits), value: commits },
{ title: "Deploy".pluralize(deploys), value: deploys }
]
{
summary: summary,
stats: stats
}
end
end
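For reference, the hash built by cycle_analytics_json above is the payload the Vue component's decorateData consumes. A minimal sketch of its shape, with purely illustrative placeholder values:
# Hypothetical example of what cycle_analytics_json returns; values are placeholders.
{
  summary: [
    { title: "New Issues", value: 10 },
    { title: "Commits", value: 25 },
    { title: "Deploys", value: 2 }
  ],
  stats: [
    { title: "Issue", description: "Time before an issue gets scheduled", value: "2 days" },
    { title: "Staging", description: "From merge request merge until deploy to production", value: nil }
  ]
}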
...@@ -32,11 +32,11 @@ class Projects::GitHttpClientController < Projects::ApplicationController
return # Allow access
end
elsif allow_kerberos_spnego_auth? && spnego_provided?
-user = find_kerberos_user
-if user
+kerberos_user = find_kerberos_user
+if kerberos_user
@authentication_result = Gitlab::Auth::Result.new(
-user, nil, :kerberos, Gitlab::Auth.full_authentication_abilities)
+kerberos_user, nil, :kerberos, Gitlab::Auth.full_authentication_abilities)
send_final_spnego_response
return # Allow access
......
class Projects::SnippetsController < Projects::ApplicationController
include ToggleAwardEmoji
before_action :module_enabled
-before_action :snippet, only: [:show, :edit, :destroy, :update, :raw]
+before_action :snippet, only: [:show, :edit, :destroy, :update, :raw, :toggle_award_emoji]
# Allow read any snippet
before_action :authorize_read_project_snippet!, except: [:new, :create, :index]
...@@ -80,6 +82,7 @@ class Projects::SnippetsController < Projects::ApplicationController
def snippet
@snippet ||= @project.snippets.find(params[:id])
end
alias_method :awardable, :snippet
def authorize_read_project_snippet!
return render_404 unless can?(current_user, :read_project_snippet, @snippet)
......
class SnippetsController < ApplicationController
include ToggleAwardEmoji
before_action :snippet, only: [:show, :edit, :destroy, :update, :raw]
# Allow read snippet
...@@ -85,6 +87,7 @@ class SnippetsController < ApplicationController
PersonalSnippet.find(params[:id])
end
end
alias_method :awardable, :snippet
def authorize_read_snippet!
authenticate_user! unless can?(current_user, :read_personal_snippet, @snippet)
......
module AwardEmojiHelper
def toggle_award_url(awardable)
if @project
url_for([:toggle_award_emoji, @project.namespace.becomes(Namespace), @project, awardable])
else
url_for([:toggle_award_emoji, awardable])
end
end
end
...@@ -46,6 +46,10 @@ module GitlabRoutingHelper
namespace_project_environments_path(project.namespace, project, *args)
end
def project_cycle_analytics_path(project, *args)
namespace_project_cycle_analytics_path(project.namespace, project, *args)
end
def project_builds_path(project, *args)
namespace_project_builds_path(project.namespace, project, *args)
end
...@@ -66,6 +70,10 @@ module GitlabRoutingHelper
namespace_project_runner_path(@project.namespace, @project, runner, *args)
end
def environment_path(environment, *args)
namespace_project_environment_path(environment.project.namespace, environment.project, environment, *args)
end
def issue_path(entity, *args)
namespace_project_issue_path(entity.project.namespace, entity.project, entity, *args)
end
...@@ -98,6 +106,14 @@ module GitlabRoutingHelper
end
end
def toggle_award_emoji_personal_snippet_path(*args)
toggle_award_emoji_snippet_path(*args)
end
def toggle_award_emoji_namespace_project_project_snippet_path(*args)
toggle_award_emoji_namespace_project_snippet_path(*args)
end
## Members
def project_members_url(project, *args)
namespace_project_project_members_url(project.namespace, project)
......
...@@ -494,8 +494,11 @@ module Ci
end
def hide_secrets(trace)
-trace = Ci::MaskSecret.mask(trace, project.runners_token) if project
-trace = Ci::MaskSecret.mask(trace, token)
+return unless trace
trace = trace.dup
Ci::MaskSecret.mask!(trace, project.runners_token) if project
Ci::MaskSecret.mask!(trace, token)
trace
end
end
......
...@@ -56,6 +56,16 @@ module Ci
pipeline.finished_at = Time.now
end
after_transition [:created, :pending] => :running do |pipeline|
MergeRequest::Metrics.where(merge_request_id: pipeline.merge_requests.map(&:id)).
update_all(latest_build_started_at: pipeline.started_at, latest_build_finished_at: nil)
end
after_transition any => [:success] do |pipeline|
MergeRequest::Metrics.where(merge_request_id: pipeline.merge_requests.map(&:id)).
update_all(latest_build_finished_at: pipeline.finished_at)
end
before_transition do |pipeline|
pipeline.update_duration
end
...@@ -280,6 +290,16 @@ module Ci
project.execute_services(data, :pipeline_hooks)
end
# Merge requests for which the current pipeline is running against
# the merge request's latest commit.
def merge_requests
@merge_requests ||=
begin
project.merge_requests.where(source_branch: self.ref).
select { |merge_request| merge_request.pipeline.try(:id) == self.id }
end
end
private
def pipeline_data
......
...@@ -71,6 +71,12 @@ module Awardable
end
end
def user_authored?(current_user)
author = self.respond_to?(:author) ? self.author : self.user
author == current_user
end
def awarded_emoji?(emoji_name, current_user)
award_emoji.where(name: emoji_name, user: current_user).exists?
end
......
...@@ -28,10 +28,13 @@ module Issuable
loaded? && to_a.all? { |note| note.association(:award_emoji).loaded? }
end
end
has_many :label_links, as: :target, dependent: :destroy
has_many :labels, through: :label_links
has_many :todos, as: :target, dependent: :destroy
has_one :metrics
validates :author, presence: true
validates :title, presence: true, length: { within: 0..255 }
...@@ -84,6 +87,7 @@ module Issuable
acts_as_paranoid
after_save :update_assignee_cache_counts, if: :assignee_id_changed?
after_save :record_metrics
def update_assignee_cache_counts
# make sure we flush the cache for both the old *and* new assignee
...@@ -199,10 +203,6 @@ module Issuable
end
end
-def user_authored?(user)
-user == author
-end
def subscribed_without_subscriptions?(user)
participants(user).include?(user)
end
...@@ -289,4 +289,9 @@ module Issuable
def can_move?(*)
false
end
def record_metrics
metrics = self.metrics || create_metrics
metrics.record!
end
end
class CycleAnalytics
include Gitlab::Database::Median
include Gitlab::Database::DateTime
def initialize(project, from:)
@project = project
@from = from
end
def summary
@summary ||= Summary.new(@project, from: @from)
end
def issue
calculate_metric(:issue,
Issue.arel_table[:created_at],
[Issue::Metrics.arel_table[:first_associated_with_milestone_at],
Issue::Metrics.arel_table[:first_added_to_board_at]])
end
def plan
calculate_metric(:plan,
[Issue::Metrics.arel_table[:first_associated_with_milestone_at],
Issue::Metrics.arel_table[:first_added_to_board_at]],
Issue::Metrics.arel_table[:first_mentioned_in_commit_at])
end
def code
calculate_metric(:code,
Issue::Metrics.arel_table[:first_mentioned_in_commit_at],
MergeRequest.arel_table[:created_at])
end
def test
calculate_metric(:test,
MergeRequest::Metrics.arel_table[:latest_build_started_at],
MergeRequest::Metrics.arel_table[:latest_build_finished_at])
end
def review
calculate_metric(:review,
MergeRequest.arel_table[:created_at],
MergeRequest::Metrics.arel_table[:merged_at])
end
def staging
calculate_metric(:staging,
MergeRequest::Metrics.arel_table[:merged_at],
MergeRequest::Metrics.arel_table[:first_deployed_to_production_at])
end
def production
calculate_metric(:production,
Issue.arel_table[:created_at],
MergeRequest::Metrics.arel_table[:first_deployed_to_production_at])
end
private
def calculate_metric(name, start_time_attrs, end_time_attrs)
cte_table = Arel::Table.new("cte_table_for_#{name}")
# Build a `SELECT` query. We find the first of the `end_time_attrs` that isn't `NULL` (call this end_time).
# Next, we find the first of the start_time_attrs that isn't `NULL` (call this start_time).
# We compute the (end_time - start_time) interval, and give it an alias based on the current
# cycle analytics stage.
interval_query = Arel::Nodes::As.new(
cte_table,
subtract_datetimes(base_query, end_time_attrs, start_time_attrs, name.to_s))
median_datetime(cte_table, interval_query, name)
end
# Join table with a row for every <issue,merge_request> pair (where the merge request
# closes the given issue) with issue and merge request metrics included. The metrics
# are loaded with an inner join, so issues / merge requests without metrics are
# automatically excluded.
def base_query
arel_table = MergeRequestsClosingIssues.arel_table
# Load issues
query = arel_table.join(Issue.arel_table).on(Issue.arel_table[:id].eq(arel_table[:issue_id])).
join(Issue::Metrics.arel_table).on(Issue.arel_table[:id].eq(Issue::Metrics.arel_table[:issue_id])).
where(Issue.arel_table[:project_id].eq(@project.id)).
where(Issue.arel_table[:deleted_at].eq(nil)).
where(Issue.arel_table[:created_at].gteq(@from))
# Load merge_requests
query = query.join(MergeRequest.arel_table, Arel::Nodes::OuterJoin).
on(MergeRequest.arel_table[:id].eq(arel_table[:merge_request_id])).
join(MergeRequest::Metrics.arel_table).
on(MergeRequest.arel_table[:id].eq(MergeRequest::Metrics.arel_table[:merge_request_id]))
# Limit to merge requests that have been deployed to production after `@from`
query.where(MergeRequest::Metrics.arel_table[:first_deployed_to_production_at].gteq(@from))
end
end
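Each stage method above builds a CTE of per-record durations via subtract_datetimes and then takes the median with median_datetime. A hedged usage sketch (project and date range are placeholders; return values are durations suitable for distance_of_time_in_words, or nil when no rows match):
# Hypothetical usage; `project` is assumed to be an existing Project.
analytics = CycleAnalytics.new(project, from: 90.days.ago)
analytics.issue      # median time from issue creation to milestone/board assignment
analytics.staging    # median time from merge request merge to first production deploy
analytics.summary.new_issues  # count of issues created since `from`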
class CycleAnalytics
class Summary
def initialize(project, from:)
@project = project
@from = from
end
def new_issues
@project.issues.created_after(@from).count
end
def commits
repository = @project.repository.raw_repository
if @project.default_branch
repository.log(ref: @project.default_branch, after: @from).count
end
end
def deploys
@project.deployments.where("created_at > ?", @from).count
end
end
end
...@@ -42,4 +42,38 @@ class Deployment < ActiveRecord::Base
project.repository.is_ancestor?(commit.id, sha)
end
def update_merge_request_metrics!
return unless environment.update_merge_request_metrics?
merge_requests = project.merge_requests.
joins(:metrics).
where(target_branch: self.ref, merge_request_metrics: { first_deployed_to_production_at: nil }).
where("merge_request_metrics.merged_at <= ?", self.created_at)
if previous_deployment
merge_requests = merge_requests.where("merge_request_metrics.merged_at >= ?", previous_deployment.created_at)
end
# Need to use `map` instead of `select` because MySQL doesn't allow `SELECT`ing from the same table
# that we're updating.
merge_request_ids =
if Gitlab::Database.postgresql?
merge_requests.select(:id)
elsif Gitlab::Database.mysql?
merge_requests.map(&:id)
end
MergeRequest::Metrics.
where(merge_request_id: merge_request_ids, first_deployed_to_production_at: nil).
update_all(first_deployed_to_production_at: self.created_at)
end
def previous_deployment
@previous_deployment ||=
project.deployments.joins(:environment).
where(environments: { name: self.environment.name }, ref: self.ref).
where.not(id: self.id).
take
end
end
...@@ -43,4 +43,8 @@ class Environment < ActiveRecord::Base
last_deployment.includes_commit?(commit)
end
def update_merge_request_metrics?
self.name == "production"
end
end
...@@ -29,6 +29,8 @@ class Issue < ActiveRecord::Base
has_many :events, as: :target, dependent: :destroy
has_many :merge_requests_closing_issues, class_name: 'MergeRequestsClosingIssues', dependent: :delete_all
validates :project, presence: true
scope :cared, ->(user) { where(assignee_id: user) }
...@@ -44,6 +46,8 @@ class Issue < ActiveRecord::Base
scope :order_weight_desc, -> { reorder('weight IS NOT NULL, weight DESC') }
scope :order_weight_asc, -> { reorder('weight ASC') }
scope :created_after, -> (datetime) { where("created_at >= ?", datetime) }
attr_spammable :title, spam_title: true
attr_spammable :description, spam_description: true
......
class Issue::Metrics < ActiveRecord::Base
belongs_to :issue
def record!
if issue.milestone_id.present? && self.first_associated_with_milestone_at.blank?
self.first_associated_with_milestone_at = Time.now
end
if issue_assigned_to_list_label? && self.first_added_to_board_at.blank?
self.first_added_to_board_at = Time.now
end
self.save
end
private
def issue_assigned_to_list_label?
issue.labels.any? { |label| label.lists.present? }
end
end
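Issue::Metrics#record! is not called directly; it runs through the after_save :record_metrics hook added to Issuable above, so the timestamps are captured as a side effect of ordinary issue saves. A hedged sketch (issue and milestone are placeholder records):
# Hypothetical flow; record_metrics creates the metrics row on first save if needed.
issue.update(milestone: milestone)
issue.metrics.first_associated_with_milestone_at  # set on this save, left unchanged afterwards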
...@@ -19,6 +19,8 @@ class MergeRequest < ActiveRecord::Base
has_many :events, as: :target, dependent: :destroy
has_many :merge_requests_closing_issues, class_name: 'MergeRequestsClosingIssues', dependent: :delete_all
serialize :merge_params, Hash
after_create :ensure_merge_request_diff, unless: :importing?
...@@ -522,6 +524,19 @@ class MergeRequest < ActiveRecord::Base
target_project
end
# If the merge request closes any issues, save this information in the
# `MergeRequestsClosingIssues` model. This is a performance optimization.
# Calculating this information for a number of merge requests requires
# running `ReferenceExtractor` on each of them separately.
def cache_merge_request_closes_issues!(current_user = self.author)
transaction do
self.merge_requests_closing_issues.delete_all
closes_issues(current_user).each do |issue|
self.merge_requests_closing_issues.create!(issue: issue)
end
end
end
def closes_issue?(issue)
closes_issues.include?(issue)
end
...@@ -529,7 +544,8 @@ class MergeRequest < ActiveRecord::Base
# Return the set of issues that will be closed if this merge request is accepted.
def closes_issues(current_user = self.author)
if target_branch == project.default_branch
-messages = commits.map(&:safe_message) << description
+messages = [description]
messages.concat(commits.map(&:safe_message)) if merge_request_diff
Gitlab::ClosingIssueExtractor.new(project, current_user).
closed_by_message(messages.join("\n"))
...@@ -771,9 +787,12 @@ class MergeRequest < ActiveRecord::Base
def environments
return [] unless diff_head_commit
-target_project.environments.select do |environment|
-environment.includes_commit?(diff_head_commit)
-end
+environments = source_project.environments_for(
+source_branch, diff_head_commit)
+environments += target_project.environments_for(
target_branch, diff_head_commit, with_tags: true)
environments.uniq
end
def state_human_name
...@@ -878,10 +897,23 @@ class MergeRequest < ActiveRecord::Base
end
def all_pipelines
-@all_pipelines ||=
-if diff_head_sha && source_project
-source_project.pipelines.order(id: :desc).where(sha: commits_sha, ref: source_branch)
+return unless source_project
+@all_pipelines ||= begin
sha = if persisted?
all_commits_sha
else
diff_head_sha
end
source_project.pipelines.order(id: :desc).
where(sha: sha, ref: source_branch)
end
end
# Note that this could also return SHA from now dangling commits
def all_commits_sha
merge_request_diffs.flat_map(&:commits_sha).uniq
end
def merge_commit
......
class MergeRequest::Metrics < ActiveRecord::Base
belongs_to :merge_request
def record!
if merge_request.merged? && self.merged_at.blank?
self.merged_at = Time.now
end
self.save
end
end
...@@ -117,6 +117,14 @@ class MergeRequestDiff < ActiveRecord::Base
project.commit(head_commit_sha)
end
def commits_sha
if @commits
commits.map(&:sha)
else
st_commits.map { |commit| commit[:id] }
end
end
def diff_refs
return unless start_commit_sha || base_commit_sha
......
class MergeRequestsClosingIssues < ActiveRecord::Base
belongs_to :merge_request
belongs_to :issue
validates :merge_request_id, uniqueness: { scope: :issue_id }, presence: true
validates :issue_id, presence: true
end
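This join model is populated by MergeRequest#cache_merge_request_closes_issues! (see the model diff above) and refreshed on push by MergeRequests::RefreshService. A hedged sketch of reading the cached pairs back (records are placeholders):
# Hypothetical read path.
merge_request.merge_requests_closing_issues.pluck(:issue_id)
MergeRequestsClosingIssues.where(issue_id: issue.id).count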
...@@ -229,10 +229,6 @@ class Note < ActiveRecord::Base
end
end
-def user_authored?(user)
-user == author
-end
def award_emoji?
can_be_award_emoji? && contains_emoji_only?
end
......
...@@ -1562,6 +1562,22 @@ class Project < ActiveRecord::Base
size_limit_enabled? && (size_mb > actual_size_limit || size_mb + repository_and_lfs_size > actual_size_limit)
end
def environments_for(ref, commit, with_tags: false)
environment_ids = deployments.group(:environment_id).
select(:environment_id)
environment_ids =
if with_tags
environment_ids.where('ref=? OR tag IS TRUE', ref)
else
environment_ids.where(ref: ref)
end
environments.where(id: environment_ids).select do |environment|
environment.includes_commit?(commit)
end
end
private
def pushes_since_gc_redis_key
......
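Project#environments_for above is what MergeRequest#environments now calls for both the source and target branches. A hedged usage sketch (project, ref and commit are placeholders):
# Hypothetical calls; only environments whose deployments for the given ref
# (or for any tag, when with_tags: true) include `commit` are returned.
project.environments_for('master', commit)
project.environments_for('v8.12.0', commit, with_tags: true)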
...@@ -22,6 +22,12 @@ class ProjectFeature < ActiveRecord::Base
belongs_to :project
default_value_for :builds_access_level, value: ENABLED, allows_nil: false
default_value_for :issues_access_level, value: ENABLED, allows_nil: false
default_value_for :merge_requests_access_level, value: ENABLED, allows_nil: false
default_value_for :snippets_access_level, value: ENABLED, allows_nil: false
default_value_for :wiki_access_level, value: ENABLED, allows_nil: false
def feature_available?(feature, user)
raise ArgumentError, 'invalid project feature' unless FEATURES.include?(feature)
......
...@@ -924,7 +924,7 @@ class Repository
def get_committer_and_author(user, email: nil, name: nil)
committer = user_to_committer(user)
-author = name && email ? Gitlab::Git::committer_hash(email: email, name: name) : committer
+author = Gitlab::Git::committer_hash(email: email, name: name) || committer
{
author: author,
......
...@@ -5,6 +5,7 @@ class Snippet < ActiveRecord::Base
include Referable
include Sortable
include Elastic::SnippetsSearch
include Awardable
default_value_for :visibility_level, Snippet::PRIVATE
......
...@@ -49,6 +49,7 @@ class ProjectPolicy < BasePolicy
can! :create_issue
can! :create_note
can! :upload_file
can! :read_cycle_analytics
end
def reporter_access!
...@@ -224,6 +225,7 @@ class ProjectPolicy < BasePolicy
can! :read_commit_status
can! :read_container_image
can! :download_code
can! :read_cycle_analytics
# NOTE: may be overridden by IssuePolicy
can! :read_issue
......
...@@ -5,7 +5,7 @@ module Auth
AUDIENCE = 'container_registry'
def execute(authentication_abilities:)
-@authentication_abilities = authentication_abilities || []
+@authentication_abilities = authentication_abilities
return error('not found', 404) unless registry.enabled
......
...@@ -4,7 +4,7 @@ class CreateDeploymentService < BaseService
def execute(deployable = nil)
environment = find_or_create_environment
-project.deployments.create(
+deployment = project.deployments.create(
environment: environment,
ref: params[:ref],
tag: params[:tag],
...@@ -12,6 +12,10 @@ class CreateDeploymentService < BaseService
user: current_user,
deployable: deployable
)
deployment.update_merge_request_metrics!
deployment
end
private
......
...@@ -139,6 +139,7 @@ class GitPushService < BaseService
end
commit.create_cross_references!(authors[commit], closed_issues)
update_issue_metrics(commit, authors)
end
end
...@@ -191,4 +192,11 @@ class GitPushService < BaseService
def branch_name
@branch_name ||= Gitlab::Git.ref_name(params[:ref])
end
def update_issue_metrics(commit, authors)
mentioned_issues = commit.all_references(authors[commit]).issues
Issue::Metrics.where(issue_id: mentioned_issues.map(&:id), first_mentioned_in_commit_at: nil).
update_all(first_mentioned_in_commit_at: commit.committed_date)
end
end
...@@ -157,6 +157,10 @@ class IssuableBaseService < BaseService
# To be overridden by subclasses
end
def after_update(issuable)
# To be overridden by subclasses
end
def update_issuable(issuable, attributes)
issuable.with_transaction_returning_status do
issuable.update(attributes.merge(updated_by: current_user))
...@@ -182,6 +186,7 @@ class IssuableBaseService < BaseService
end
handle_changes(issuable, old_labels: old_labels, old_mentioned_users: old_mentioned_users)
after_update(issuable)
issuable.create_new_cross_references!(current_user)
execute_hooks(issuable, 'update')
end
......
...@@ -20,6 +20,7 @@ module MergeRequests
event_service.open_mr(issuable, current_user)
notification_service.new_merge_request(issuable, current_user)
todo_service.new_merge_request(issuable, current_user)
issuable.cache_merge_request_closes_issues!(current_user)
end
end
end
...@@ -13,6 +13,7 @@ module MergeRequests
reload_merge_requests
reset_merge_when_build_succeeds
mark_pending_todos_done
cache_merge_requests_closing_issues
# Leave a system note if a branch was deleted/added
if branch_added? || branch_removed?
...@@ -156,6 +157,14 @@ module MergeRequests
end
end
# If the merge requests closes any issues, save this information in the
# `MergeRequestsClosingIssues` model (as a performance optimization).
def cache_merge_requests_closing_issues
@project.merge_requests.where(source_branch: @branch_name).each do |merge_request|
merge_request.cache_merge_request_closes_issues!(@current_user)
end
end
def filter_merge_requests(merge_requests)
merge_requests.uniq.select(&:source_project)
end
......
...@@ -87,5 +87,9 @@ module MergeRequests
def close_service
MergeRequests::CloseService
end
def after_update(issuable)
issuable.cache_merge_request_closes_issues!(current_user)
end
end
end
- grouped_emojis = awardable.grouped_awards(with_thumbs: inline)
-.awards.js-awards-block{ class: ("hidden" if !inline && grouped_emojis.empty?), data: { award_url: url_for([:toggle_award_emoji, @project.namespace.becomes(Namespace), @project, awardable]) } }
+.awards.js-awards-block{ class: ("hidden" if !inline && grouped_emojis.empty?), data: { award_url: toggle_award_url(awardable) } }
- awards_sort(grouped_emojis).each do |emoji, awards|
%button.btn.award-control.js-emoji-btn.has-tooltip{ type: "button", class: (award_active_class(awards, current_user)), data: { placement: "bottom", title: award_user_list(awards, current_user) } }
= emoji_icon(emoji, sprite: false)
......
...@@ -23,7 +23,7 @@
.cover-desc.description
= markdown(@group.description, pipeline: :description)
-%div{ class: container_class }
+%div.groups-header{ class: container_class }
.top-area
%ul.nav-links
%li.active
......
...@@ -47,7 +47,7 @@
Repository
- if project_nav_tab? :pipelines
-= nav_link(controller: [:pipelines, :builds, :environments]) do
+= nav_link(controller: [:pipelines, :builds, :environments, :cycle_analytics]) do
= link_to project_pipelines_path(@project), title: 'Pipelines', class: 'shortcuts-pipelines' do
%span
Pipelines
......
...@@ -14,9 +14,6 @@
window.project_uploads_path = "#{namespace_project_uploads_path project.namespace,project}";
window.preview_markdown_path = "#{preview_markdown_path}";
-- content_for :scripts_body do
-= render "layouts/init_auto_complete" if current_user
- content_for :header_content do
.js-dropdown-menu-projects
.dropdown-menu.dropdown-select.dropdown-menu-projects
......
...@@ -7,3 +7,6 @@
= text_area_tag attr, nil, class: classes, placeholder: placeholder
%a.zen-control.zen-control-leave.js-zen-leave{ href: "#" }
= icon('compress')
- content_for :scripts_body do
= render "layouts/init_auto_complete" if current_user && (@target_project || @project)
- @no_container = true
- page_title "Cycle Analytics"
= render "projects/pipelines/head"
#cycle-analytics{"v-cloak" => "true", data: { request_path: project_cycle_analytics_path(@project)}}
.bordered-box.landing.content-block{"v-if" => "!isHelpDismissed"}
= icon('times', class: 'dismiss-icon', "@click": "dismissLanding()")
= custom_icon('icon_cycle_analytics_splash')
.inner-content
%h4
Introducing Cycle Analytics
%p
Cycle Analytics gives an overview of how much time it takes to go from idea to production in your project.
= link_to "Read more", help_page_path('user/project/cycle_analytics'), target: '_blank', class: 'btn'
= icon("spinner spin", "v-show" => "isLoading")
.wrapper{"v-show" => "!isLoading && !hasError"}
.panel.panel-default
.panel-heading
Pipeline Health
.content-block
.container-fluid
.row
.col-xs-3.column{"v-for" => "item in summary"}
%h3.header {{item.value}}
%p.text {{item.title}}
.col-xs-3.column
.dropdown.inline.js-ca-dropdown
%button.dropdown-menu-toggle{"data-toggle" => "dropdown", :type => "button"}
%span.dropdown-label Last 30 days
%i.fa.fa-chevron-down
%ul.dropdown-menu.dropdown-menu-align-right
%li
%a{'href' => "#", 'data-value' => '30'}
Last 30 days
%li
%a{'href' => "#", 'data-value' => '90'}
Last 90 days
.bordered-box
%ul.content-list
%li{"v-for" => "item in stats"}
.container-fluid
.row
.col-xs-10.title-col
%p.title
{{item.title}}
%p.text
{{item.description}}
.col-xs-2.value-col
%span
{{item.value}}
...@@ -43,14 +43,15 @@
= icon("times-circle")
Could not connect to the CI server. Please check your settings and try again.
-- @merge_request.environments.each do |environment|
+- @merge_request.environments.sort_by(&:name).each do |environment|
- if can?(current_user, :read_environment, environment)
.mr-widget-heading
.ci_widget.ci-success
= ci_icon_for_status("success")
%span.hidden-sm
Deployed to
= succeed '.' do
-= link_to environment.name, namespace_project_environment_path(@project.namespace, @project, environment), class: 'environment'
+= link_to environment.name, environment_path(environment), class: 'environment'
- external_url = environment.external_url
- if external_url
= link_to external_url, target: '_blank' do
......
...@@ -19,3 +19,9 @@
= link_to project_environments_path(@project), title: 'Environments', class: 'shortcuts-environments' do
%span
Environments
- if can?(current_user, :read_cycle_analytics, @project)
= nav_link(controller: %w(cycle_analytics)) do
= link_to project_cycle_analytics_path(@project), title: 'Cycle Analytics' do
%span
Cycle Analytics
...@@ -3,7 +3,7 @@
= link_to new_namespace_project_snippet_path(@project.namespace, @project), class: 'btn btn-grouped btn-create new-snippet-link', title: "New Snippet" do
New Snippet
- if can?(current_user, :update_project_snippet, @snippet)
-= link_to namespace_project_snippet_path(@project.namespace, @project, @snippet), method: :delete, data: { confirm: "Are you sure?" }, class: "btn btn-grouped btn-warning", title: 'Delete Snippet' do
+= link_to namespace_project_snippet_path(@project.namespace, @project, @snippet), method: :delete, data: { confirm: "Are you sure?" }, class: "btn btn-grouped btn-danger", title: 'Delete Snippet' do
Delete
- if can?(current_user, :update_project_snippet, @snippet)
= link_to edit_namespace_project_snippet_path(@project.namespace, @project, @snippet), class: "btn btn-grouped snippable-edit" do
......
...@@ -2,7 +2,8 @@ ...@@ -2,7 +2,8 @@
= render 'shared/snippets/header' = render 'shared/snippets/header'
%article.file-holder.snippet-file-content .project-snippets
%article.file-holder.snippet-file-content
.file-title .file-title
= blob_icon 0, @snippet.file_name = blob_icon 0, @snippet.file_name
= @snippet.file_name = @snippet.file_name
...@@ -11,4 +12,6 @@ ...@@ -11,4 +12,6 @@
= link_to 'Raw', raw_namespace_project_snippet_path(@project.namespace, @project, @snippet), class: "btn btn-sm", target: "_blank" = link_to 'Raw', raw_namespace_project_snippet_path(@project.namespace, @project, @snippet), class: "btn btn-sm", target: "_blank"
= render 'shared/snippets/blob' = render 'shared/snippets/blob'
%div#notes= render "projects/notes/notes_with_form" = render 'award_emoji/awards_block', awardable: @snippet, inline: true
%div#notes= render "projects/notes/notes_with_form"
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 99 102" xmlns:xlink="http://www.w3.org/1999/xlink"><defs><path id="0" d="m35.12 56.988c4.083-4.385 5.968-12.155 5.968-24.04 0-20.2-15.874-32.16-15.874-32.16-1.114-.954-2.929-.979-4.04 0 0 0-15.874 11.957-15.874 32.16 0 11.882 1.884 19.652 5.968 24.04h23.848"/><mask id="1" width="35.783" height="56.924" x="0" y="0" fill="#fff"><use xlink:href="#0"/></mask></defs><g fill="none" fill-rule="evenodd" transform="translate(0-4)"><g transform="translate(32.15 3.976)"><g fill="#6b4fbb"><path d="m11.928 56.988l1.325-1.325v3.313c0 .737.59 1.325 1.325 1.325h17.229c.736 0 1.325-.59 1.325-1.325v-3.313l1.325 1.325h-22.53m22.53-1.325v3.313c0 1.464-1.18 2.651-2.651 2.651h-17.229c-1.464 0-2.651-1.178-2.651-2.651v-3.313h22.53m-5.964 7.361h.663c0 3.294-2.67 5.964-5.964 5.964-3.294 0-5.964-2.67-5.964-5.964h.663.663c0 2.562 2.077 4.639 4.639 4.639 2.562 0 4.639-2.077 4.639-4.639h.663"/><path d="m5.816 42.535c-.346-2.839-.515-6.03-.515-9.584 0-20.2 15.874-32.16 15.874-32.16 1.106-.979 2.921-.954 4.04 0 0 0 15.874 11.957 15.874 32.16 0 11.882-1.884 19.652-5.968 24.04h-23.848c-2.861-3.073-4.643-7.807-5.453-14.453-.06-.493-.115-.997-.164-1.511l-4.04 2.884c-.891.637-1.614 2.041-1.614 3.137v14.581c0 1.465.971 1.958 2.165 1.106l8.691-6.208c-.282-.332-.553-.681-.813-1.048l-8.648 6.177c-.147.105-.069.152-.069-.027v-14.581c0-.668.516-1.671 1.059-2.059l3.432-2.451m38.4 20.2c1.193.852 2.165.359 2.165-1.106v-14.581c0-1.096-.723-2.5-1.614-3.137l-4.04-2.884c-.049.514-.104 1.018-.164 1.511l3.432 2.451c.543.388 1.059 1.391 1.059 2.059v14.581c0 .179.078.132-.069.027l-8.648-6.177c-.26.367-.531.716-.813 1.048l8.691 6.208"/></g><use fill="#fff" stroke="#6b4fbb" stroke-width="2.651" mask="url(#1)" xlink:href="#0"/><g fill="#b5a7dd"><path d="m30.482 28.494c0-4.03-3.263-7.289-7.289-7.289-4.03 0-7.289 3.263-7.289 7.289 0 4.03 3.263 7.289 7.289 7.289 4.03 0 7.289-3.263 7.289-7.289m-15.904 0c0-4.758 3.857-8.614 8.614-8.614 4.758 0 8.614 3.857 8.614 8.614 0 4.758-3.857 8.614-8.614 8.614-4.758 0-8.614-3.857-8.614-8.614"/><path d="m27.17 28.494c0-2.196-1.78-3.976-3.976-3.976-2.196 0-3.976 1.78-3.976 3.976 0 2.196 1.78 3.976 3.976 3.976 2.196 0 3.976-1.78 3.976-3.976m-9.277 0c0-2.928 2.373-5.301 5.301-5.301 2.928 0 5.301 2.373 5.301 5.301 0 2.928-2.373 5.301-5.301 5.301-2.928 0-5.301-2.373-5.301-5.301"/></g><path fill="#6b4fbb" d="m34.458 87.47c0 1.098.89 1.988 1.988 1.988 1.098 0 1.988-.89 1.988-1.988 0-.366.297-.663.663-.663.366 0 .663.297.663.663 0 1.83-1.483 3.313-3.313 3.313-1.826 0-3.307-1.478-3.313-3.302 0-.002 0-.003 0-.005v-2.663c0-.363.294-.657.663-.657.366 0 .663.299.663.657v2.657m-21.2-6.615c0-.002 0-.003 0-.005v-2.663c0-.358-.297-.657-.663-.657-.369 0-.663.294-.663.657v2.657c0 1.098-.89 1.988-1.988 1.988-1.098 0-1.988-.89-1.988-1.988 0-.366-.297-.663-.663-.663-.366 0-.663.297-.663.663 0 1.83 1.483 3.313 3.313 3.313 1.826 0 3.307-1.477 3.313-3.302m5.301 7.285c0-.001 0-.002 0-.003v-16.576c0-.362-.297-.658-.663-.658-.369 0-.663.295-.663.658v16.571c0 2.01-1.632 3.645-3.645 3.645-2.01 0-3.645-1.632-3.645-3.645 0-.366-.297-.663-.663-.663-.366 0-.663.297-.663.663 0 2.745 2.225 4.97 4.97 4.97 2.742 0 4.966-2.221 4.97-4.963m10.602 8.607v-18.555c0-.365-.297-.661-.663-.661-.369 0-.663.296-.663.661v18.557c0 0 0 0 0 .001.001 2.744 2.226 4.968 4.97 4.968 2.745 0 4.97-2.225 4.97-4.97 0-.366-.297-.663-.663-.663-.366 0-.663.297-.663.663 0 2.01-1.632 3.645-3.645 3.645-2.01 0-3.645-1.632-3.645-3.645m3.976-25.19c0-.363.294-.657.663-.657.366 0 .663.299.663.657v2.663c0 .363-.294.657-.663.657-.366 
0-.663-.299-.663-.657v-2.663m0 6.627c0-.363.294-.657.663-.657.366 0 .663.299.663.657v2.663c0 .363-.294.657-.663.657-.366 0-.663-.299-.663-.657v-2.663m-10.602-6.627c0-.363.294-.657.663-.657.366 0 .663.299.663.657v2.663c0 .363-.294.657-.663.657-.366 0-.663-.299-.663-.657v-2.663m5.301 0c0-.363.294-.657.663-.657.366 0 .663.299.663.657v2.663c0 .363-.294.657-.663.657-.366 0-.663-.299-.663-.657v-2.663m-5.301 6.627c0-.363.294-.657.663-.657.366 0 .663.299.663.657v2.663c0 .363-.294.657-.663.657-.366 0-.663-.299-.663-.657v-2.663m0 6.627c0-.363.294-.657.663-.657.366 0 .663.299.663.657v2.663c0 .363-.294.657-.663.657-.366 0-.663-.299-.663-.657v-2.663m-10.602-13.253c0-.363.294-.657.663-.657.366 0 .663.299.663.657v2.663c0 .363-.294.657-.663.657-.366 0-.663-.299-.663-.657v-2.663"/></g><path fill="#e2ddf2" d="m97.75 76.54c0-2.745-2.225-4.97-4.97-4.97-2.745 0-4.97 2.225-4.97 4.97 0 2.745 2.225 4.97 4.97 4.97 2.745 0 4.97-2.225 4.97-4.97m-8.614 0c0-2.01 1.632-3.645 3.645-3.645 2.01 0 3.645 1.632 3.645 3.645 0 2.01-1.632 3.645-3.645 3.645-2.01 0-3.645-1.632-3.645-3.645m-60.964-57.651c0-2.745-2.225-4.97-4.97-4.97-2.745 0-4.97 2.225-4.97 4.97 0 2.745 2.225 4.97 4.97 4.97 2.745 0 4.97-2.225 4.97-4.97m-8.614 0c0-2.01 1.632-3.645 3.645-3.645 2.01 0 3.645 1.632 3.645 3.645 0 2.01-1.632 3.645-3.645 3.645-2.01 0-3.645-1.632-3.645-3.645"/><path fill="#b5a7dd" d="m98.41 34.458c0-1.83-1.483-3.313-3.313-3.313-1.83 0-3.313 1.483-3.313 3.313 0 1.83 1.483 3.313 3.313 3.313 1.83 0 3.313-1.483 3.313-3.313m-5.301 0c0-1.098.89-1.988 1.988-1.988 1.098 0 1.988.89 1.988 1.988 0 1.098-.89 1.988-1.988 1.988-1.098 0-1.988-.89-1.988-1.988m-86.14 20.542c0-1.83-1.483-3.313-3.313-3.313-1.83 0-3.313 1.483-3.313 3.313 0 1.83 1.483 3.313 3.313 3.313 1.83 0 3.313-1.483 3.313-3.313m-5.301 0c0-1.098.89-1.988 1.988-1.988 1.098 0 1.988.89 1.988 1.988 0 1.098-.89 1.988-1.988 1.988-1.098 0-1.988-.89-1.988-1.988"/></g></svg>
...@@ -10,3 +10,5 @@ ...@@ -10,3 +10,5 @@
= clipboard_button(clipboard_target: ".blob-content[data-blob-id='#{@snippet.id}']") = clipboard_button(clipboard_target: ".blob-content[data-blob-id='#{@snippet.id}']")
= link_to 'Raw', raw_snippet_path(@snippet), class: "btn btn-sm", target: "_blank" = link_to 'Raw', raw_snippet_path(@snippet), class: "btn btn-sm", target: "_blank"
= render 'shared/snippets/blob' = render 'shared/snippets/blob'
= render 'award_emoji/awards_block', awardable: @snippet, inline: true
\ No newline at end of file
...@@ -35,6 +35,10 @@ Rails.application.routes.draw do ...@@ -35,6 +35,10 @@ Rails.application.routes.draw do
post :approve_access_request, on: :member post :approve_access_request, on: :member
end end
concern :awardable do
post :toggle_award_emoji, on: :member
end
namespace :ci do namespace :ci do
# CI API # CI API
Ci::API::API.logger Rails.logger Ci::API::API.logger Rails.logger
...@@ -106,7 +110,7 @@ Rails.application.routes.draw do ...@@ -106,7 +110,7 @@ Rails.application.routes.draw do
# #
# Global snippets # Global snippets
# #
resources :snippets do resources :snippets, concerns: :awardable do
member do member do
get 'raw' get 'raw'
end end
...@@ -118,7 +122,6 @@ Rails.application.routes.draw do ...@@ -118,7 +122,6 @@ Rails.application.routes.draw do
# #
# Invites # Invites
# #
resources :invites, only: [:show], constraints: { id: /[A-Za-z0-9_-]+/ } do resources :invites, only: [:show], constraints: { id: /[A-Za-z0-9_-]+/ } do
member do member do
post :accept post :accept
...@@ -711,7 +714,7 @@ Rails.application.routes.draw do ...@@ -711,7 +714,7 @@ Rails.application.routes.draw do
end end
end end
resources :snippets, constraints: { id: /\d+/ } do resources :snippets, concerns: :awardable, constraints: { id: /\d+/ } do
member do member do
get 'raw' get 'raw'
end end
...@@ -773,7 +776,7 @@ Rails.application.routes.draw do ...@@ -773,7 +776,7 @@ Rails.application.routes.draw do
end end
end end
resources :merge_requests, constraints: { id: /\d+/ } do resources :merge_requests, concerns: :awardable, constraints: { id: /\d+/ } do
member do member do
get :commits get :commits
get :diffs get :diffs
...@@ -787,7 +790,6 @@ Rails.application.routes.draw do ...@@ -787,7 +790,6 @@ Rails.application.routes.draw do
post :toggle_subscription post :toggle_subscription
post :approve post :approve
post :rebase post :rebase
post :toggle_award_emoji
post :remove_wip post :remove_wip
get :diff_for_path get :diff_for_path
post :resolve_conflicts post :resolve_conflicts
...@@ -850,6 +852,8 @@ Rails.application.routes.draw do ...@@ -850,6 +852,8 @@ Rails.application.routes.draw do
resources :environments resources :environments
resource :cycle_analytics, only: [:show]
resources :builds, only: [:index, :show], constraints: { id: /\d+/ } do resources :builds, only: [:index, :show], constraints: { id: /\d+/ } do
collection do collection do
post :cancel_all post :cancel_all
...@@ -908,10 +912,9 @@ Rails.application.routes.draw do ...@@ -908,10 +912,9 @@ Rails.application.routes.draw do
end end
end end
resources :issues, constraints: { id: /\d+/ } do resources :issues, concerns: :awardable, constraints: { id: /\d+/ } do
member do member do
post :toggle_subscription post :toggle_subscription
post :toggle_award_emoji
post :mark_as_spam post :mark_as_spam
get :referenced_merge_requests get :referenced_merge_requests
get :related_branches get :related_branches
...@@ -939,9 +942,8 @@ Rails.application.routes.draw do ...@@ -939,9 +942,8 @@ Rails.application.routes.draw do
resources :group_links, only: [:index, :create, :destroy], constraints: { id: /\d+/ } resources :group_links, only: [:index, :create, :destroy], constraints: { id: /\d+/ }
resources :notes, only: [:index, :create, :destroy, :update], constraints: { id: /\d+/ } do resources :notes, only: [:index, :create, :destroy, :update], concerns: :awardable, constraints: { id: /\d+/ } do
member do member do
post :toggle_award_emoji
delete :delete_attachment delete :delete_attachment
post :resolve post :resolve
delete :resolve, action: :unresolve delete :resolve, action: :unresolve
......
require 'sidekiq/testing'
require './spec/support/test_env'
class Gitlab::Seeder::CycleAnalytics
def initialize(project, perf: false)
@project = project
@user = User.order(:id).last
@issue_count = perf ? 1000 : 5
stub_git_pre_receive!
end
# The GitLab API needn't be running for the fixtures to be
# created. Since we're performing a number of git actions
# here (like creating a branch or committing a file), we need
# to disable the `pre_receive` hook in order to remove this
# dependency on the GitLab API.
def stub_git_pre_receive!
GitHooksService.class_eval do
def run_hook(name)
[true, '']
end
end
end
def seed_metrics!
@issue_count.times do |index|
# Issue
Timecop.travel 5.days.from_now
title = "#{FFaker::Product.brand}-#{FFaker::Product.brand}-#{rand(1000)}"
issue = Issue.create(project: @project, title: title, author: @user)
issue_metrics = issue.metrics
# Milestones / Labels
Timecop.travel 5.days.from_now
if index.even?
issue_metrics.first_associated_with_milestone_at = rand(6..12).hours.from_now
else
issue_metrics.first_added_to_board_at = rand(6..12).hours.from_now
end
# Commit
Timecop.travel 5.days.from_now
issue_metrics.first_mentioned_in_commit_at = rand(6..12).hours.from_now
# MR
Timecop.travel 5.days.from_now
branch_name = "#{FFaker::Product.brand}-#{FFaker::Product.brand}-#{rand(1000)}"
@project.repository.add_branch(@user, branch_name, 'master')
merge_request = MergeRequest.create(target_project: @project, source_project: @project, source_branch: branch_name, target_branch: 'master', title: branch_name, author: @user)
merge_request_metrics = merge_request.metrics
# MR closing issues
Timecop.travel 5.days.from_now
MergeRequestsClosingIssues.create!(issue: issue, merge_request: merge_request)
# Merge
Timecop.travel 5.days.from_now
merge_request_metrics.merged_at = rand(6..12).hours.from_now
# Start build
Timecop.travel 5.days.from_now
merge_request_metrics.latest_build_started_at = rand(6..12).hours.from_now
# Finish build
Timecop.travel 5.days.from_now
merge_request_metrics.latest_build_finished_at = rand(6..12).hours.from_now
# Deploy to production
Timecop.travel 5.days.from_now
merge_request_metrics.first_deployed_to_production_at = rand(6..12).hours.from_now
issue_metrics.save!
merge_request_metrics.save!
print '.'
end
end
def seed!
Sidekiq::Testing.inline! do
issues = create_issues
puts '.'
# Stage 1
Timecop.travel 5.days.from_now
add_milestones_and_list_labels(issues)
print '.'
# Stage 2
Timecop.travel 5.days.from_now
branches = mention_in_commits(issues)
print '.'
# Stage 3
Timecop.travel 5.days.from_now
merge_requests = create_merge_requests_closing_issues(issues, branches)
print '.'
# Stage 4
Timecop.travel 5.days.from_now
run_builds(merge_requests)
print '.'
# Stage 5
Timecop.travel 5.days.from_now
merge_merge_requests(merge_requests)
print '.'
# Stage 6 / 7
Timecop.travel 5.days.from_now
deploy_to_production(merge_requests)
print '.'
end
print '.'
end
private
def create_issues
Array.new(@issue_count) do
issue_params = {
title: "Cycle Analytics: #{FFaker::Lorem.sentence(6)}",
description: FFaker::Lorem.sentence,
state: 'opened',
assignee: @project.team.users.sample
}
Issues::CreateService.new(@project, @project.team.users.sample, issue_params).execute
end
end
def add_milestones_and_list_labels(issues)
issues.shuffle.map.with_index do |issue, index|
Timecop.travel 12.hours.from_now
if index.even?
issue.update(milestone: @project.milestones.sample)
else
label_name = "#{FFaker::Product.brand}-#{FFaker::Product.brand}-#{rand(1000)}"
list_label = FactoryGirl.create(:label, title: label_name, project: issue.project)
FactoryGirl.create(:list, board: FactoryGirl.create(:board, project: issue.project), label: list_label)
issue.update(labels: [list_label])
end
issue
end
end
def mention_in_commits(issues)
issues.map do |issue|
Timecop.travel 12.hours.from_now
branch_name = filename = "#{FFaker::Product.brand}-#{FFaker::Product.brand}-#{rand(1000)}"
issue.project.repository.add_branch(@user, branch_name, 'master')
options = {
committer: issue.project.repository.user_to_committer(@user),
author: issue.project.repository.user_to_committer(@user),
commit: { message: "Commit for ##{issue.iid}", branch: branch_name, update_ref: true },
file: { content: "content", path: filename, update: false }
}
commit_sha = Gitlab::Git::Blob.commit(issue.project.repository, options)
issue.project.repository.commit(commit_sha)
GitPushService.new(issue.project,
@user,
oldrev: issue.project.repository.commit("master").sha,
newrev: commit_sha,
ref: 'refs/heads/master').execute
branch_name
end
end
def create_merge_requests_closing_issues(issues, branches)
issues.zip(branches).map do |issue, branch|
Timecop.travel 12.hours.from_now
opts = {
title: 'Cycle Analytics merge_request',
description: "Fixes #{issue.to_reference}",
source_branch: branch,
target_branch: 'master'
}
MergeRequests::CreateService.new(issue.project, @user, opts).execute
end
end
def run_builds(merge_requests)
merge_requests.each do |merge_request|
Timecop.travel 12.hours.from_now
service = Ci::CreatePipelineService.new(merge_request.project,
@user,
ref: "refs/heads/#{merge_request.source_branch}")
pipeline = service.execute(ignore_skip_ci: true, save_on_errors: false)
pipeline.run!
Timecop.travel rand(1..6).hours.from_now
pipeline.succeed!
end
end
def merge_merge_requests(merge_requests)
merge_requests.each do |merge_request|
Timecop.travel 12.hours.from_now
MergeRequests::MergeService.new(merge_request.project, @user).execute(merge_request)
end
end
def deploy_to_production(merge_requests)
merge_requests.each do |merge_request|
Timecop.travel 12.hours.from_now
CreateDeploymentService.new(merge_request.project, @user, {
environment: 'production',
ref: 'master',
tag: false,
sha: @project.repository.commit('master').sha
}).execute
end
end
end
Gitlab::Seeder.quiet do
if ENV['SEED_CYCLE_ANALYTICS']
Project.all.each do |project|
seeder = Gitlab::Seeder::CycleAnalytics.new(project)
seeder.seed!
end
elsif ENV['CYCLE_ANALYTICS_PERF_TEST']
seeder = Gitlab::Seeder::CycleAnalytics.new(Project.order(:id).first, perf: true)
seeder.seed!
elsif ENV['CYCLE_ANALYTICS_POPULATE_METRICS_DIRECTLY']
seeder = Gitlab::Seeder::CycleAnalytics.new(Project.order(:id).first, perf: true)
seeder.seed_metrics!
else
puts "Not running the cycle analytics seed file. Use the `SEED_CYCLE_ANALYTICS` environment variable to enable it."
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddTableIssueMetrics < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = true
# When a migration requires downtime you **must** uncomment the following
# constant and define a short and easy to understand explanation as to why the
# migration requires downtime.
DOWNTIME_REASON = 'Adding foreign key'
# When using the methods "add_concurrent_index" or "add_column_with_default"
# you must disable the use of transactions as these methods can not run in an
# existing transaction. When using "add_concurrent_index" make sure that this
# method is the _only_ method called in the migration, any other changes
# should go in a separate migration. This ensures that upon failure _only_ the
# index creation fails and can be retried or reverted easily.
#
# To disable transactions uncomment the following line and remove these
# comments:
# disable_ddl_transaction!
def change
create_table :issue_metrics do |t|
t.references :issue, index: { name: "index_issue_metrics" }, foreign_key: { on_delete: :cascade }, null: false
t.datetime 'first_mentioned_in_commit_at'
t.datetime 'first_associated_with_milestone_at'
t.datetime 'first_added_to_board_at'
t.timestamps null: false
end
end
end
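As a hedged aside (not part of this changeset), the pattern those comments warn about would look roughly like the sketch below; the class name and the indexed column are hypothetical, and `add_concurrent_index` comes from `Gitlab::Database::MigrationHelpers`:
```ruby
# Illustrative sketch only. Assumes Gitlab::Database::MigrationHelpers is available.
class AddIndexToIssueMetricsFirstMentionedInCommitAt < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  # add_concurrent_index cannot run inside a DDL transaction, so the migration
  # opts out of it and does nothing else.
  disable_ddl_transaction!

  def up
    add_concurrent_index :issue_metrics, :first_mentioned_in_commit_at
  end

  def down
    return unless index_exists?(:issue_metrics, :first_mentioned_in_commit_at)

    remove_index :issue_metrics, :first_mentioned_in_commit_at
  end
end
```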
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddTableMergeRequestMetrics < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = true
# When a migration requires downtime you **must** uncomment the following
# constant and define a short and easy to understand explanation as to why the
# migration requires downtime.
DOWNTIME_REASON = 'Adding foreign key'
# When using the methods "add_concurrent_index" or "add_column_with_default"
# you must disable the use of transactions as these methods can not run in an
# existing transaction. When using "add_concurrent_index" make sure that this
# method is the _only_ method called in the migration, any other changes
# should go in a separate migration. This ensures that upon failure _only_ the
# index creation fails and can be retried or reverted easily.
#
# To disable transactions uncomment the following line and remove these
# comments:
# disable_ddl_transaction!
def change
create_table :merge_request_metrics do |t|
t.references :merge_request, index: { name: "index_merge_request_metrics" }, foreign_key: { on_delete: :cascade }, null: false
t.datetime 'latest_build_started_at'
t.datetime 'latest_build_finished_at'
t.datetime 'first_deployed_to_production_at', index: true
t.datetime 'merged_at'
t.timestamps null: false
end
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class CreateMergeRequestsClosingIssues < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = true
# When a migration requires downtime you **must** uncomment the following
# constant and define a short and easy to understand explanation as to why the
# migration requires downtime.
DOWNTIME_REASON = 'Adding foreign keys'
# When using the methods "add_concurrent_index" or "add_column_with_default"
# you must disable the use of transactions as these methods can not run in an
# existing transaction. When using "add_concurrent_index" make sure that this
# method is the _only_ method called in the migration, any other changes
# should go in a separate migration. This ensures that upon failure _only_ the
# index creation fails and can be retried or reverted easily.
#
# To disable transactions uncomment the following line and remove these
# comments:
# disable_ddl_transaction!
def change
create_table :merge_requests_closing_issues do |t|
t.references :merge_request, foreign_key: { on_delete: :cascade }, index: true, null: false
t.references :issue, foreign_key: { on_delete: :cascade }, index: true, null: false
t.timestamps null: false
end
end
end
...@@ -11,7 +11,7 @@ ...@@ -11,7 +11,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20160913212128) do ActiveRecord::Schema.define(version: 20160915081353) do
# These are extensions that must be enabled in order to support this database # These are extensions that must be enabled in order to support this database
enable_extension "plpgsql" enable_extension "plpgsql"
...@@ -499,6 +499,17 @@ ActiveRecord::Schema.define(version: 20160913212128) do ...@@ -499,6 +499,17 @@ ActiveRecord::Schema.define(version: 20160913212128) do
add_index "index_statuses", ["project_id"], name: "index_index_statuses_on_project_id", unique: true, using: :btree add_index "index_statuses", ["project_id"], name: "index_index_statuses_on_project_id", unique: true, using: :btree
create_table "issue_metrics", force: :cascade do |t|
t.integer "issue_id", null: false
t.datetime "first_associated_with_milestone_at"
t.datetime "first_added_to_board_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "first_mentioned_in_commit_at"
end
add_index "issue_metrics", ["issue_id"], name: "index_issue_metrics", using: :btree
create_table "issues", force: :cascade do |t| create_table "issues", force: :cascade do |t|
t.string "title" t.string "title"
t.integer "assignee_id" t.integer "assignee_id"
...@@ -659,6 +670,18 @@ ActiveRecord::Schema.define(version: 20160913212128) do ...@@ -659,6 +670,18 @@ ActiveRecord::Schema.define(version: 20160913212128) do
add_index "merge_request_diffs", ["merge_request_id"], name: "index_merge_request_diffs_on_merge_request_id", using: :btree add_index "merge_request_diffs", ["merge_request_id"], name: "index_merge_request_diffs_on_merge_request_id", using: :btree
create_table "merge_request_metrics", force: :cascade do |t|
t.integer "merge_request_id", null: false
t.datetime "latest_build_started_at"
t.datetime "latest_build_finished_at"
t.datetime "first_deployed_to_production_at"
t.datetime "merged_at"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "merge_request_metrics", ["merge_request_id"], name: "index_merge_request_metrics", using: :btree
create_table "merge_requests", force: :cascade do |t| create_table "merge_requests", force: :cascade do |t|
t.string "target_branch", null: false t.string "target_branch", null: false
t.string "source_branch", null: false t.string "source_branch", null: false
...@@ -702,6 +725,16 @@ ActiveRecord::Schema.define(version: 20160913212128) do ...@@ -702,6 +725,16 @@ ActiveRecord::Schema.define(version: 20160913212128) do
add_index "merge_requests", ["title"], name: "index_merge_requests_on_title", using: :btree add_index "merge_requests", ["title"], name: "index_merge_requests_on_title", using: :btree
add_index "merge_requests", ["title"], name: "index_merge_requests_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"} add_index "merge_requests", ["title"], name: "index_merge_requests_on_title_trigram", using: :gin, opclasses: {"title"=>"gin_trgm_ops"}
create_table "merge_requests_closing_issues", force: :cascade do |t|
t.integer "merge_request_id", null: false
t.integer "issue_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "merge_requests_closing_issues", ["issue_id"], name: "index_merge_requests_closing_issues_on_issue_id", using: :btree
add_index "merge_requests_closing_issues", ["merge_request_id"], name: "index_merge_requests_closing_issues_on_merge_request_id", using: :btree
create_table "milestones", force: :cascade do |t| create_table "milestones", force: :cascade do |t|
t.string "title", null: false t.string "title", null: false
t.integer "project_id", null: false t.integer "project_id", null: false
...@@ -1313,8 +1346,12 @@ ActiveRecord::Schema.define(version: 20160913212128) do ...@@ -1313,8 +1346,12 @@ ActiveRecord::Schema.define(version: 20160913212128) do
add_index "web_hooks", ["project_id"], name: "index_web_hooks_on_project_id", using: :btree add_index "web_hooks", ["project_id"], name: "index_web_hooks_on_project_id", using: :btree
add_foreign_key "boards", "projects" add_foreign_key "boards", "projects"
add_foreign_key "issue_metrics", "issues", on_delete: :cascade
add_foreign_key "lists", "boards" add_foreign_key "lists", "boards"
add_foreign_key "lists", "labels" add_foreign_key "lists", "labels"
add_foreign_key "merge_request_metrics", "merge_requests", on_delete: :cascade
add_foreign_key "merge_requests_closing_issues", "issues", on_delete: :cascade
add_foreign_key "merge_requests_closing_issues", "merge_requests", on_delete: :cascade
add_foreign_key "path_locks", "projects" add_foreign_key "path_locks", "projects"
add_foreign_key "path_locks", "users" add_foreign_key "path_locks", "users"
add_foreign_key "personal_access_tokens", "users" add_foreign_key "personal_access_tokens", "users"
......
# Award Emoji # Award Emoji
> [Introduced][ce-4575] in GitLab 8.9. > [Introduced][ce-4575] in GitLab 8.9, Snippet support in 8.12
An awarded emoji tells a thousand words, and can be awarded on issues, merge An awarded emoji tells a thousand words, and can be awarded on issues, merge
requests and notes/comments. Issues, merge requests and notes are further called requests, snippets, and notes/comments. Issues, merge requests, snippets, and notes are further called
`awardables`. `awardables`.
## Issues and merge requests ## Issues, merge requests, and snippets
### List an awardable's award emoji ### List an awardable's award emoji
...@@ -15,6 +16,7 @@ Gets a list of all award emoji ...@@ -15,6 +16,7 @@ Gets a list of all award emoji
``` ```
GET /projects/:id/issues/:issue_id/award_emoji GET /projects/:id/issues/:issue_id/award_emoji
GET /projects/:id/merge_requests/:merge_request_id/award_emoji GET /projects/:id/merge_requests/:merge_request_id/award_emoji
GET /projects/:id/snippets/:snippet_id/award_emoji
``` ```
Parameters: Parameters:
...@@ -69,11 +71,12 @@ Example Response: ...@@ -69,11 +71,12 @@ Example Response:
### Get single award emoji ### Get single award emoji
Gets a single award emoji from an issue or merge request. Gets a single award emoji from an issue, snippet, or merge request.
``` ```
GET /projects/:id/issues/:issue_id/award_emoji/:award_id GET /projects/:id/issues/:issue_id/award_emoji/:award_id
GET /projects/:id/merge_requests/:merge_request_id/award_emoji/:award_id GET /projects/:id/merge_requests/:merge_request_id/award_emoji/:award_id
GET /projects/:id/snippets/:snippet_id/award_emoji/:award_id
``` ```
Parameters: Parameters:
...@@ -116,6 +119,7 @@ This end point creates an award emoji on the specified resource ...@@ -116,6 +119,7 @@ This end point creates an award emoji on the specified resource
``` ```
POST /projects/:id/issues/:issue_id/award_emoji POST /projects/:id/issues/:issue_id/award_emoji
POST /projects/:id/merge_requests/:merge_request_id/award_emoji POST /projects/:id/merge_requests/:merge_request_id/award_emoji
POST /projects/:id/snippets/:snippet_id/award_emoji
``` ```
Parameters: Parameters:
...@@ -159,6 +163,7 @@ admins or the author of the award. Status code 200 on success, 401 if unauthoriz ...@@ -159,6 +163,7 @@ admins or the author of the award. Status code 200 on success, 401 if unauthoriz
``` ```
DELETE /projects/:id/issues/:issue_id/award_emoji/:award_id DELETE /projects/:id/issues/:issue_id/award_emoji/:award_id
DELETE /projects/:id/merge_requests/:merge_request_id/award_emoji/:award_id DELETE /projects/:id/merge_requests/:merge_request_id/award_emoji/:award_id
DELETE /projects/:id/snippets/:snippet_id/award_emoji/:award_id
``` ```
Parameters: Parameters:
...@@ -197,7 +202,7 @@ Example Response: ...@@ -197,7 +202,7 @@ Example Response:
## Award Emoji on Notes ## Award Emoji on Notes
The endpoints documented above are available for Notes as well. Notes The endpoints documented above are available for Notes as well. Notes
are a sub-resource of Issues and Merge Requests. The examples below are a sub-resource of Issues, Merge Requests, or Snippets. The examples below
describe working with Award Emoji on notes for an Issue, but can be describe working with Award Emoji on notes for an Issue, but can be
easily adapted for notes on a Merge Request. easily adapted for notes on a Merge Request.
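As a quick, hedged illustration of the new snippet routes above (the host, IDs, and token below are placeholders, the `/api/v3` prefix is assumed to be the API version current at the time, and `PRIVATE-TOKEN` is the usual GitLab API authentication header), listing a snippet's award emoji from plain Ruby might look like:
```ruby
# Hypothetical example: list award emoji on project snippet 2 in project 1.
require 'net/http'
require 'json'
require 'uri'

uri = URI('https://gitlab.example.com/api/v3/projects/1/snippets/2/award_emoji')
request = Net::HTTP::Get.new(uri)
request['PRIVATE-TOKEN'] = 'your-private-token'

response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end

# Each entry carries the emoji name, the awarding user, and timestamps.
puts JSON.parse(response.body)
```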
......
# Cycle Analytics
> [Introduced][ce-5986] in GitLab 8.12.
>
> **Note:**
This is the first iteration of Cycle Analytics. You can follow issue [#20975][ce-20975]
to track the changes that are coming to this feature.
Cycle Analytics measures the time it takes to go from an idea to production for
each project you have. It not only shows the total time it takes to reach that
point, but also breaks that total down into the multiple stages an idea has to
pass through to be shipped.
Cycle Analytics is tightly coupled with the [GitLab flow] and calculates a
separate median for each stage.
## Overview
You can find the Cycle Analytics page under your project's **Pipelines > Cycle
Analytics** tab.
![Cycle Analytics landing page](img/cycle_analytics_landing_page.png)
You can see that there are seven stages in total:
- **Issue** (Tracker)
- Median time from issue creation until given a milestone or list label
(first assignment, any milestone, milestone date or assignee is not required)
- **Plan** (Board)
- Median time from giving an issue a milestone or label until pushing the
first commit
- **Code** (IDE)
- Median time from the first commit until the merge request is created
- **Test** (CI)
- Total test time for all commits/merges
- **Review** (Merge Request/MR)
- Median time from merge request creation until the merge request is merged
(closed merge requests won't be taken into account)
- **Staging** (Continuous Deployment)
- Median time from when the merge request got merged until the deploy to
production (production is last stage/environment)
- **Production** (Total)
- Sum of all the above stages excluding the Test (CI) time
## How the data is measured
Cycle Analytics records cycle time, so only data on issues that have been
deployed to production is measured. If you have just started a new project and
have not pushed anything to production yet, you will not be able to see any
Cycle Analytics data for your project.
Specifically, if your CI is not set up and you have not defined a `production`
[environment], then you will not have any data.
Below you can see in more detail what the various stages of Cycle Analytics mean.
| **Stage** | **Description** |
| --------- | --------------- |
| Issue | Measures the median time between creating an issue and taking action to solve it, by either labeling it or adding it to a milestone, whichever comes first. The label will be tracked only if it already has an [Issue Board list][board] created for it. |
| Plan | Measures the median time between the action you took for the previous stage and pushing the first commit to the repository. For this change to be tracked, the pushed commit needs to contain the issue closing pattern `Closes #xxx`, where `xxx` is the number of the issue related to this commit. If the commit does not contain the issue closing pattern, it is not counted towards the measured time of the stage. |
| Code | Measures the median time between pushing a first commit (previous stage) and creating a merge request related to that commit. The key to keeping the process tracked is to include the issue closing pattern in the description of the merge request. |
| Test | Measures the median time to run the entire pipeline for that project. It's related to the time GitLab CI takes to run every job for the commits pushed to that merge request defined in the previous stage. It is basically the start->finish time for all pipelines. `master` is not excluded. It does not attempt to track time for any particular stages. |
| Review | Measures the median time taken to review the merge request, from its creation until it is merged. |
| Staging | Measures the median time between merging the merge request and the very first deployment to production. It's tracked through the [environment] set to `production` in your GitLab CI configuration. If there isn't a `production` environment, this is not tracked. |
| Production| The sum of all time taken to run the entire process, from issue creation to deploying the code to production. |
---
Here's a little explanation of how this works behind the scenes:
1. Issues and merge requests are grouped together in pairs, such that for each
`<issue, merge request>` pair, the merge request has `Fixes #xxx` for the
corresponding issue. All other issues and merge requests are **not** considered.
1. Then the `<issue, merge request>` pairs are filtered: any pair whose merge
   request has **not** been deployed to production in the last XX days (as
   specified in the UI; the default is 90 days) is excluded.
1. For the remaining `<issue, merge request>` pairs, we check the information that
we need for the stages, like issue creation date, merge request merge time,
etc.
To sum up, anything that doesn't follow the [GitLab flow] won't be tracked at all.
So, if a merge request doesn't close an issue, if an issue isn't labeled with a
label present in the Issue Board or assigned a milestone, or if a project has no
`production` environment, the Cycle Analytics dashboard won't present any data
at all.
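To make the behind-the-scenes steps above concrete, here is a small, self-contained Ruby sketch. It is illustrative only and uses made-up data structures rather than GitLab's actual models: it pairs issues with their closing merge requests, drops pairs not deployed to production within the selected window, and takes a median over the remaining durations.
```ruby
# Illustrative only: the pairing, filtering and median steps as plain Ruby.
require 'date'

Pair = Struct.new(:issue_created_at, :mr_merged_at, :deployed_to_production_at)

pairs = [
  Pair.new(Date.new(2016, 8, 1), Date.new(2016, 8, 10), Date.new(2016, 8, 12)),
  Pair.new(Date.new(2016, 8, 5), Date.new(2016, 8, 20), Date.new(2016, 8, 21)),
  Pair.new(Date.new(2016, 5, 1), Date.new(2016, 5, 15), Date.new(2016, 5, 16)) # deployed too long ago
]

# Step 2: keep only pairs deployed to production within the selected window (90 days here).
window_start = Date.new(2016, 9, 18) - 90
recent = pairs.select { |pair| pair.deployed_to_production_at >= window_start }

# Step 3: compute a median duration (issue creation -> merge) over the surviving pairs.
durations = recent.map { |pair| (pair.mr_merged_at - pair.issue_created_at).to_i }.sort
median =
  if durations.size.odd?
    durations[durations.size / 2]
  else
    (durations[durations.size / 2 - 1] + durations[durations.size / 2]) / 2.0
  end

puts "Median days from issue creation to merge: #{median}"
```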
## Permissions
The current permissions on the Cycle Analytics dashboard are:
- Public projects - anyone can access
- Private/internal projects - any member (guest level and above) can access
You can [read more about permissions][permissions] in general.
## More resources
Learn more about Cycle Analytics in the following resources:
- [Cycle Analytics feature page](https://about.gitlab.com/solutions/cycle-analytics/)
- [Cycle Analytics feature preview](https://about.gitlab.com/2016/09/16/feature-preview-introducing-cycle-analytics/)
- [Cycle Analytics feature highlight](https://about.gitlab.com/2016-09-19-cycle-analytics-feature-highlight.html)
[ce-5986]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/5986
[ce-20975]: https://gitlab.com/gitlab-org/gitlab-ce/issues/20975
[GitLab flow]: ../../workflow/gitlab_flow.md
[permissions]: ../permissions.md
[environment]: ../../ci/yaml/README.md#environment
[board]: issue_board.md#creating-a-new-list
# Workflow # Workflow
- [Change your time zone](timezone.md) - [Change your time zone](timezone.md)
- [Cycle Analytics](../user/project/cycle_analytics.md)
- [Description templates](../user/project/description_templates.md) - [Description templates](../user/project/description_templates.md)
- [Feature branch workflow](workflow.md) - [Feature branch workflow](workflow.md)
- [GitLab Flow](gitlab_flow.md) - [GitLab Flow](gitlab_flow.md)
......
module API module API
class AwardEmoji < Grape::API class AwardEmoji < Grape::API
before { authenticate! } before { authenticate! }
AWARDABLES = [Issue, MergeRequest] AWARDABLES = %w[issue merge_request snippet]
resource :projects do resource :projects do
AWARDABLES.each do |awardable_type| AWARDABLES.each do |awardable_type|
awardable_string = awardable_type.to_s.underscore.pluralize awardable_string = awardable_type.pluralize
awardable_id_string = "#{awardable_type.to_s.underscore}_id" awardable_id_string = "#{awardable_type}_id"
[ ":id/#{awardable_string}/:#{awardable_id_string}/award_emoji", [ ":id/#{awardable_string}/:#{awardable_id_string}/award_emoji",
":id/#{awardable_string}/:#{awardable_id_string}/notes/:note_id/award_emoji" ":id/#{awardable_string}/:#{awardable_id_string}/notes/:note_id/award_emoji"
...@@ -87,9 +87,7 @@ module API ...@@ -87,9 +87,7 @@ module API
helpers do helpers do
def can_read_awardable? def can_read_awardable?
ability = "read_#{awardable.class.to_s.underscore}".to_sym can?(current_user, read_ability(awardable), awardable)
can?(current_user, ability, awardable)
end end
def can_award_awardable? def can_award_awardable?
...@@ -100,18 +98,25 @@ module API ...@@ -100,18 +98,25 @@ module API
@awardable ||= @awardable ||=
begin begin
if params.include?(:note_id) if params.include?(:note_id)
noteable.notes.find(params[:note_id]) note_id = params.delete(:note_id)
awardable.notes.find(note_id)
elsif params.include?(:issue_id)
user_project.issues.find(params[:issue_id])
elsif params.include?(:merge_request_id)
user_project.merge_requests.find(params[:merge_request_id])
else else
noteable user_project.snippets.find(params[:snippet_id])
end end
end end
end end
def noteable def read_ability(awardable)
if params.include?(:issue_id) case awardable
user_project.issues.find(params[:issue_id]) when Note
read_ability(awardable.noteable)
else else
user_project.merge_requests.find(params[:merge_request_id]) :"read_#{awardable.class.to_s.underscore}"
end end
end end
end end
......
module Ci::MaskSecret module Ci::MaskSecret
class << self class << self
def mask(value, token) def mask!(value, token)
return value unless value.present? && token.present? return value unless value.present? && token.present?
value.gsub(token, 'x' * token.length) value.gsub!(token, 'x' * token.length)
value
end end
end end
end end
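A hedged usage sketch of the renamed method (values below are made up): `mask!` now mutates the string it receives, so callers that still need the original should pass a copy.
```ruby
# Hypothetical values; mask! replaces every occurrence of the token in place.
secret = 'super-secret-token'
log    = 'Authenticating with super-secret-token against the registry'

masked = Ci::MaskSecret.mask!(log.dup, secret)

masked # => "Authenticating with xxxxxxxxxxxxxxxxxx against the registry"
log    # => unchanged, because we handed mask! a duplicate
```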
module Gitlab
module Database
module DateTime
# Find the first of the `end_time_attrs` that isn't `NULL`. Subtract from it
# the first of the `start_time_attrs` that isn't NULL. `SELECT` the resulting interval
# along with an alias specified by the `as` parameter.
#
# Note: For MySQL, the interval is returned in seconds.
# For PostgreSQL, the interval is returned as an INTERVAL type.
def subtract_datetimes(query_so_far, end_time_attrs, start_time_attrs, as)
diff_fn = if Gitlab::Database.postgresql?
Arel::Nodes::Subtraction.new(
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs)),
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)))
elsif Gitlab::Database.mysql?
Arel::Nodes::NamedFunction.new(
"TIMESTAMPDIFF",
[Arel.sql('second'),
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(start_time_attrs)),
Arel::Nodes::NamedFunction.new("COALESCE", Array.wrap(end_time_attrs))])
end
query_so_far.project(diff_fn.as(as))
end
end
end
end
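As a hedged illustration of how this helper might be driven (the tables and columns below are only examples, and the snippet assumes Arel and this module are loaded, e.g. in a Rails console for this codebase):
```ruby
# Illustrative sketch, not part of this changeset.
include Gitlab::Database::DateTime

issues        = Arel::Table.new(:issues)
issue_metrics = Arel::Table.new(:issue_metrics)

query = issue_metrics
        .join(issues)
        .on(issue_metrics[:issue_id].eq(issues[:id]))

# Roughly: SELECT COALESCE(first_added_to_board_at) - COALESCE(created_at) AS plan_time ...
# (on MySQL the difference comes back in seconds via TIMESTAMPDIFF instead).
query = subtract_datetimes(query,
                           [issue_metrics[:first_added_to_board_at]],
                           [issues[:created_at]],
                           'plan_time')

puts query.to_sql
```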
# https://www.periscopedata.com/blog/medians-in-sql.html
module Gitlab
module Database
module Median
def median_datetime(arel_table, query_so_far, column_sym)
median_queries =
if Gitlab::Database.postgresql?
pg_median_datetime_sql(arel_table, query_so_far, column_sym)
elsif Gitlab::Database.mysql?
mysql_median_datetime_sql(arel_table, query_so_far, column_sym)
end
results = Array.wrap(median_queries).map do |query|
ActiveRecord::Base.connection.execute(query)
end
extract_median(results).presence
end
def extract_median(results)
result = results.compact.first
if Gitlab::Database.postgresql?
result = result.first.presence
median = result['median'] if result
median.to_f if median
elsif Gitlab::Database.mysql?
result.to_a.flatten.first
end
end
def mysql_median_datetime_sql(arel_table, query_so_far, column_sym)
query = arel_table.
from(arel_table.project(Arel.sql('*')).order(arel_table[column_sym]).as(arel_table.table_name)).
project(average([arel_table[column_sym]], 'median')).
where(
Arel::Nodes::Between.new(
Arel.sql("(select @row_id := @row_id + 1)"),
Arel::Nodes::And.new(
[Arel.sql('@ct/2.0'),
Arel.sql('@ct/2.0 + 1')]
)
)
).
# Disallow negative values
where(arel_table[column_sym].gteq(0))
[
Arel.sql("CREATE TEMPORARY TABLE IF NOT EXISTS #{query_so_far.to_sql}"),
Arel.sql("set @ct := (select count(1) from #{arel_table.table_name});"),
Arel.sql("set @row_id := 0;"),
query.to_sql,
Arel.sql("DROP TEMPORARY TABLE IF EXISTS #{arel_table.table_name};")
]
end
def pg_median_datetime_sql(arel_table, query_so_far, column_sym)
# Create a CTE with the column we're operating on, row number (after sorting by the column
# we're operating on), and the count of the table we're operating on (duplicated across all rows
# of the CTE). For example, if we're looking to find the median of the `projects.star_count`
# column, the CTE might look like this:
#
# star_count | row_id | ct
# ------------+--------+----
# 5 | 1 | 3
# 9 | 2 | 3
# 15 | 3 | 3
cte_table = Arel::Table.new("ordered_records")
cte = Arel::Nodes::As.new(
cte_table,
arel_table.
project(
arel_table[column_sym].as(column_sym.to_s),
Arel::Nodes::Over.new(Arel::Nodes::NamedFunction.new("row_number", []),
Arel::Nodes::Window.new.order(arel_table[column_sym])).as('row_id'),
arel_table.project("COUNT(1)").as('ct')).
# Disallow negative values
where(arel_table[column_sym].gteq(zero_interval)))
# From the CTE, select either the middle row or the middle two rows (this is accomplished
# by 'where cte.row_id between cte.ct / 2.0 AND cte.ct / 2.0 + 1'). Find the average of the
# selected rows, and this is the median value.
cte_table.project(average([extract_epoch(cte_table[column_sym])], "median")).
where(
Arel::Nodes::Between.new(
cte_table[:row_id],
Arel::Nodes::And.new(
[(cte_table[:ct] / Arel.sql('2.0')),
(cte_table[:ct] / Arel.sql('2.0') + 1)]
)
)
).
with(query_so_far, cte).
to_sql
end
private
def average(args, as)
Arel::Nodes::NamedFunction.new("AVG", args, as)
end
def extract_epoch(arel_attribute)
Arel.sql(%Q{EXTRACT(EPOCH FROM "#{arel_attribute.relation.name}"."#{arel_attribute.name}")})
end
# Need to cast '0' to an INTERVAL before we can check if the interval is positive
def zero_interval
Arel::Nodes::NamedFunction.new("CAST", [Arel.sql("'0' AS INTERVAL")])
end
end
end
end
...@@ -19,6 +19,8 @@ module Gitlab ...@@ -19,6 +19,8 @@ module Gitlab
end end
def committer_hash(email:, name:) def committer_hash(email:, name:)
return if email.nil? || name.nil?
{ {
email: email, email: email,
name: name, name: name,
......
...@@ -10,6 +10,7 @@ project_tree: ...@@ -10,6 +10,7 @@ project_tree:
- milestone: - milestone:
- :events - :events
- snippets: - snippets:
- :award_emoji
- notes: - notes:
:author :author
- :releases - :releases
...@@ -66,6 +67,8 @@ excluded_attributes: ...@@ -66,6 +67,8 @@ excluded_attributes:
- :milestone_id - :milestone_id
merge_requests: merge_requests:
- :milestone_id - :milestone_id
award_emoji:
- :awardable_id
methods: methods:
statuses: statuses:
......
require 'spec_helper' require 'spec_helper'
describe SnippetsController do describe SnippetsController do
describe 'GET #show' do
let(:user) { create(:user) } let(:user) { create(:user) }
describe 'GET #show' do
context 'when the personal snippet is private' do context 'when the personal snippet is private' do
let(:personal_snippet) { create(:personal_snippet, :private, author: user) } let(:personal_snippet) { create(:personal_snippet, :private, author: user) }
...@@ -230,4 +230,33 @@ describe SnippetsController do ...@@ -230,4 +230,33 @@ describe SnippetsController do
end end
end end
end end
context 'award emoji on snippets' do
let(:personal_snippet) { create(:personal_snippet, :public, author: user) }
let(:another_user) { create(:user) }
before do
sign_in(another_user)
end
describe 'POST #toggle_award_emoji' do
it "toggles the award emoji" do
expect do
post(:toggle_award_emoji, id: personal_snippet.to_param, name: "thumbsup")
end.to change { personal_snippet.award_emoji.count }.from(0).to(1)
expect(response.status).to eq(200)
end
it "removes the already awarded emoji" do
post(:toggle_award_emoji, id: personal_snippet.to_param, name: "thumbsup")
expect do
post(:toggle_award_emoji, id: personal_snippet.to_param, name: "thumbsup")
end.to change { personal_snippet.award_emoji.count }.from(1).to(0)
expect(response.status).to eq(200)
end
end
end
end end
...@@ -3,11 +3,12 @@ FactoryGirl.define do ...@@ -3,11 +3,12 @@ FactoryGirl.define do
sha '97de212e80737a608d939f648d959671fb0a0142' sha '97de212e80737a608d939f648d959671fb0a0142'
ref 'master' ref 'master'
tag false tag false
project nil
environment factory: :environment environment factory: :environment
after(:build) do |deployment, evaluator| after(:build) do |deployment, evaluator|
deployment.project = deployment.environment.project deployment.project ||= deployment.environment.project
end end
end end
end end
require 'spec_helper'
describe 'GFM autocomplete loading', feature: true, js: true do
let(:project) { create(:project) }
before do
login_as :admin
visit namespace_project_path(project.namespace, project)
end
it 'does not load on project#show' do
expect(evaluate_script('GitLab.GfmAutoComplete.dataSource')).to eq('')
end
it 'loads on new issue page' do
visit new_namespace_project_issue_path(project.namespace, project)
expect(evaluate_script('GitLab.GfmAutoComplete.dataSource')).not_to eq('')
end
end
require 'spec_helper'
# Integration test that exports a file using the Import/Export feature
# It looks for any sensitive word inside the JSON, so if a sensitive word is found
# we'll have to either add the model that includes it to the +safe_list+
# or make sure the attribute is blacklisted in the +import_export.yml+ configuration
feature 'Import/Export - project export integration test', feature: true, js: true do
include Select2Helper
include ExportFileHelper
let(:user) { create(:admin) }
let(:export_path) { "#{Dir::tmpdir}/import_file_spec" }
let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
let(:sensitive_words) { %w[pass secret token key] }
let(:safe_list) do
{
token: [ProjectHook, Ci::Trigger, CommitStatus],
key: [Project, Ci::Variable, :yaml_variables]
}
end
let(:safe_hashes) { { yaml_variables: %w[key value public] } }
let(:project) { setup_project }
background do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
end
after do
FileUtils.rm_rf(export_path, secure: true)
end
context 'admin user' do
before do
login_as(user)
end
scenario 'exports a project successfully' do
visit edit_namespace_project_path(project.namespace, project)
expect(page).to have_content('Export project')
click_link 'Export project'
visit edit_namespace_project_path(project.namespace, project)
expect(page).to have_content('Download export')
in_directory_with_expanded_export(project) do |exit_status, tmpdir|
expect(exit_status).to eq(0)
project_json_path = File.join(tmpdir, 'project.json')
expect(File).to exist(project_json_path)
project_hash = JSON.parse(IO.read(project_json_path))
sensitive_words.each do |sensitive_word|
found = find_sensitive_attributes(sensitive_word, project_hash)
expect(found).to be_nil, failure_message(found.try(:key_found), found.try(:parent), sensitive_word)
end
end
end
def failure_message(key_found, parent, sensitive_word)
<<-MSG
Found a new sensitive word <#{key_found}>, which is part of the hash #{parent.inspect}
If you think this information shouldn't get exported, please exclude the model or attribute in IMPORT_EXPORT_CONFIG.
Otherwise, please add the exception to +safe_list+ in CURRENT_SPEC, using #{sensitive_word} as the key and the
corresponding hash or model as the value.
IMPORT_EXPORT_CONFIG: #{Gitlab::ImportExport.config_file}
CURRENT_SPEC: #{__FILE__}
MSG
end
end
end
require 'spec_helper' require 'spec_helper'
feature 'project import', feature: true, js: true do feature 'Import/Export - project import integration test', feature: true, js: true do
include Select2Helper include Select2Helper
let(:admin) { create(:admin) } let(:admin) { create(:admin) }
......
...@@ -5,15 +5,23 @@ describe Ci::MaskSecret, lib: true do ...@@ -5,15 +5,23 @@ describe Ci::MaskSecret, lib: true do
describe '#mask' do describe '#mask' do
it 'masks exact number of characters' do it 'masks exact number of characters' do
expect(subject.mask('token', 'oke')).to eq('txxxn') expect(mask('token', 'oke')).to eq('txxxn')
end end
it 'masks multiple occurrences' do it 'masks multiple occurrences' do
expect(subject.mask('token token token', 'oke')).to eq('txxxn txxxn txxxn') expect(mask('token token token', 'oke')).to eq('txxxn txxxn txxxn')
end end
it 'does not mask if not found' do it 'does not mask if not found' do
expect(subject.mask('token', 'not')).to eq('token') expect(mask('token', 'not')).to eq('token')
end
it 'does support null token' do
expect(mask('token', nil)).to eq('token')
end
def mask(value, token)
subject.mask!(value.dup, token)
end end
end end
end end
...@@ -662,7 +662,7 @@ describe Gitlab::GitAccess, lib: true do ...@@ -662,7 +662,7 @@ describe Gitlab::GitAccess, lib: true do
end end
context 'to private project' do context 'to private project' do
let(:project) { create(:project, :internal) } let(:project) { create(:project) }
it { expect(subject).not_to be_allowed } it { expect(subject).not_to be_allowed }
end end
......
require 'spec_helper'
describe Gitlab::Git, lib: true do
let(:committer_email) { FFaker::Internet.email }
# I have to remove periods from the end of the name
# This happens when the user's name has a suffix (e.g. "Sr.").
# This seems to be what git does under the hood. For example, this commit:
#
# $ git commit --author='Foo Sr. <foo@example.com>' -m "Where's my trailing period?"
#
# results in this:
#
# $ git show --pretty
# ...
# Author: Foo Sr <foo@example.com>
# ...
let(:committer_name) { FFaker::Name.name.chomp("\.") }
describe 'committer_hash' do
it "returns a hash containing the given email and name" do
committer_hash = Gitlab::Git::committer_hash(email: committer_email, name: committer_name)
expect(committer_hash[:email]).to eq(committer_email)
expect(committer_hash[:name]).to eq(committer_name)
expect(committer_hash[:time]).to be_a(Time)
end
context 'when email is nil' do
it "returns nil" do
committer_hash = Gitlab::Git::committer_hash(email: nil, name: committer_name)
expect(committer_hash).to be_nil
end
end
context 'when name is nil' do
it "returns nil" do
committer_hash = Gitlab::Git::committer_hash(email: committer_email, name: nil)
expect(committer_hash).to be_nil
end
end
end
end
---
issues:
- subscriptions
- award_emoji
- author
- assignee
- updated_by
- milestone
- notes
- label_links
- labels
- todos
- user_agent_detail
- moved_to
- events
- merge_requests_closing_issues
- metrics
events:
- author
- project
- target
notes:
- award_emoji
- project
- noteable
- author
- updated_by
- resolved_by
- todos
- events
label_links:
- target
- label
label:
- subscriptions
- project
- lists
- label_links
- issues
- merge_requests
milestone:
- project
- issues
- labels
- merge_requests
- participants
- events
snippets:
- author
- project
- notes
- award_emoji
releases:
- project
project_members:
- created_by
- user
- source
- project
merge_requests:
- subscriptions
- award_emoji
- author
- assignee
- updated_by
- milestone
- notes
- label_links
- labels
- todos
- target_project
- source_project
- merge_user
- merge_request_diffs
- merge_request_diff
- events
- merge_requests_closing_issues
- metrics
merge_request_diff:
- merge_request
pipelines:
- project
- user
- statuses
- builds
- trigger_requests
statuses:
- project
- pipeline
- user
variables:
- project
triggers:
- project
- trigger_requests
deploy_keys:
- user
- deploy_keys_projects
- projects
services:
- project
- service_hook
hooks:
- project
protected_branches:
- project
- merge_access_levels
- push_access_levels
merge_access_levels:
- protected_branch
push_access_levels:
- protected_branch
project:
- taggings
- base_tags
- tag_taggings
- tags
- creator
- group
- namespace
- board
- last_event
- services
- campfire_service
- drone_ci_service
- emails_on_push_service
- builds_email_service
- irker_service
- pivotaltracker_service
- hipchat_service
- flowdock_service
- assembla_service
- asana_service
- gemnasium_service
- slack_service
- buildkite_service
- bamboo_service
- teamcity_service
- pushover_service
- jira_service
- redmine_service
- custom_issue_tracker_service
- bugzilla_service
- gitlab_issue_tracker_service
- external_wiki_service
- forked_project_link
- forked_from_project
- forked_project_links
- forks
- merge_requests
- fork_merge_requests
- issues
- labels
- events
- milestones
- notes
- snippets
- hooks
- protected_branches
- project_members
- users
- requesters
- deploy_keys_projects
- deploy_keys
- users_star_projects
- starrers
- releases
- lfs_objects_projects
- lfs_objects
- project_group_links
- invited_groups
- todos
- notification_settings
- import_data
- commit_statuses
- pipelines
- builds
- runner_projects
- runners
- variables
- triggers
- environments
- deployments
- project_feature
award_emoji:
- awardable
- user
\ No newline at end of file
require 'spec_helper'
# Part of the test security suite for the Import/Export feature
# Checks whether there are new attributes in models that are currently being exported as part of the
# project Import/Export feature.
# If there are new attributes, they will have to either be added to this spec (in case we want them
# to be included as part of the export) or be blacklisted using the import_export.yml configuration file.
# Likewise, new models added to import_export.yml will need to be added, with their corresponding attributes,
# to this spec.
describe 'Import/Export attribute configuration', lib: true do
include ConfigurationHelper
let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
let(:relation_names) do
names = names_from_tree(config_hash['project_tree'])
# Remove duplicated or add missing models
# - project is not part of the tree, so it has to be added manually.
# - milestone, labels have both singular and plural versions in the tree, so remove the duplicates.
names.flatten.uniq - ['milestones', 'labels'] + ['project']
end
let(:safe_attributes_file) { 'spec/lib/gitlab/import_export/safe_model_attributes.yml' }
let(:safe_model_attributes) { YAML.load_file(safe_attributes_file) }
it 'has no new columns' do
relation_names.each do |relation_name|
relation_class = relation_class_for_name(relation_name)
expect(safe_model_attributes[relation_class.to_s]).not_to be_nil, "Expected exported class #{relation_class.to_s} to exist in safe_model_attributes"
current_attributes = parsed_attributes(relation_name, relation_class.attribute_names)
safe_attributes = safe_model_attributes[relation_class.to_s]
new_attributes = current_attributes - safe_attributes
expect(new_attributes).to be_empty, failure_message(relation_class.to_s, new_attributes)
end
end
def failure_message(relation_class, new_attributes)
<<-MSG
It looks like #{relation_class}, which is exported using the project Import/Export, has new attributes: #{new_attributes.join(',')}
Please add the attribute(s) to SAFE_MODEL_ATTRIBUTES if you consider they can be exported.
Otherwise, please blacklist the attribute(s) in IMPORT_EXPORT_CONFIG by adding them to the corresponding
model in the +excluded_attributes+ section.
SAFE_MODEL_ATTRIBUTES: #{File.expand_path(safe_attributes_file)}
IMPORT_EXPORT_CONFIG: #{Gitlab::ImportExport.config_file}
MSG
end
class Author < User
end
end
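The check above reduces to a set difference between a model's current columns and the whitelist loaded from safe_model_attributes.yml. A minimal plain-Ruby sketch of that comparison, using hand-written data rather than the helper's actual code:
# Hypothetical whitelist entry; normally loaded from safe_model_attributes.yml.
safe_model_attributes = { 'User' => %w[id username email] }
# Hypothetical current columns; normally these come from Model.attribute_names.
current_attributes = %w[id username email otp_secret]
new_attributes = current_attributes - Array(safe_model_attributes['User'])
puts new_attributes.inspect # => ["otp_secret"] -- would fail the spec until whitelisted or excluded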
require 'spec_helper'
# Part of the test security suite for the Import/Export feature
# Detects whether a new model has been added that could potentially be part of the Import/Export.
# If it finds a new model, it will show a +failure_message+ with the options available.
describe 'Import/Export model configuration', lib: true do
include ConfigurationHelper
let(:config_hash) { YAML.load_file(Gitlab::ImportExport.config_file).deep_stringify_keys }
let(:model_names) do
names = names_from_tree(config_hash['project_tree'])
# Remove duplicated or add missing models
# - project is not part of the tree, so it has to be added manually.
# - milestone, labels have both singular and plural versions in the tree, so remove the duplicates.
# - user, author: models we do not need to check here.
names.flatten.uniq - ['milestones', 'labels', 'user', 'author'] + ['project']
end
let(:all_models_yml) { 'spec/lib/gitlab/import_export/all_models.yml' }
let(:all_models) { YAML.load_file(all_models_yml) }
let(:current_models) { setup_models }
it 'has no new models' do
model_names.each do |model_name|
new_models = Array(current_models[model_name]) - Array(all_models[model_name])
expect(new_models).to be_empty, failure_message(model_name.classify, new_models)
end
end
# Builds a hash of models and their associations, in the format of
# {model: [model_2, model_3], ...}
def setup_models
all_models_hash = {}
model_names.each do |model_name|
model_class = relation_class_for_name(model_name)
all_models_hash[model_name] = associations_for(model_class) - ['project']
end
all_models_hash
end
def failure_message(parent_model_name, new_models)
<<-MSG
New model(s) <#{new_models.join(',')}> have been added, related to #{parent_model_name}, which is exported by
the Import/Export feature.
If you think this model should be included in the export, please add it to IMPORT_EXPORT_CONFIG.
In either case, add it to MODELS_JSON to signal that you have handled this error and to prevent it from showing up in the future.
MODELS_JSON: #{File.expand_path(all_models_yml)}
IMPORT_EXPORT_CONFIG: #{Gitlab::ImportExport.config_file}
MSG
end
end
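This spec relies on a ConfigurationHelper#associations_for helper that is not shown in this excerpt. A plausible sketch, assuming it is built on ActiveRecord reflection (illustrative only, not the actual implementation):
# Hypothetical helper: list the association names defined on a model class.
# Requires a loaded Rails environment, e.g. inside the spec suite.
def associations_for(model_class)
  model_class.reflect_on_all_associations.map { |association| association.name.to_s }
end
# e.g. associations_for(Release) would include "project", which the spec strips
# before comparing against all_models.yml.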
---
Issue:
- id
- title
- assignee_id
- author_id
- project_id
- created_at
- updated_at
- position
- branch_name
- description
- state
- iid
- updated_by_id
- confidential
- deleted_at
- due_date
- moved_to_id
- lock_version
- milestone_id
- weight
Event:
- id
- target_type
- target_id
- title
- data
- project_id
- created_at
- updated_at
- action
- author_id
Note:
- id
- note
- noteable_type
- author_id
- created_at
- updated_at
- project_id
- attachment
- line_code
- commit_id
- noteable_id
- system
- st_diff
- updated_by_id
- type
- position
- original_position
- resolved_at
- resolved_by_id
- discussion_id
- original_discussion_id
LabelLink:
- id
- label_id
- target_id
- target_type
- created_at
- updated_at
Label:
- id
- title
- color
- project_id
- created_at
- updated_at
- template
- description
- priority
Milestone:
- id
- title
- project_id
- description
- due_date
- created_at
- updated_at
- state
- iid
ProjectSnippet:
- id
- title
- content
- author_id
- project_id
- created_at
- updated_at
- file_name
- type
- visibility_level
Release:
- id
- tag
- description
- project_id
- created_at
- updated_at
ProjectMember:
- id
- access_level
- source_id
- source_type
- user_id
- notification_level
- type
- created_at
- updated_at
- created_by_id
- invite_email
- invite_token
- invite_accepted_at
- requested_at
- expires_at
User:
- id
- username
- email
MergeRequest:
- id
- target_branch
- source_branch
- source_project_id
- author_id
- assignee_id
- title
- created_at
- updated_at
- state
- merge_status
- target_project_id
- iid
- description
- position
- locked_at
- updated_by_id
- merge_error
- merge_params
- merge_when_build_succeeds
- merge_user_id
- merge_commit_sha
- deleted_at
- in_progress_merge_commit_sha
- lock_version
- milestone_id
- approvals_before_merge
- rebase_commit_sha
MergeRequestDiff:
- id
- state
- st_commits
- merge_request_id
- created_at
- updated_at
- base_commit_sha
- real_size
- head_commit_sha
- start_commit_sha
Ci::Pipeline:
- id
- project_id
- ref
- sha
- before_sha
- push_data
- created_at
- updated_at
- tag
- yaml_errors
- committed_at
- gl_project_id
- status
- started_at
- finished_at
- duration
- user_id
CommitStatus:
- id
- project_id
- status
- finished_at
- trace
- created_at
- updated_at
- started_at
- runner_id
- coverage
- commit_id
- commands
- job_id
- name
- deploy
- options
- allow_failure
- stage
- trigger_request_id
- stage_idx
- tag
- ref
- user_id
- type
- target_url
- description
- artifacts_file
- gl_project_id
- artifacts_metadata
- erased_by_id
- erased_at
- artifacts_expire_at
- environment
- artifacts_size
- when
- yaml_variables
- queued_at
- token
Ci::Variable:
- id
- project_id
- key
- value
- encrypted_value
- encrypted_value_salt
- encrypted_value_iv
- gl_project_id
Ci::Trigger:
- id
- token
- project_id
- deleted_at
- created_at
- updated_at
- gl_project_id
DeployKey:
- id
- user_id
- created_at
- updated_at
- key
- title
- type
- fingerprint
- public
Service:
- id
- type
- title
- project_id
- created_at
- updated_at
- active
- properties
- template
- push_events
- issues_events
- merge_requests_events
- tag_push_events
- note_events
- pipeline_events
- build_events
- category
- default
- wiki_page_events
- confidential_issues_events
ProjectHook:
- id
- url
- project_id
- created_at
- updated_at
- type
- service_id
- push_events
- issues_events
- merge_requests_events
- tag_push_events
- note_events
- pipeline_events
- enable_ssl_verification
- build_events
- wiki_page_events
- token
- group_id
- confidential_issues_events
ProtectedBranch:
- id
- project_id
- name
- created_at
- updated_at
Project:
- description
- issues_enabled
- merge_requests_enabled
- wiki_enabled
- snippets_enabled
- visibility_level
- archived
Author:
- name
ProjectFeature:
- id
- project_id
- merge_requests_access_level
- issues_access_level
- wiki_access_level
- snippets_access_level
- builds_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
- id
- protected_branch_id
- access_level
- created_at
- updated_at
ProtectedBranch::PushAccessLevel:
- id
- protected_branch_id
- access_level
- created_at
- updated_at
AwardEmoji:
- id
- user_id
- name
- awardable_type
- created_at
- updated_at
...@@ -187,6 +187,37 @@ describe Ci::Pipeline, models: true do
end
end
describe "merge request metrics" do
let(:project) { FactoryGirl.create :project }
let(:pipeline) { FactoryGirl.create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: project.repository.commit('master').id) }
let!(:merge_request) { create(:merge_request, source_project: project, source_branch: pipeline.ref) }
context 'when transitioning to running' do
it 'records the build start time' do
time = Time.now
Timecop.freeze(time) { build.run }
expect(merge_request.reload.metrics.latest_build_started_at).to be_within(1.second).of(time)
end
it 'clears the build end time' do
build.run
expect(merge_request.reload.metrics.latest_build_finished_at).to be_nil
end
end
context 'when transitioning to success' do
it 'records the build end time' do
build.run
time = Time.now
Timecop.freeze(time) { build.success }
expect(merge_request.reload.metrics.latest_build_finished_at).to be_within(1.second).of(time)
end
end
end
def create_build(name, queued_at = current, started_from = 0)
create(:ci_build,
name: name,
...@@ -468,4 +499,28 @@ describe Ci::Pipeline, models: true do
stage_idx: stage_idx)
end
end
describe "#merge_requests" do
let(:project) { FactoryGirl.create :project }
let(:pipeline) { FactoryGirl.create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: project.repository.commit('master').id) }
it "returns merge requests whose `diff_head_sha` matches the pipeline's SHA" do
merge_request = create(:merge_request, source_project: project, source_branch: pipeline.ref)
expect(pipeline.merge_requests).to eq([merge_request])
end
it "doesn't return merge requests whose source branch doesn't match the pipeline's ref" do
create(:merge_request, source_project: project, source_branch: 'feature', target_branch: 'master')
expect(pipeline.merge_requests).to be_empty
end
it "doesn't return merge requests whose `diff_head_sha` doesn't match the pipeline's SHA" do
create(:merge_request, source_project: project, source_branch: pipeline.ref)
allow_any_instance_of(MergeRequest).to receive(:diff_head_sha) { '97de212e80737a608d939f648d959671fb0a0142b' }
expect(pipeline.merge_requests).to be_empty
end
end
end
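These examples pin down the matching rules for Ci::Pipeline#merge_requests: the merge request's source branch must equal the pipeline's ref, and its diff_head_sha must equal the pipeline's SHA. A sketch consistent with that behaviour (the model change itself is not shown in this excerpt, so this is not necessarily the exact code):
# Hypothetical implementation matching the expectations above.
def merge_requests
  project.merge_requests
    .where(source_branch: ref)
    .select { |merge_request| merge_request.diff_head_sha == sha }
end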
require 'spec_helper'
describe 'CycleAnalytics#code', feature: true do
extend CycleAnalyticsHelpers::TestGeneration
let(:project) { create(:project) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
subject { CycleAnalytics.new(project, from: from_date) }
generate_cycle_analytics_spec(
phase: :code,
data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
start_time_conditions: [["issue mentioned in a commit",
-> (context, data) do
context.create_commit_referencing_issue(data[:issue])
end]],
end_time_conditions: [["merge request that closes issue is created",
-> (context, data) do
context.create_merge_request_closing_issue(data[:issue])
end]],
post_fn: -> (context, data) do
context.merge_merge_requests_closing_issue(data[:issue])
context.deploy_master
end)
context "when a regular merge request (that doesn't close the issue) is created" do
it "returns nil" do
5.times do
issue = create(:issue, project: project)
create_commit_referencing_issue(issue)
create_merge_request_closing_issue(issue, message: "Closes nothing")
merge_merge_requests_closing_issue(issue)
deploy_master
end
expect(subject.code).to be_nil
end
end
end
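Each cycle-analytics phase reports a median time between a start event and an end event, which is what the generated examples above exercise. As a plain-Ruby illustration of a median-of-durations calculation (how the real CycleAnalytics class computes it is not shown here, so this is only a sketch):
# Hypothetical helper: median of a list of durations in seconds.
def median(durations)
  sorted = durations.sort
  middle = sorted.size / 2
  sorted.size.odd? ? sorted[middle] : (sorted[middle - 1] + sorted[middle]) / 2.0
end
start_times = [Time.utc(2016, 9, 1, 10), Time.utc(2016, 9, 2, 10)]
end_times   = [Time.utc(2016, 9, 1, 12), Time.utc(2016, 9, 2, 16)]
durations   = start_times.zip(end_times).map { |started, finished| finished - started }
puts median(durations) # => 14400.0 seconds, i.e. 4 hours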
require 'spec_helper'
describe 'CycleAnalytics#issue', models: true do
extend CycleAnalyticsHelpers::TestGeneration
let(:project) { create(:project) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
subject { CycleAnalytics.new(project, from: from_date) }
generate_cycle_analytics_spec(
phase: :issue,
data_fn: -> (context) { { issue: context.build(:issue, project: context.project) } },
start_time_conditions: [["issue created", -> (context, data) { data[:issue].save }]],
end_time_conditions: [["issue associated with a milestone",
-> (context, data) do
if data[:issue].persisted?
data[:issue].update(milestone: context.create(:milestone, project: context.project))
end
end],
["list label added to issue",
-> (context, data) do
if data[:issue].persisted?
data[:issue].update(label_ids: [context.create(:label, lists: [context.create(:list)]).id])
end
end]],
post_fn: -> (context, data) do
if data[:issue].persisted?
context.create_merge_request_closing_issue(data[:issue].reload)
context.merge_merge_requests_closing_issue(data[:issue])
context.deploy_master
end
end)
context "when a regular label (instead of a list label) is added to the issue" do
it "returns nil" do
5.times do
regular_label = create(:label)
issue = create(:issue, project: project)
issue.update(label_ids: [regular_label.id])
create_merge_request_closing_issue(issue)
merge_merge_requests_closing_issue(issue)
deploy_master
end
expect(subject.issue).to be_nil
end
end
end
require 'spec_helper'
describe 'CycleAnalytics#plan', feature: true do
extend CycleAnalyticsHelpers::TestGeneration
let(:project) { create(:project) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
subject { CycleAnalytics.new(project, from: from_date) }
generate_cycle_analytics_spec(
phase: :plan,
data_fn: -> (context) do
{
issue: context.create(:issue, project: context.project),
branch_name: context.random_git_name
}
end,
start_time_conditions: [["issue associated with a milestone",
-> (context, data) do
data[:issue].update(milestone: context.create(:milestone, project: context.project))
end],
["list label added to issue",
-> (context, data) do
data[:issue].update(label_ids: [context.create(:label, lists: [context.create(:list)]).id])
end]],
end_time_conditions: [["issue mentioned in a commit",
-> (context, data) do
context.create_commit_referencing_issue(data[:issue], branch_name: data[:branch_name])
end]],
post_fn: -> (context, data) do
context.create_merge_request_closing_issue(data[:issue], source_branch: data[:branch_name])
context.merge_merge_requests_closing_issue(data[:issue])
context.deploy_master
end)
context "when a regular label (instead of a list label) is added to the issue" do
it "returns nil" do
branch_name = random_git_name
label = create(:label)
issue = create(:issue, project: project)
issue.update(label_ids: [label.id])
create_commit_referencing_issue(issue, branch_name: branch_name)
create_merge_request_closing_issue(issue, source_branch: branch_name)
merge_merge_requests_closing_issue(issue)
deploy_master
expect(subject.plan).to be_nil
end
end
end
require 'spec_helper'
describe 'CycleAnalytics#production', feature: true do
extend CycleAnalyticsHelpers::TestGeneration
let(:project) { create(:project) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
subject { CycleAnalytics.new(project, from: from_date) }
generate_cycle_analytics_spec(
phase: :production,
data_fn: -> (context) { { issue: context.build(:issue, project: context.project) } },
start_time_conditions: [["issue is created", -> (context, data) { data[:issue].save }]],
before_end_fn: lambda do |context, data|
context.create_merge_request_closing_issue(data[:issue])
context.merge_merge_requests_closing_issue(data[:issue])
end,
end_time_conditions:
[["merge request that closes issue is deployed to production", -> (context, data) { context.deploy_master }],
["production deploy happens after merge request is merged (along with other changes)",
lambda do |context, data|
# Make other changes on master
sha = context.project.repository.commit_file(context.user, context.random_git_name, "content", "commit message", 'master', false)
context.project.repository.commit(sha)
context.deploy_master
end]])
context "when a regular merge request (that doesn't close the issue) is merged and deployed" do
it "returns nil" do
5.times do
merge_request = create(:merge_request)
MergeRequests::MergeService.new(project, user).execute(merge_request)
deploy_master
end
expect(subject.production).to be_nil
end
end
context "when the deployment happens to a non-production environment" do
it "returns nil" do
5.times do
issue = create(:issue, project: project)
merge_request = create_merge_request_closing_issue(issue)
MergeRequests::MergeService.new(project, user).execute(merge_request)
deploy_master(environment: 'staging')
end
expect(subject.production).to be_nil
end
end
end
require 'spec_helper'
describe 'CycleAnalytics#review', feature: true do
extend CycleAnalyticsHelpers::TestGeneration
let(:project) { create(:project) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
subject { CycleAnalytics.new(project, from: from_date) }
generate_cycle_analytics_spec(
phase: :review,
data_fn: -> (context) { { issue: context.create(:issue, project: context.project) } },
start_time_conditions: [["merge request that closes issue is created",
-> (context, data) do
context.create_merge_request_closing_issue(data[:issue])
end]],
end_time_conditions: [["merge request that closes issue is merged",
-> (context, data) do
context.merge_merge_requests_closing_issue(data[:issue])
end]],
post_fn: -> (context, data) { context.deploy_master })
context "when a regular merge request (that doesn't close the issue) is created and merged" do
it "returns nil" do
5.times do
MergeRequests::MergeService.new(project, user).execute(create(:merge_request))
deploy_master
end
expect(subject.review).to be_nil
end
end
end
require 'spec_helper'
describe 'CycleAnalytics#staging', feature: true do
extend CycleAnalyticsHelpers::TestGeneration
let(:project) { create(:project) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
subject { CycleAnalytics.new(project, from: from_date) }
generate_cycle_analytics_spec(
phase: :staging,
data_fn: lambda do |context|
issue = context.create(:issue, project: context.project)
{ issue: issue, merge_request: context.create_merge_request_closing_issue(issue) }
end,
start_time_conditions: [["merge request that closes issue is merged",
-> (context, data) do
context.merge_merge_requests_closing_issue(data[:issue])
end ]],
end_time_conditions: [["merge request that closes issue is deployed to production",
-> (context, data) do
context.deploy_master
end],
["production deploy happens after merge request is merged (along with other changes)",
lambda do |context, data|
# Make other changes on master
sha = context.project.repository.commit_file(
context.user,
context.random_git_name,
"content",
"commit message",
'master',
false)
context.project.repository.commit(sha)
context.deploy_master
end]])
context "when a regular merge request (that doesn't close the issue) is merged and deployed" do
it "returns nil" do
5.times do
merge_request = create(:merge_request)
MergeRequests::MergeService.new(project, user).execute(merge_request)
deploy_master
end
expect(subject.staging).to be_nil
end
end
context "when the deployment happens to a non-production environment" do
it "returns nil" do
5.times do
issue = create(:issue, project: project)
merge_request = create_merge_request_closing_issue(issue)
MergeRequests::MergeService.new(project, user).execute(merge_request)
deploy_master(environment: 'staging')
end
expect(subject.staging).to be_nil
end
end
end
require 'spec_helper'
describe CycleAnalytics::Summary, models: true do
let(:project) { create(:project) }
let(:from) { Time.now }
let(:user) { create(:user, :admin) }
subject { described_class.new(project, from: from) }
describe "#new_issues" do
it "finds the number of issues created after the 'from date'" do
Timecop.freeze(5.days.ago) { create(:issue, project: project) }
Timecop.freeze(5.days.from_now) { create(:issue, project: project) }
expect(subject.new_issues).to eq(1)
end
it "doesn't find issues from other projects" do
Timecop.freeze(5.days.from_now) { create(:issue, project: create(:project)) }
expect(subject.new_issues).to eq(0)
end
end
describe "#commits" do
it "finds the number of commits created after the 'from date'" do
Timecop.freeze(5.days.ago) { create_commit("Test message", project, user, 'master') }
Timecop.freeze(5.days.from_now) { create_commit("Test message", project, user, 'master') }
expect(subject.commits).to eq(1)
end
it "doesn't find commits from other projects" do
Timecop.freeze(5.days.from_now) { create_commit("Test message", create(:project), user, 'master') }
expect(subject.commits).to eq(0)
end
end
describe "#deploys" do
it "finds the number of deploys made created after the 'from date'" do
Timecop.freeze(5.days.ago) { create(:deployment, project: project) }
Timecop.freeze(5.days.from_now) { create(:deployment, project: project) }
expect(subject.deploys).to eq(1)
end
it "doesn't find commits from other projects" do
Timecop.freeze(5.days.from_now) { create(:deployment, project: create(:project)) }
expect(subject.deploys).to eq(0)
end
end
end
require 'spec_helper'
describe 'CycleAnalytics#test', feature: true do
extend CycleAnalyticsHelpers::TestGeneration
let(:project) { create(:project) }
let(:from_date) { 10.days.ago }
let(:user) { create(:user, :admin) }
subject { CycleAnalytics.new(project, from: from_date) }
generate_cycle_analytics_spec(
phase: :test,
data_fn: lambda do |context|
issue = context.create(:issue, project: context.project)
merge_request = context.create_merge_request_closing_issue(issue)
pipeline = context.create(:ci_pipeline, ref: merge_request.source_branch, sha: merge_request.diff_head_sha, project: context.project)
{ pipeline: pipeline, issue: issue }
end,
start_time_conditions: [["pipeline is started", -> (context, data) { data[:pipeline].run! }]],
end_time_conditions: [["pipeline is finished", -> (context, data) { data[:pipeline].succeed! }]],
post_fn: -> (context, data) do
context.merge_merge_requests_closing_issue(data[:issue])
context.deploy_master
end)
context "when the pipeline is for a regular merge request (that doesn't close an issue)" do
it "returns nil" do
5.times do
issue = create(:issue, project: project)
merge_request = create_merge_request_closing_issue(issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.succeed!
merge_merge_requests_closing_issue(issue)
deploy_master
end
expect(subject.test).to be_nil
end
end
context "when the pipeline is not for a merge request" do
it "returns nil" do
5.times do
pipeline = create(:ci_pipeline, ref: "refs/heads/master", sha: project.repository.commit('master').sha)
pipeline.run!
pipeline.succeed!
deploy_master
end
expect(subject.test).to be_nil
end
end
context "when the pipeline is dropped (failed)" do
it "returns nil" do
5.times do
issue = create(:issue, project: project)
merge_request = create_merge_request_closing_issue(issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.drop!
merge_merge_requests_closing_issue(issue)
deploy_master
end
expect(subject.test).to be_nil
end
end
context "when the pipeline is cancelled" do
it "returns nil" do
5.times do
issue = create(:issue, project: project)
merge_request = create_merge_request_closing_issue(issue)
pipeline = create(:ci_pipeline, ref: "refs/heads/#{merge_request.source_branch}", sha: merge_request.diff_head_sha)
pipeline.run!
pipeline.cancel!
merge_merge_requests_closing_issue(issue)
deploy_master
end
expect(subject.test).to be_nil
end
end
end
require 'spec_helper'
describe Issue::Metrics, models: true do
let(:project) { create(:project) }
subject { create(:issue, project: project) }
describe "when recording the default set of issue metrics on issue save" do
context "milestones" do
it "records the first time an issue is associated with a milestone" do
time = Time.now
Timecop.freeze(time) { subject.update(milestone: create(:milestone)) }
metrics = subject.metrics
expect(metrics).to be_present
expect(metrics.first_associated_with_milestone_at).to be_within(1.second).of(time)
end
it "does not record the second time an issue is associated with a milestone" do
time = Time.now
Timecop.freeze(time) { subject.update(milestone: create(:milestone)) }
Timecop.freeze(time + 2.hours) { subject.update(milestone: nil) }
Timecop.freeze(time + 6.hours) { subject.update(milestone: create(:milestone)) }
metrics = subject.metrics
expect(metrics).to be_present
expect(metrics.first_associated_with_milestone_at).to be_within(1.second).of(time)
end
end
context "list labels" do
it "records the first time an issue is associated with a list label" do
list_label = create(:label, lists: [create(:list)])
time = Time.now
Timecop.freeze(time) { subject.update(label_ids: [list_label.id]) }
metrics = subject.metrics
expect(metrics).to be_present
expect(metrics.first_added_to_board_at).to be_within(1.second).of(time)
end
it "does not record the second time an issue is associated with a list label" do
time = Time.now
first_list_label = create(:label, lists: [create(:list)])
Timecop.freeze(time) { subject.update(label_ids: [first_list_label.id]) }
second_list_label = create(:label, lists: [create(:list)])
Timecop.freeze(time + 5.hours) { subject.update(label_ids: [second_list_label.id]) }
metrics = subject.metrics
expect(metrics).to be_present
expect(metrics.first_added_to_board_at).to be_within(1.second).of(time)
end
end
end
end
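The two "does not record the second time" examples above pin down a write-once behaviour for these timestamps. A minimal, hypothetical sketch of that pattern (the actual Issue::Metrics model is not shown in this excerpt):
# Hypothetical object illustrating the write-once timestamps the spec asserts.
class HypotheticalIssueMetrics
  attr_accessor :first_associated_with_milestone_at, :first_added_to_board_at

  # ||= keeps the first recorded timestamp and ignores later ones.
  def record_milestone_association(at: Time.now)
    self.first_associated_with_milestone_at ||= at
  end

  def record_list_label_addition(at: Time.now)
    self.first_added_to_board_at ||= at
  end
end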
require 'spec_helper'
describe MergeRequest::Metrics, models: true do
let(:project) { create(:project) }
subject { create(:merge_request, source_project: project) }
describe "when recording the default set of metrics on merge request save" do
it "records the merge time" do
time = Time.now
Timecop.freeze(time) { subject.mark_as_merged }
metrics = subject.metrics
expect(metrics).to be_present
expect(metrics.merged_at).to be_within(1.second).of(time)
end
end
end
...@@ -589,15 +589,62 @@ describe MergeRequest, models: true do
end
describe '#all_pipelines' do
shared_examples 'returning pipelines with proper ordering' do
let!(:all_pipelines) do
subject.all_commits_sha.map do |sha|
create(:ci_empty_pipeline,
project: subject.source_project,
sha: sha,
ref: subject.source_branch)
end
end
it 'returns all pipelines' do
expect(subject.all_pipelines).not_to be_empty
expect(subject.all_pipelines).to eq(all_pipelines.reverse)
end
end
context 'with single merge_request_diffs' do
it_behaves_like 'returning pipelines with proper ordering'
end
context 'with multiple irrelevant merge_request_diffs' do
before do
subject.update(target_branch: 'markdown')
end
it_behaves_like 'returning pipelines with proper ordering'
end
context 'with unsaved merge request' do
subject { build(:merge_request) }
let!(:pipeline) do
create(:ci_empty_pipeline,
project: subject.project,
sha: subject.diff_head_sha,
ref: subject.source_branch)
end
it 'returns pipelines from diff_head_sha' do
expect(subject.all_pipelines).to contain_exactly(pipeline)
end
end
end
describe '#all_commits_sha' do
let(:all_commits_sha) do
subject.merge_request_diffs.flat_map(&:commits).map(&:sha).uniq
end
before do
subject.update(target_branch: 'markdown')
end
it 'returns all SHA from all merge_request_diffs' do
expect(subject.merge_request_diffs.size).to eq(2)
expect(subject.all_commits_sha).to eq(all_commits_sha)
end
end
...@@ -795,16 +842,57 @@ describe MergeRequest, models: true do
end
end
describe '#environments' do
let(:project) { create(:project) }
let(:merge_request) { create(:merge_request, source_project: project) }
context 'with multiple environments' do
let(:environments) { create_list(:environment, 3, project: project) }
before do
create(:deployment, environment: environments.first, ref: 'master', sha: project.commit('master').id)
create(:deployment, environment: environments.second, ref: 'feature', sha: project.commit('feature').id)
end
it 'selects deployed environments' do
expect(merge_request.environments).to contain_exactly(environments.first)
end
end
context 'with environments on source project' do
let(:source_project) do
create(:project) do |fork_project|
fork_project.create_forked_project_link(forked_to_project_id: fork_project.id, forked_from_project_id: project.id)
end
end
let(:merge_request) do
create(:merge_request,
source_project: source_project, source_branch: 'feature',
target_project: project)
end
let(:source_environment) { create(:environment, project: source_project) }
before do
create(:deployment, environment: source_environment, ref: 'feature', sha: merge_request.diff_head_sha)
end
it 'selects deployed environments' do
expect(merge_request.environments).to contain_exactly(source_environment)
end
context 'with environments on target project' do
let(:target_environment) { create(:environment, project: project) }
before do
create(:deployment, environment: target_environment, tag: true, sha: merge_request.diff_head_sha)
end
it 'selects deployed environments' do
expect(merge_request.environments).to contain_exactly(source_environment, target_environment)
end
end
end
context 'without a diff_head_commit' do
...