Commit 1c065f39 authored by Kamil Trzciński's avatar Kamil Trzciński

Merge branch 'chatops' into 'master'

Support for GitLab Chatops using Slash commands

See merge request gitlab-org/gitlab-ee!4466
parents db0c04c0 6c8450aa
......@@ -62,7 +62,8 @@ module Ci
schedule: 4,
api: 5,
external: 6,
pipeline: 7
pipeline: 7,
chat: 8
}
enum config_source: {
......
class SlackSlashCommandsService < SlashCommandsService
prepend EE::SlackSlashCommandsService
include TriggersHelper
def title
......
......@@ -3,6 +3,7 @@ module Ci
attr_reader :pipeline
SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Build,
EE::Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs,
Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
Gitlab::Ci::Pipeline::Chain::Validate::Repository,
Gitlab::Ci::Pipeline::Chain::Validate::Config,
......@@ -29,7 +30,8 @@ module Ci
current_user: current_user,
# EE specific
allow_mirror_update: mirror_update
allow_mirror_update: mirror_update,
chat_data: params[:chat_data]
)
sequence = Gitlab::Ci::Pipeline::Chain::Sequence
......
......@@ -144,3 +144,4 @@
- rebase
- repository_update_mirror
- repository_update_remote_mirror
- chat_notification
class BuildFinishedWorker
prepend EE::BuildFinishedWorker
include ApplicationWorker
include PipelineQueue
......
......@@ -73,6 +73,7 @@
# EE-specific queues
- [ldap_group_sync, 2]
- [chat_notification, 2]
- [geo, 1]
- [repository_remove_remote, 1]
- [repository_update_mirror, 1]
......
......@@ -434,6 +434,14 @@ ActiveRecord::Schema.define(version: 20180307012445) do
add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
create_table "ci_pipeline_chat_data", id: :bigserial, force: :cascade do |t|
t.integer "pipeline_id", null: false
t.integer "chat_name_id", null: false
t.text "response_url", null: false
end
add_index "ci_pipeline_chat_data", ["pipeline_id"], name: "index_ci_pipeline_chat_data_on_pipeline_id", unique: true, using: :btree
create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
......@@ -2536,6 +2544,8 @@ ActiveRecord::Schema.define(version: 20180307012445) do
add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
add_foreign_key "ci_pipeline_chat_data", "chat_names", on_delete: :cascade
add_foreign_key "ci_pipeline_chat_data", "ci_pipelines", column: "pipeline_id", on_delete: :cascade
add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
......
module Ci
class PipelineChatData < ActiveRecord::Base
self.table_name = 'ci_pipeline_chat_data'
belongs_to :chat_name
validates :pipeline_id, presence: true
validates :chat_name_id, presence: true
validates :response_url, presence: true
end
end
module EE
module Ci
module Pipeline
extend ActiveSupport::Concern
EE_FAILURE_REASONS = {
activity_limit_exceeded: 20,
size_limit_exceeded: 21
}.freeze
included do
has_one :chat_data, class_name: 'Ci::PipelineChatData'
end
def predefined_variables
result = super
result << { key: 'CI_PIPELINE_SOURCE', value: source.to_s, public: true }
......
module EE
module SlackSlashCommandsService
def chat_responder
::Gitlab::Chat::Responder::Slack
end
end
end
......@@ -63,6 +63,7 @@ class License < ActiveRecord::Base
dast
epics
ide
chatops
].freeze
# List all features available for early adopters,
......@@ -318,6 +319,7 @@ class License < ActiveRecord::Base
def reset_current
self.class.reset_current
Gitlab::Chat.flush_available_cache
end
def reset_license
......
# frozen_string_literal: true
class ChatNotificationWorker
include ApplicationWorker
RESCHEDULE_INTERVAL = 2.seconds
def perform(build_id)
Ci::Build.find_by(id: build_id).try do |build|
send_response(build)
end
rescue Gitlab::Chat::Output::MissingBuildSectionError
# The creation of traces and sections appears to be eventually consistent.
# As a result it's possible for us to run the above code before the trace
# sections are present. To better handle such cases we'll just reschedule
# the job instead of producing an error.
self.class.perform_in(RESCHEDULE_INTERVAL, build_id)
end
def send_response(build)
Gitlab::Chat::Responder.responder_for(build).try do |responder|
if build.success?
output = Gitlab::Chat::Output.new(build)
responder.success(output.to_s)
else
responder.failure
end
end
end
end
module EE
module BuildFinishedWorker
def perform(build_id)
super
::Ci::Build.find_by(id: build_id).try do |build|
ChatNotificationWorker.perform_async(build_id) if build.pipeline.chat?
end
end
end
end
---
title: Added basic implementation of GitLab Chatops
merge_request:
author:
type: added
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class CreateChatopsTables < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def change
create_table :ci_pipeline_chat_data, id: :bigserial do |t|
t.integer :pipeline_id, null: false
t.references :chat_name, foreign_key: { on_delete: :cascade }, null: false
t.text :response_url, null: false
# A pipeline can only contain one row in this table, hence this index is
# unique.
t.index :pipeline_id, unique: true
end
add_foreign_key :ci_pipeline_chat_data, :ci_pipelines,
column: :pipeline_id,
on_delete: :cascade
end
end
module EE
module Gitlab
module Ci
module Pipeline
module Chain
class RemoveUnwantedChatJobs < ::Gitlab::Ci::Pipeline::Chain::Base
def perform!
return unless pipeline.config_processor && pipeline.chat?
# When scheduling a chat pipeline we only want to run the build
# that matches the chat command.
pipeline.config_processor.jobs.select! do |name, _|
name.to_s == command.chat_data[:command].to_s
end
end
def break?
false
end
end
end
end
end
end
end
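A sketch of the filtering this step performs, assuming a .gitlab-ci.yml that defines echo, rspec, and deploy jobs and a chat command of "echo" (the job names are illustrative, not part of the change):
# Before perform!  pipeline.config_processor.jobs.keys  # => [:echo, :rspec, :deploy]
# Chat command     command.chat_data[:command]          # => 'echo'
# After perform!   pipeline.config_processor.jobs.keys  # => [:echo]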
# frozen_string_literal: true
module Gitlab
module Chat
CACHE_TTL = 1.hour.to_i
AVAILABLE_CACHE_KEY = :gitlab_chat_available
# Returns `true` if Chatops is available for the current instance.
def self.available?
# We expect this code to be called rather frequently, especially on
# large instances such as GitLab.com. To reduce database load we cache the
# result for a while.
Rails.cache.fetch(AVAILABLE_CACHE_KEY, expires_in: CACHE_TTL) do
::License.feature_available?(:chatops)
end
end
def self.flush_available_cache
Rails.cache.delete(AVAILABLE_CACHE_KEY)
end
end
end
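A minimal sketch of how the availability check and the cache flush are expected to interact; the License call is the same one stubbed in the specs further down:
Gitlab::Chat.available?             # reads License.feature_available?(:chatops) and caches the result
Gitlab::Chat.available?             # served from Rails.cache for up to CACHE_TTL (1 hour)
Gitlab::Chat.flush_available_cache  # called from License#reset_current when the license changes
Gitlab::Chat.available?             # re-reads the license feature flag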
# frozen_string_literal: true
module Gitlab
module Chat
# Class for scheduling chat pipelines.
#
# A Command takes care of creating a `Ci::Pipeline` with all the data
# necessary to execute a chat command. This includes data such as the chat
# data (e.g. the response URL) and any environment variables that should be
# exposed to the chat command.
class Command
include Utils::StrongMemoize
attr_reader :project, :chat_name, :name, :arguments, :response_url,
:channel
# project - The Project to schedule the command for.
# chat_name - The ChatName belonging to the user that scheduled the
# command.
# name - The name of the chat command to run.
# arguments - The arguments (as a String) to pass to the command.
# channel - The channel the message was sent from.
# response_url - The URL to send the response back to.
def initialize(project:, chat_name:, name:, arguments:, channel:, response_url:)
@project = project
@chat_name = chat_name
@name = name
@arguments = arguments
@channel = channel
@response_url = response_url
end
# Tries to create a new pipeline.
#
# This method will return a pipeline that _may_ be persisted, or `nil` if
# the pipeline could not be created.
def try_create_pipeline
return unless valid?
create_pipeline
end
def create_pipeline
service = ::Ci::CreatePipelineService.new(
project,
chat_name.user,
ref: branch,
sha: commit,
chat_data: {
chat_name_id: chat_name.id,
command: name,
arguments: arguments,
response_url: response_url
}
)
service.execute(:chat) do |pipeline|
create_environment_variables(pipeline)
create_chat_data(pipeline)
end
end
# pipeline - The `Ci::Pipeline` to create the environment variables for.
def create_environment_variables(pipeline)
pipeline.variables.create!(
[
{ key: 'CHAT_INPUT', value: arguments },
{ key: 'CHAT_CHANNEL', value: channel }
]
)
end
# pipeline - The `Ci::Pipeline` to create the chat data for.
def create_chat_data(pipeline)
pipeline.create_chat_data!(
chat_name_id: chat_name.id,
response_url: response_url
)
end
def valid?
branch && commit
end
def branch
strong_memoize(:branch) { project.default_branch }
end
def commit
strong_memoize(:commit) do
project.commit(branch)&.id if branch
end
end
end
end
end
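A hedged sketch of how a Command might be scheduled; the project, chat_name, and argument values are placeholders, and the call mirrors what Gitlab::SlashCommands::Run does later in this diff:
command = Gitlab::Chat::Command.new(
  project: project,           # Project to run the job in
  chat_name: chat_name,       # ChatName of the requesting user
  name: 'deploy',             # only the job with this name is kept in the pipeline
  arguments: 'staging',       # exposed to the job as CHAT_INPUT
  channel: 'C0123',           # exposed to the job as CHAT_CHANNEL
  response_url: 'https://hooks.slack.example/respond'
)
pipeline = command.try_create_pipeline # nil unless the default branch and its commit exist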
# frozen_string_literal: true
module Gitlab
module Chat
# Class for gathering and formatting the output of a `Ci::Build`.
class Output
attr_reader :build
MissingBuildSectionError = Class.new(StandardError)
# The primary trace section to look for.
PRIMARY_SECTION = 'chat_reply'
# The backup trace section in case the primary one could not be found.
FALLBACK_SECTION = 'build_script'
# build - The `Ci::Build` to obtain the output from.
def initialize(build)
@build = build
end
# Returns a `String` containing the output of the build.
#
# The output _does not_ include the command that was executed.
def to_s
offset, length = read_offset_and_length
trace.read do |stream|
stream.seek(offset)
output = stream
.stream
.read(length)
.force_encoding(Encoding.default_external)
without_executed_command_line(output)
end
end
# Returns the offset to seek to and the number of bytes to read relative
# to the offset.
def read_offset_and_length
section = find_build_trace_section(PRIMARY_SECTION) ||
find_build_trace_section(FALLBACK_SECTION)
unless section
raise(
MissingBuildSectionError,
"The build_script trace section could not be found for build #{build.id}"
)
end
length = section[:byte_end] - section[:byte_start]
[section[:byte_start], length]
end
# Removes the line containing the executed command from the build output.
#
# output - A `String` containing the output of a trace section.
def without_executed_command_line(output)
output.split("\n")[1..-1].join("\n")
end
# Returns the trace section for the given name, or `nil` if the section
# could not be found.
#
# name - The name of the trace section to find.
def find_build_trace_section(name)
trace_sections.find { |s| s[:name] == name }
end
def trace_sections
@trace_sections ||= trace.extract_sections
end
def trace
@trace ||= build.trace
end
end
end
end
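For clarity, assuming a trace with a chat_reply section spanning bytes 10..42, the reader seeks to byte 10, reads 32 bytes, and strips the first line (the echoed command):
output = Gitlab::Chat::Output.new(build)
output.read_offset_and_length # => [10, 32]
output.to_s                   # => the section body minus its first line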
# frozen_string_literal: true
module Gitlab
module Chat
module Responder
# Returns an instance of the responder to use for generating chat
# responses.
#
# This method will return `nil` if no responder is available for the given
# build.
#
# build - A `Ci::Build` that executed a chat command.
def self.responder_for(build)
service = build.pipeline.chat_data&.chat_name&.service
if (responder = service.try(:chat_responder))
responder.new(build)
end
end
end
end
end
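A small usage sketch, assuming a chat-triggered build whose ChatName is backed by the Slack service:
responder = Gitlab::Chat::Responder.responder_for(build)
responder # => an instance of Gitlab::Chat::Responder::Slack, or nil for non-chat builds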
module Gitlab
module Chat
module Responder
class Base
attr_reader :build
# build - The `Ci::Build` that was executed.
def initialize(build)
@build = build
end
def pipeline
build.pipeline
end
def project
pipeline.project
end
def success(*)
raise NotImplementedError, 'You must implement #success(output)'
end
def failure
raise NotImplementedError, 'You must implement #failure'
end
def send_response(output)
raise NotImplementedError, 'You must implement #send_response(output)'
end
def scheduled_output
raise NotImplementedError, 'You must implement #scheduled_output'
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Chat
module Responder
class Slack < Responder::Base
SUCCESS_COLOR = '#B3ED8E'
FAILURE_COLOR = '#FF5640'
RESPONSE_TYPE = :in_channel
# Slack breaks messages apart once they exceed roughly 4 KB. We use a
# slightly smaller limit here to leave room for user mentions.
MESSAGE_SIZE_LIMIT = 3.5.kilobytes
# Sends a response back to Slack
#
# output - The output to send back to Slack, as a Hash.
def send_response(output)
HTTParty.post(
pipeline.chat_data.response_url,
{
headers: { Accept: 'application/json' },
body: output.to_json
}
)
end
# Sends the output for a build that completed successfully.
#
# output - The output produced by the chat command.
def success(output)
output =
if output.empty?
'The command successfully completed but did not ' \
'write any data to STDOUT or STDERR.'
else
limit_output(output)
end
send_response(
text: message_text(output),
response_type: RESPONSE_TYPE,
attachments: [
{
color: SUCCESS_COLOR,
actions: [
view_project_button,
view_pipeline_button,
view_build_button
]
}
]
)
end
# Sends the output for a build that failed.
def failure
send_response(
text: message_text('Sorry, the build failed!'),
response_type: RESPONSE_TYPE,
attachments: [
{
color: FAILURE_COLOR,
actions: [
view_project_button,
view_pipeline_button,
view_build_button
]
}
]
)
end
# Returns the output to send back after a command has been scheduled.
def scheduled_output
{
text: message_text('The command has been scheduled!'),
attachments: [
{
actions: [
view_project_button,
view_pipeline_button,
view_build_button
]
}
]
}
end
private
def limit_output(output)
if output.bytesize <= MESSAGE_SIZE_LIMIT
output
else
'The command output is too large to be sent back directly. ' \
"The full output can be found at #{build_url}"
end
end
def mention_user
"<@#{pipeline.chat_data.chat_name.chat_id}>"
end
def message_text(output)
"#{mention_user}: #{output}"
end
def view_project_button
{
type: :button,
text: 'View Project',
url: url_helpers.project_url(project)
}
end
def view_pipeline_button
{
type: :button,
text: 'View Pipeline',
url: url_helpers.project_pipeline_url(project, pipeline)
}
end
def view_build_button
{
type: :button,
text: 'View Build',
url: build_url
}
end
def build_url
url_helpers.project_build_url(project, build)
end
def url_helpers
::Gitlab::Routing.url_helpers
end
end
end
end
end
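Roughly, the payload #success would POST to the stored response_url looks like the hash below (the mention and output text are illustrative; the button hashes are built by the private helpers above):
{
  text: '<@U123>: hello world',
  response_type: :in_channel,
  attachments: [
    {
      color: '#B3ED8E', # SUCCESS_COLOR
      actions: [view_project_button, view_pipeline_button, view_build_button]
    }
  ]
}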
# frozen_string_literal: true
module Gitlab
module SlashCommands
module Presenters
class Run < Presenters::Base
def present(pipeline)
build = pipeline.builds.take
if build && (responder = Chat::Responder.responder_for(build))
in_channel_response(responder.scheduled_output)
else
unsupported_chat_service
end
end
def unsupported_chat_service
ephemeral_response(text: 'Sorry, this chat service is currently not supported by GitLab ChatOps.')
end
def failed_to_schedule(command)
ephemeral_response(
text: 'The command could not be scheduled. Make sure that your ' \
'project has a .gitlab-ci.yml that defines a job with the ' \
"name #{command.inspect}"
)
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module SlashCommands
# Slash command for triggering chatops jobs.
class Run < BaseCommand
def self.match(text)
/\Arun\s+(?<command>\S+)(\s+(?<arguments>.+))?\z/.match(text)
end
def self.help_message
'run <command> <arguments>'
end
def self.available?(project)
Chat.available? && project.builds_enabled?
end
def self.allowed?(project, user)
can?(user, :create_pipeline, project)
end
def execute(match)
command = Chat::Command.new(
project: project,
chat_name: chat_name,
name: match[:command],
arguments: match[:arguments],
channel: params[:channel_id],
response_url: params[:response_url]
)
presenter = Gitlab::SlashCommands::Presenters::Run.new
pipeline = command.try_create_pipeline
if pipeline&.persisted?
presenter.present(pipeline)
else
presenter.failed_to_schedule(command.name)
end
end
end
end
end
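A sketch of what the match regex captures for a typical invocation (the input strings are made up):
Gitlab::SlashCommands::Run.match('run deploy production')[:command]   # => "deploy"
Gitlab::SlashCommands::Run.match('run deploy production')[:arguments] # => "production"
Gitlab::SlashCommands::Run.match('run')                               # => nil, a job name is required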
require 'spec_helper'
describe EE::Gitlab::Ci::Pipeline::Chain::RemoveUnwantedChatJobs do
let(:project) { create(:project) }
let(:pipeline) do
build(:ci_pipeline_with_one_job, project: project, ref: 'master')
end
let(:command) do
double(:command, project: project, chat_data: { command: 'echo' })
end
describe '#perform!' do
it 'removes unwanted jobs for chat pipelines' do
allow(pipeline).to receive(:chat?).and_return(true)
pipeline.config_processor.jobs[:echo] = double(:job)
described_class.new(pipeline, command).perform!
expect(pipeline.config_processor.jobs.keys).to eq([:echo])
end
end
it 'does not remove any jobs for non-chat pipelines' do
described_class.new(pipeline, command).perform!
expect(pipeline.config_processor.jobs.keys).to eq([:rspec])
end
end
require 'spec_helper'
describe Gitlab::Chat::Command do
let(:chat_name) { create(:chat_name) }
let(:command) do
described_class.new(
project: project,
chat_name: chat_name,
name: 'spinach',
arguments: 'foo',
channel: '123',
response_url: 'http://example.com'
)
end
describe '#try_create_pipeline' do
let(:project) { create(:project) }
it 'returns nil when the command is not valid' do
expect(command)
.to receive(:valid?)
.and_return(false)
expect(command.try_create_pipeline).to be_nil
end
it 'tries to create the pipeline when a command is valid' do
expect(command)
.to receive(:valid?)
.and_return(true)
expect(command)
.to receive(:create_pipeline)
command.try_create_pipeline
end
end
describe '#create_pipeline' do
let(:project) { create(:project, :test_repo) }
let(:pipeline) { command.create_pipeline }
before do
stub_repository_ci_yaml_file(sha: project.commit.id)
project.add_developer(chat_name.user)
end
it 'creates the pipeline' do
expect(pipeline).to be_persisted
end
it 'creates the chat data for the pipeline' do
expect(pipeline.chat_data).to be_an_instance_of(Ci::PipelineChatData)
end
it 'stores the chat name ID in the chat data' do
expect(pipeline.chat_data.chat_name_id).to eq(chat_name.id)
end
it 'stores the response URL in the chat data' do
expect(pipeline.chat_data.response_url).to eq('http://example.com')
end
it 'creates the environment variables for the pipeline' do
vars = pipeline.variables.each_with_object({}) do |row, hash|
hash[row.key] = row.value
end
expect(vars['CHAT_INPUT']).to eq('foo')
expect(vars['CHAT_CHANNEL']).to eq('123')
end
end
end
require 'spec_helper'
describe Gitlab::Chat::Output do
let(:build) do
create(:ci_build, pipeline: create(:ci_pipeline, source: :chat))
end
let(:output) { described_class.new(build) }
describe '#to_s' do
it 'returns the build output as a String' do
trace = Gitlab::Ci::Trace.new(build)
trace.set("echo hello\nhello")
allow(build)
.to receive(:trace)
.and_return(trace)
allow(output)
.to receive(:read_offset_and_length)
.and_return([0, 13])
expect(output.to_s).to eq('he')
end
end
describe '#read_offset_and_length' do
context 'without the chat_reply trace section' do
it 'falls back to using the build_script trace section' do
expect(output)
.to receive(:find_build_trace_section)
.with('chat_reply')
.and_return(nil)
expect(output)
.to receive(:find_build_trace_section)
.with('build_script')
.and_return({ name: 'build_script', byte_start: 1, byte_end: 4 })
expect(output.read_offset_and_length).to eq([1, 3])
end
end
context 'without the build_script trace section' do
it 'raises MissingBuildSectionError' do
expect { output.read_offset_and_length }
.to raise_error(described_class::MissingBuildSectionError)
end
end
context 'with the chat_reply trace section' do
it 'returns the read offset and length as an Array' do
trace = Gitlab::Ci::Trace.new(build)
allow(build)
.to receive(:trace)
.and_return(trace)
allow(trace)
.to receive(:extract_sections)
.and_return([{ name: 'chat_reply', byte_start: 1, byte_end: 4 }])
expect(output.read_offset_and_length).to eq([1, 3])
end
end
end
describe '#without_executed_command_line' do
it 'returns the input without the first line' do
expect(output.without_executed_command_line("hello\nworld"))
.to eq('world')
end
end
describe '#find_build_trace_section' do
it 'returns nil when no section could be found' do
expect(output.find_build_trace_section('foo')).to be_nil
end
it 'returns the trace section when it could be found' do
section = { name: 'chat_reply', byte_start: 1, byte_end: 4 }
allow(output)
.to receive(:trace_sections)
.and_return([section])
expect(output.find_build_trace_section('chat_reply')).to eq(section)
end
end
end
require 'spec_helper'
describe Gitlab::Chat::Responder::Base do
let(:project) { double(:project) }
let(:pipeline) { double(:pipeline, project: project) }
let(:build) { double(:build, pipeline: pipeline) }
let(:responder) { described_class.new(build) }
describe '#pipeline' do
it 'returns the pipeline' do
expect(responder.pipeline).to eq(pipeline)
end
end
describe '#project' do
it 'returns the project' do
expect(responder.project).to eq(project)
end
end
describe '#success' do
it 'raises NotImplementedError' do
expect { responder.success }.to raise_error(NotImplementedError)
end
end
describe '#failure' do
it 'raises NotImplementedError' do
expect { responder.failure }.to raise_error(NotImplementedError)
end
end
describe '#send_response' do
it 'raises NotImplementedError' do
expect { responder.send_response('hello') }
.to raise_error(NotImplementedError)
end
end
describe '#scheduled_output' do
it 'raises NotImplementedError' do
expect { responder.scheduled_output }
.to raise_error(NotImplementedError)
end
end
end
require 'spec_helper'
describe Gitlab::Chat::Responder::Slack do
let(:chat_name) { create(:chat_name, chat_id: 'U123') }
let(:pipeline) do
pipeline = create(:ci_pipeline)
pipeline.create_chat_data!(
response_url: 'http://example.com',
chat_name_id: chat_name.id
)
pipeline
end
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:responder) { described_class.new(build) }
describe '#send_response' do
it 'sends a response back to Slack' do
expect(HTTParty).to receive(:post).with(
'http://example.com',
{ headers: { Accept: 'application/json' }, body: 'hello'.to_json }
)
responder.send_response('hello')
end
end
describe '#success' do
it 'returns the output for a successful build' do
expect(responder)
.to receive(:send_response)
.with(hash_including(text: '<@U123>: hello', response_type: :in_channel))
responder.success('hello')
end
it 'limits the output to a fixed size' do
expect(responder)
.to receive(:send_response)
.with(hash_including(text: /The command output is too large/))
responder.success('a' * 4000)
end
it 'returns a generic message when the build did not produce any output' do
expect(responder)
.to receive(:send_response)
.with(hash_including(text: /did not write any data to STDOUT/))
responder.success('')
end
end
describe '#failure' do
it 'returns the output for a failed build' do
expect(responder).to receive(:send_response).with(
hash_including(
text: '<@U123>: Sorry, the build failed!',
response_type: :in_channel
)
)
responder.failure
end
end
describe '#scheduled_output' do
it 'returns the output for a scheduled build' do
output = responder.scheduled_output
expect(output[:text]).to eq('<@U123>: The command has been scheduled!')
end
end
end
require 'spec_helper'
describe Gitlab::Chat::Responder do
describe '.responder_for' do
context 'using a regular build' do
it 'returns nil' do
build = create(:ci_build)
expect(described_class.responder_for(build)).to be_nil
end
end
context 'using a chat build' do
it 'returns the responder for the build' do
pipeline = create(:ci_pipeline)
build = create(:ci_build, pipeline: pipeline)
service = double(:service, chat_responder: Gitlab::Chat::Responder::Slack)
chat_name = double(:chat_name, service: service)
chat_data = double(:chat_data, chat_name: chat_name)
allow(pipeline)
.to receive(:chat_data)
.and_return(chat_data)
expect(described_class.responder_for(build))
.to be_an_instance_of(Gitlab::Chat::Responder::Slack)
end
end
end
end
require 'spec_helper'
describe Gitlab::Chat, :use_clean_rails_memory_store_caching do
describe '.available?' do
it 'returns true when the chatops feature is available' do
allow(License)
.to receive(:feature_available?)
.with(:chatops)
.and_return(true)
expect(described_class).to be_available
end
it 'returns false when the chatops feature is not available' do
allow(License)
.to receive(:feature_available?)
.with(:chatops)
.and_return(false)
expect(described_class).not_to be_available
end
it 'caches the feature availability' do
expect(License)
.to receive(:feature_available?)
.once
.with(:chatops)
.and_return(true)
2.times do
described_class.available?
end
end
end
describe '.flush_available_cache' do
it 'flushes the feature availability cache' do
expect(License)
.to receive(:feature_available?)
.twice
.with(:chatops)
.and_return(true)
described_class.available?
described_class.flush_available_cache
described_class.available?
end
end
end
require 'spec_helper'
describe Gitlab::SlashCommands::Presenters::Run do
let(:presenter) { described_class.new }
describe '#present' do
context 'when no builds are present' do
it 'returns an error' do
builds = double(:builds, take: nil)
pipeline = double(:pipeline, builds: builds)
expect(presenter)
.to receive(:unsupported_chat_service)
presenter.present(pipeline)
end
end
context 'when a responder could be found' do
it 'returns the output for a scheduled pipeline' do
responder = double(:responder, scheduled_output: 'hello')
build = double(:build)
builds = double(:builds, take: build)
pipeline = double(:pipeline, builds: builds)
allow(Gitlab::Chat::Responder)
.to receive(:responder_for)
.with(build)
.and_return(responder)
expect(presenter)
.to receive(:in_channel_response)
.with('hello')
presenter.present(pipeline)
end
end
context 'when a responder could not be found' do
it 'returns an error' do
build = double(:build)
builds = double(:builds, take: build)
pipeline = double(:pipeline, builds: builds)
allow(Gitlab::Chat::Responder)
.to receive(:responder_for)
.with(build)
.and_return(nil)
expect(presenter)
.to receive(:unsupported_chat_service)
presenter.present(pipeline)
end
end
end
describe '#unsupported_chat_service' do
it 'returns an ephemeral response' do
expect(presenter)
.to receive(:ephemeral_response)
.with(text: /Sorry, this chat service is currently not supported/)
presenter.unsupported_chat_service
end
end
describe '#failed_to_schedule' do
it 'returns an ephemeral response' do
expect(presenter)
.to receive(:ephemeral_response)
.with(text: /The command could not be scheduled/)
presenter.failed_to_schedule('foo')
end
end
end
require 'spec_helper'
describe Gitlab::SlashCommands::Run do
describe '.available?' do
it 'returns true when builds are enabled for the project' do
project = double(:project, builds_enabled?: true)
allow(Gitlab::Chat)
.to receive(:available?)
.and_return(true)
expect(described_class.available?(project)).to eq(true)
end
it 'returns false when builds are disabled for the project' do
project = double(:project, builds_enabled?: false)
expect(described_class.available?(project)).to eq(false)
end
it 'returns false when chatops is not available' do
allow(Gitlab::Chat)
.to receive(:available?)
.and_return(false)
project = double(:project, builds_enabled?: true)
expect(described_class.available?(project)).to eq(false)
end
end
describe '.allowed?' do
it 'returns true when the user can create a pipeline' do
project = create(:project)
expect(described_class.allowed?(project, project.creator)).to eq(true)
end
it 'returns false when the user can not create a pipeline' do
project = create(:project)
user = create(:user)
expect(described_class.allowed?(project, user)).to eq(false)
end
end
describe '#execute' do
let(:chat_name) { create(:chat_name) }
let(:project) { create(:project) }
let(:command) do
described_class.new(project, chat_name, response_url: 'http://example.com')
end
context 'when a pipeline could not be scheduled' do
it 'returns an error' do
expect_any_instance_of(Gitlab::Chat::Command)
.to receive(:try_create_pipeline)
.and_return(nil)
expect_any_instance_of(Gitlab::SlashCommands::Presenters::Run)
.to receive(:failed_to_schedule)
.with('foo')
command.execute(command: 'foo', arguments: '')
end
end
context 'when a pipeline could be created but the chat service was not supported' do
it 'returns an error' do
build = double(:build)
pipeline = double(
:pipeline,
builds: double(:relation, take: build),
persisted?: true
)
expect_any_instance_of(Gitlab::Chat::Command)
.to receive(:try_create_pipeline)
.and_return(pipeline)
expect(Gitlab::Chat::Responder)
.to receive(:responder_for)
.with(build)
.and_return(nil)
expect_any_instance_of(Gitlab::SlashCommands::Presenters::Run)
.to receive(:unsupported_chat_service)
command.execute(command: 'foo', arguments: '')
end
end
context 'using a valid pipeline' do
it 'schedules the pipeline' do
responder = double(:responder, scheduled_output: 'hello')
build = double(:build)
pipeline = double(
:pipeline,
builds: double(:relation, take: build),
persisted?: true
)
expect_any_instance_of(Gitlab::Chat::Command)
.to receive(:try_create_pipeline)
.and_return(pipeline)
expect(Gitlab::Chat::Responder)
.to receive(:responder_for)
.with(build)
.and_return(responder)
expect_any_instance_of(Gitlab::SlashCommands::Presenters::Run)
.to receive(:in_channel_response)
.with(responder.scheduled_output)
command.execute(command: 'foo', arguments: '')
end
end
end
end
......@@ -8,6 +8,8 @@ describe Ci::Pipeline do
create(:ci_empty_pipeline, status: :created, project: project)
end
it { is_expected.to have_one(:chat_data) }
describe '.failure_reasons' do
it 'contains failure reasons about exceeded limits' do
expect(described_class.failure_reasons)
......
require 'spec_helper'
describe SlackSlashCommandsService do
describe '#chat_responder' do
it 'returns the responder to use for Slack' do
expect(described_class.new.chat_responder)
.to eq(Gitlab::Chat::Responder::Slack)
end
end
end
require 'spec_helper'
describe BuildFinishedWorker do
describe '#perform' do
it 'schedules a ChatNotification job for a chat build' do
build = create(:ci_build, pipeline: create(:ci_pipeline, source: :chat))
expect(ChatNotificationWorker)
.to receive(:perform_async)
.with(build.id)
described_class.new.perform(build.id)
end
it 'does not schedule a ChatNotification job for a regular build' do
build = create(:ci_build, pipeline: create(:ci_pipeline))
expect(ChatNotificationWorker)
.not_to receive(:perform_async)
described_class.new.perform(build.id)
end
end
end
require 'spec_helper'
describe ChatNotificationWorker do
let(:worker) { described_class.new }
let(:chat_build) do
create(:ci_build, pipeline: create(:ci_pipeline, source: :chat))
end
describe '#perform' do
it 'does nothing when the build no longer exists' do
expect(worker).not_to receive(:send_response)
worker.perform(-1)
end
it 'sends a response for an existing build' do
expect(worker)
.to receive(:send_response)
.with(an_instance_of(Ci::Build))
worker.perform(chat_build.id)
end
it 'reschedules the job if the trace sections could not be found' do
expect(worker)
.to receive(:send_response)
.and_raise(Gitlab::Chat::Output::MissingBuildSectionError)
expect(described_class)
.to receive(:perform_in)
.with(described_class::RESCHEDULE_INTERVAL, chat_build.id)
worker.perform(chat_build.id)
end
end
describe '#send_response' do
context 'when a responder could not be found' do
it 'does nothing' do
expect(Gitlab::Chat::Responder)
.to receive(:responder_for)
.with(chat_build)
.and_return(nil)
expect(worker.send_response(chat_build)).to be_nil
end
end
context 'when a responder could be found' do
let(:responder) { double(:responder) }
before do
allow(Gitlab::Chat::Responder)
.to receive(:responder_for)
.with(chat_build)
.and_return(responder)
end
it 'sends the response for a succeeded build' do
output = double(:output, to_s: 'this is the build output')
expect(chat_build)
.to receive(:success?)
.and_return(true)
expect(responder)
.to receive(:success)
.with(an_instance_of(String))
expect(Gitlab::Chat::Output)
.to receive(:new)
.with(chat_build)
.and_return(output)
worker.send_response(chat_build)
end
it 'sends the response for a failed build' do
expect(chat_build)
.to receive(:success?)
.and_return(false)
expect(responder).to receive(:failure)
worker.send_response(chat_build)
end
end
end
end
......@@ -10,7 +10,8 @@ module Gitlab
:seeds_block,
# EE specific
:allow_mirror_update
:allow_mirror_update,
:chat_data
) do
include Gitlab::Utils::StrongMemoize
......
......@@ -5,7 +5,8 @@ module Gitlab
Gitlab::SlashCommands::IssueShow,
Gitlab::SlashCommands::IssueNew,
Gitlab::SlashCommands::IssueSearch,
Gitlab::SlashCommands::Deploy
Gitlab::SlashCommands::Deploy,
Gitlab::SlashCommands::Run
].freeze
def execute
......
......@@ -126,6 +126,7 @@ pipelines:
- sourced_pipelines
- triggered_by_pipeline
- triggered_pipelines
- chat_data # EE only
pipeline_variables:
- pipeline
stages:
......