Commit fd3a95f0 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 6a7005fe
......@@ -233,7 +233,7 @@ export default {
:key="assignee.id"
:link-href="assigneeUrl(assignee)"
:img-alt="avatarUrlTitle(assignee)"
:img-src="assignee.avatar"
:img-src="assignee.avatar || assignee.avatar_url"
:img-size="24"
class="js-no-trigger"
tooltip-placement="bottom"
......
import dateformat from 'dateformat';
import { pick, omit, isEqual, isEmpty } from 'lodash';
import { secondsToMilliseconds } from './datetime_utility';
const MINIMUM_DATE = new Date(0);
......@@ -221,3 +222,99 @@ export function getRangeType(range) {
*/
export const convertToFixedRange = dateTimeRange =>
handlers[getRangeType(dateTimeRange)](dateTimeRange);
/**
* Returns a copy of the object only with time range
* properties relevant to time range calculation.
*
* The properties kept after filtering are:
* - 'start'
* - 'end'
* - 'anchor'
* - 'duration'
* - 'direction': if the direction is already the default, it is removed.
*
* @param {Object} timeRange - A time range object
* @returns {Object} Copy of the time range
*/
const pruneTimeRange = timeRange => {
const res = pick(timeRange, ['start', 'end', 'anchor', 'duration', 'direction']);
if (res.direction === DEFAULT_DIRECTION) {
return omit(res, 'direction');
}
return res;
};
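// Illustrative sketch (not part of the module): given the helper above, and
// assuming DEFAULT_DIRECTION is the module's default direction constant,
// pruning drops presentation-only keys and the default direction:
//
//   pruneTimeRange({ start: '2020-01-01T00:00:00Z', label: 'Last day', direction: DEFAULT_DIRECTION });
//   // => { start: '2020-01-01T00:00:00Z' }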
/**
* Returns true if the time ranges are equal according to
* the time range calculation properties
*
* @param {Object} timeRange - A time range object
* @param {Object} other - Time range object to compare with.
* @returns true if the time ranges are equal, false otherwise
*/
export const isEqualTimeRanges = (timeRange, other) => {
const tr1 = pruneTimeRange(timeRange);
const tr2 = pruneTimeRange(other);
return isEqual(tr1, tr2);
};
/**
* Searches for a time range in an array of time ranges using
* only the properties relevant to time range calculation.
*
* @param {Object} timeRange - Time range to search for (needle)
* @param {Array} timeRanges - Array of time ranges (haystack)
*/
export const findTimeRange = (timeRange, timeRanges) =>
timeRanges.find(element => isEqualTimeRanges(element, timeRange));
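// For example (illustrative sketch): the comparison ignores properties that
// are not relevant to time range calculation, such as a `label`, so a stored
// range still matches:
//
//   isEqualTimeRanges({ duration: { seconds: 3600 } }, { label: '1 hour', duration: { seconds: 3600 } });
//   // => true
//   findTimeRange({ duration: { seconds: 3600 } }, [{ label: '1 hour', duration: { seconds: 3600 } }]);
//   // => the '1 hour' entry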
// Time Ranges as URL Parameters Utils
/**
* List of possible time ranges parameters
*/
export const timeRangeParamNames = ['start', 'end', 'anchor', 'duration_seconds', 'direction'];
/**
* Converts a valid time range to a flat key-value pairs object.
*
* The duration is flattened to avoid having nested objects.
*
* @param {Object} timeRange - A time range
* @returns {Object} Key-value pairs object that can be used as parameters in a URL.
*/
export const timeRangeToParams = timeRange => {
let params = pruneTimeRange(timeRange);
if (timeRange.duration) {
const durationParams = {};
Object.keys(timeRange.duration).forEach(key => {
durationParams[`duration_${key}`] = timeRange.duration[key].toString();
});
params = { ...durationParams, ...params };
params = omit(params, 'duration');
}
return params;
};
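// Sketch of the flattening (illustrative values), for a rolling range of 120
// seconds: the `duration` object becomes a flat, string-valued param:
//
//   timeRangeToParams({ label: 'Last 2 minutes', duration: { seconds: 120 } });
//   // => { duration_seconds: '120' }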
/**
* Converts a valid set of flat params to a time range object
*
* Parameters that are not part of time range object are ignored.
*
* @param {Object} params - Key-value pairs object.
* @returns {Object} A time range object, or `null` if no time range params are present.
*/
export const timeRangeFromParams = params => {
const timeRangeParams = pick(params, timeRangeParamNames);
let range = Object.entries(timeRangeParams).reduce((acc, [key, val]) => {
// unflatten duration
if (key.startsWith('duration_')) {
acc.duration = acc.duration || {};
acc.duration[key.slice('duration_'.length)] = parseInt(val, 10);
return acc;
}
return { [key]: val, ...acc };
}, {});
range = pruneTimeRange(range);
return !isEmpty(range) ? range : null;
};
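// Sketch of the reverse direction (illustrative values): unrelated params are
// ignored and `duration_*` keys are unflattened back into a `duration` object:
//
//   timeRangeFromParams({ dashboard: 'my_dashboard.yml', duration_seconds: '120' });
//   // => { duration: { seconds: 120 } }
//   timeRangeFromParams({ dashboard: 'my_dashboard.yml' });
//   // => null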
import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
import {
timeRangeParamNames,
timeRangeFromParams,
timeRangeToParams,
} from '~/lib/utils/datetime_range';
/**
* This method is used to validate if the graph data format for a chart component
* that needs a time series as a response from a prometheus query (query_range) is
......@@ -93,4 +100,35 @@ export const graphDataValidatorForAnomalyValues = graphData => {
);
};
/**
* Returns a time range from the current URL params
*
* @returns {Object} The time range defined by the
* current URL, reading from `window.location.search`
*/
export const timeRangeFromUrl = (search = window.location.search) => {
const params = queryToObject(search);
return timeRangeFromParams(params);
};
/**
* Returns a URL with no time range based on the current URL.
*
* @param {String} url - URL to remove time range parameters from
* @returns {String} URL without time range parameters
*/
export const removeTimeRangeParams = (url = window.location.href) =>
removeParams(timeRangeParamNames, url);
/**
* Returns a URL for a different time range based on the
* current URL and a time range.
*
* @param {Object} timeRange - A time range
* @param {String} url - Base URL
* @returns {String} URL with the time range parameters
*/
export const timeRangeToUrl = (timeRange, url = window.location.href) => {
const toUrl = removeTimeRangeParams(url);
const params = timeRangeToParams(timeRange);
return mergeUrlParams(params, toUrl);
};
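// Putting the URL helpers together (illustrative sketch, assuming a browser
// URL of `https://example.com/metrics?duration_seconds=120`; real output is
// URL-encoded by `mergeUrlParams`):
//
//   timeRangeFromUrl();      // => { duration: { seconds: 120 } }
//   removeTimeRangeParams(); // => 'https://example.com/metrics'
//   timeRangeToUrl({ start: '2020-01-01T00:00:00Z', end: '2020-01-02T00:00:00Z' });
//   // => roughly 'https://example.com/metrics?start=2020-01-01T00:00:00Z&end=2020-01-02T00:00:00Z'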
export default {};
......@@ -64,7 +64,7 @@ A new serializer should inherit from a `BaseSerializer` class. It is necessary
to specify which serialization entity will be used to serialize a resource.
```ruby
class MyResourceSerializer < BaseSerialize
class MyResourceSerializer < BaseSerializer
entity MyResourceEntity
end
```
......
......@@ -12,8 +12,46 @@ module WorkerContext
@worker_context || superclass_context
end
def bulk_perform_async_with_contexts(objects, arguments_proc:, context_proc:)
with_batch_contexts(objects, arguments_proc, context_proc) do |arguments|
bulk_perform_async(arguments)
end
end
def bulk_perform_in_with_contexts(delay, objects, arguments_proc:, context_proc:)
with_batch_contexts(objects, arguments_proc, context_proc) do |arguments|
bulk_perform_in(delay, arguments)
end
end
def context_for_arguments(args)
batch_context&.context_for(args)
end
private
BATCH_CONTEXT_KEY = "#{name}_batch_context"
def batch_context
Thread.current[BATCH_CONTEXT_KEY]
end
def batch_context=(value)
Thread.current[BATCH_CONTEXT_KEY] = value
end
def with_batch_contexts(objects, arguments_proc, context_proc)
self.batch_context = Gitlab::BatchWorkerContext.new(
objects,
arguments_proc: arguments_proc,
context_proc: context_proc
)
yield(batch_context.arguments)
ensure
self.batch_context = nil
end
def superclass_context
return unless superclass.include?(WorkerContext)
......
---
title: MVC for assignee avatars disappearing when opening issue sidebar in board
merge_request:
author: Oregand
type: fixed
# Project import/export administration **(CORE ONLY)**
>**Note:**
>
> - [Introduced][ce-3050] in GitLab 8.9.
> - Importing will not be possible if the import instance version is lower
> than that of the exporter.
> - For existing installations, the project import option has to be enabled in
> application settings (`/admin/application_settings`) under 'Import sources'.
> - The exports are stored in a temporary [shared directory][tmp] and are deleted
> every 24 hours by a specific worker.
> - ImportExport can use object storage automatically starting from GitLab 11.3
The GitLab Import/Export version can be checked by using:
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/3050) in GitLab 8.9.
> - From GitLab 11.3, import/export can use object storage automatically.
See also:
- [Project import/export documentation](../../user/project/settings/import_export.md).
- [Project import/export API](../../api/project_import_export.md).
The GitLab import/export version can be checked by using the following command:
```shell
# Omnibus installations
......@@ -21,7 +18,7 @@ sudo gitlab-rake gitlab:import_export:version
bundle exec rake gitlab:import_export:version RAILS_ENV=production
```
The current list of DB tables that will get exported can be listed by using:
The current list of DB tables that will be exported can be listed by using the following command:
```shell
# Omnibus installations
......@@ -31,5 +28,13 @@ sudo gitlab-rake gitlab:import_export:data
bundle exec rake gitlab:import_export:data RAILS_ENV=production
```
[ce-3050]: https://gitlab.com/gitlab-org/gitlab-foss/issues/3050
[tmp]: ../../development/shared_files.md
## Important notes
Note the following:
- Importing is not possible if the version of the import instance is older than that of the exporter.
- The project import option must be enabled in
application settings (`/admin/application_settings`) under **Import sources**, which is available
under **{admin}** **Admin Area >** **{settings}** **Settings > Visibility and access controls**.
- The exports are stored in a temporary [shared directory](../../development/shared_files.md)
and are deleted every 24 hours by a specific worker.
# Project import/export API
> [Introduced][ce-41899] in GitLab 10.6.
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/41899) in GitLab 10.6.
See also the [project import/export documentation](../user/project/settings/import_export.md).
See also:
- [Project import/export documentation](../user/project/settings/import_export.md).
- [Project import/export administration rake tasks](../administration/raketasks/project_import_export.md). **(CORE ONLY)**
## Schedule an export
Start a new export.
The endpoint also accepts an `upload` param. This param is a hash that contains
The endpoint also accepts an `upload` parameter. This parameter is a hash that contains
all the necessary information to upload the exported project to a web server or
to any S3-compatible platform. At the moment we only support binary
data file uploads to the final server.
If the `upload` params is present, `upload[url]` param is required.
(**Note:** This feature was introduced in GitLab 10.7)
From GitLab 10.7, the `upload[url]` parameter is required if the `upload` parameter is present.
```text
POST /projects/:id/export
......@@ -56,8 +58,14 @@ GET /projects/:id/export
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/export
```
Status can be one of `none`, `started`, `after_export_action` or `finished`. The
`after_export_action` state represents that the export process has been completed successfully and
Status can be one of:
- `none`
- `started`
- `after_export_action`
- `finished`
The `after_export_action` state represents that the export process has been completed successfully and
the platform is performing some actions on the resulting file. For example, sending
an email notifying the user to download the file, or uploading the exported file
to a web server.
......@@ -178,7 +186,13 @@ GET /projects/:id/import
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/import
```
Status can be one of `none`, `scheduled`, `failed`, `started`, or `finished`.
Status can be one of:
- `none`
- `scheduled`
- `failed`
- `started`
- `finished`
If the status is `failed`, it will include the import error message under `import_error`.
......@@ -194,5 +208,3 @@ If the status is `failed`, it will include the import error message under `impor
"import_status": "started"
}
```
[ce-41899]: https://gitlab.com/gitlab-org/gitlab-foss/issues/41899
# Project import/export
>**Notes:**
>
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/3050) in GitLab 8.9.
> - Importing will not be possible if the import instance version differs from
> that of the exporter.
> - For GitLab admins, please read through
> [Project import/export administration](../../../administration/raketasks/project_import_export.md).
> - For existing installations, the project import option has to be enabled in
> application settings (`/admin/application_settings`) under 'Import sources'.
> Ask your administrator if you don't see the **GitLab export** button when
> creating a new project.
> - Starting with GitLab 10.0, administrators can disable the project export option
> on the GitLab instance in application settings (`/admin/application_settings`)
> under 'Visibility and Access Controls'.
> - You can find some useful raketasks if you are an administrator in the
> [import_export](../../../administration/raketasks/project_import_export.md) raketask.
> - The exports are stored in a temporary [shared directory](../../../development/shared_files.md)
> and are deleted every 24 hours by a specific worker.
> - Group members will get exported as project members, as long as the user has
> maintainer or admin access to the group where the exported project lives. An admin
> in the import side is required to map the users, based on email or username.
> Otherwise, a supplementary comment is left to mention the original author and
> the MRs, notes or issues will be owned by the importer.
> - Project members with owner access will get imported as maintainers.
> - Control project Import/Export with the [API](../../../api/project_import_export.md).
> - If an imported project contains merge requests originated from forks,
> then new branches associated with such merge requests will be created
> within a project during the import/export. Thus, the number of branches
> in the exported project could be bigger than in the original project.
Existing projects running on any GitLab instance or GitLab.com can be exported
with all their related data and be moved into a new GitLab instance.
> - From GitLab 10.0, administrators can disable the project export option on the GitLab instance.
Existing projects running on any GitLab instance or GitLab.com can be exported with all their related
data and be moved into a new GitLab instance.
See also:
- [Project import/export API](../../../api/project_import_export.md).
- [Project import/export administration rake tasks](../../../administration/raketasks/project_import_export.md). **(CORE ONLY)**
## Important notes
Note the following:
- Importing is not possible if the import instance version differs from
that of the exporter.
- The project import option must be enabled in
application settings (`/admin/application_settings`) under **Import sources**, which is
available under **{admin}** **Admin Area >** **{settings}** **Settings > Visibility and access controls**.
Ask your administrator if you don't see the **GitLab export** button when
creating a new project.
- The exports are stored in a temporary [shared directory](../../../development/shared_files.md)
and are deleted every 24 hours by a specific worker.
- Group members are exported as project members, as long as the user has
maintainer or admin access to the group where the exported project lives. An admin
on the import side is required to map the users, based on email or username.
Otherwise, a supplementary comment is left to mention the original author and
the MRs, notes, or issues will be owned by the importer.
- Project members with owner access will be imported as maintainers.
- If an imported project contains merge requests originating from forks,
then new branches associated with such merge requests will be created
within a project during the import/export. Thus, the number of branches
in the exported project could be bigger than in the original project.
## Version history
The following table lists updates to Import/Export:
| GitLab version | Import/Export version |
| ---------------- | --------------------- |
| 11.1 to current | 0.2.4 |
......@@ -52,9 +56,8 @@ with all their related data and be moved into a new GitLab instance.
| 8.9.5 | 0.1.1 |
| 8.9.0 | 0.1.0 |
> The table reflects what GitLab version we updated the Import/Export version at.
> For instance, 8.10.3 and 8.11 will have the same Import/Export version (0.1.3)
> and the exports between them will be compatible.
For example, 8.10.3 and 8.11 will have the same Import/Export version (0.1.3)
and the exports between them will be compatible.
## Exported contents
......@@ -88,7 +91,7 @@ For more details on the specific data persisted in a project export, see the
1. Go to your project's homepage.
1. Click **Settings** in the sidebar.
1. Click **{settings}** **Settings** in the sidebar.
1. Scroll down to find the **Export project** button:
......
# frozen_string_literal: true
module Gitlab
class BatchWorkerContext
def initialize(objects, arguments_proc:, context_proc:)
@objects = objects
@arguments_proc = arguments_proc
@context_proc = context_proc
end
def arguments
context_by_arguments.keys
end
def context_for(arguments)
context_by_arguments[arguments]
end
private
attr_reader :objects, :arguments_proc, :context_proc
def context_by_arguments
@context_by_arguments ||= objects.each_with_object({}) do |object, result|
arguments = Array.wrap(arguments_proc.call(object))
context = Gitlab::ApplicationContext.new(context_proc.call(object))
result[arguments] = context
end
end
end
end
......@@ -29,6 +29,7 @@ module Gitlab
lambda do |chain|
chain.add Gitlab::SidekiqStatus::ClientMiddleware
chain.add Gitlab::SidekiqMiddleware::ClientMetrics
chain.add Gitlab::SidekiqMiddleware::WorkerContext::Client # needs to be before the Labkit middleware
chain.add Labkit::Middleware::Sidekiq::Client
end
end
......
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module WorkerContext
private
def wrap_in_optional_context(context_or_nil, &block)
return yield unless context_or_nil
context_or_nil.use(&block)
end
end
end
end
# frozen_string_literal: true
module Gitlab
module SidekiqMiddleware
module WorkerContext
class Client
include Gitlab::SidekiqMiddleware::WorkerContext
def call(worker_class_or_name, job, _queue, _redis_pool, &block)
worker_class = worker_class_or_name.to_s.safe_constantize
# Mailers can't be constantized like this
return yield unless worker_class
return yield unless worker_class.include?(::ApplicationWorker)
context_for_args = worker_class.context_for_arguments(job['args'])
wrap_in_optional_context(context_for_args, &block)
end
end
end
end
end
......@@ -4,6 +4,8 @@ module Gitlab
module SidekiqMiddleware
module WorkerContext
class Server
include Gitlab::SidekiqMiddleware::WorkerContext
def call(worker, job, _queue, &block)
worker_class = worker.class
......@@ -13,14 +15,6 @@ module Gitlab
# Use the context defined on the class level as a base context
wrap_in_optional_context(worker_class.get_worker_context, &block)
end
private
def wrap_in_optional_context(context, &block)
return yield unless context
context.use(&block)
end
end
end
end
......
......@@ -111,7 +111,7 @@ describe HelpController do
it 'renders the raw file' do
get :show,
params: {
path: 'user/project/img/labels_default_v12_1'
path: 'fixtures/gitlab_tanuki'
},
format: :png
expect(response).to be_successful
......
......@@ -97,6 +97,9 @@ describe('Issue card component', () => {
issue: {
...wrapper.props('issue'),
assignees: [user],
updateData(newData) {
Object.assign(this, newData);
},
},
});
......@@ -118,6 +121,28 @@ describe('Issue card component', () => {
it('renders avatar', () => {
expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
});
it('renders the avatar using avatar_url property', done => {
wrapper.props('issue').updateData({
...wrapper.props('issue'),
assignees: [
{
id: '1',
name: 'test',
state: 'active',
username: 'test_name',
avatar_url: 'test_image_from_avatar_url',
},
],
});
wrapper.vm.$nextTick(() => {
expect(wrapper.find('.board-card-assignee img').attributes('src')).toBe(
'test_image_from_avatar_url?width=24',
);
done();
});
});
});
describe('assignee default avatar', () => {
......
import * as monitoringUtils from '~/monitoring/utils';
import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
import {
mockHost,
mockProjectDir,
graphDataPrometheusQuery,
graphDataPrometheusQueryRange,
anomalyMockGraphData,
} from './mock_data';
jest.mock('~/lib/utils/url_utility');
const mockPath = `${mockHost}${mockProjectDir}/-/environments/29/metrics`;
const generatedLink = 'http://chart.link.com';
const chartTitle = 'Some metric chart';
const range = {
start: '2019-01-01T00:00:00.000Z',
end: '2019-01-10T00:00:00.000Z',
};
const rollingRange = {
duration: { seconds: 120 },
};
describe('monitoring/utils', () => {
const generatedLink = 'http://chart.link.com';
const chartTitle = 'Some metric chart';
afterEach(() => {
mergeUrlParams.mockReset();
queryToObject.mockReset();
});
describe('trackGenerateLinkToChartEventOptions', () => {
it('should return Cluster Monitoring options if located on Cluster Health Dashboard', () => {
......@@ -117,4 +139,75 @@ describe('monitoring/utils', () => {
expect(monitoringUtils.graphDataValidatorForAnomalyValues(fourMetrics)).toBe(false);
});
});
describe('timeRangeFromUrl', () => {
const { timeRangeFromUrl } = monitoringUtils;
it('returns a fixed range when the query contains `start` and `end` parameters', () => {
queryToObject.mockReturnValueOnce(range);
expect(timeRangeFromUrl()).toEqual(range);
});
it('returns a rolling range when the query contains a `duration_seconds` parameter', () => {
const { seconds } = rollingRange.duration;
queryToObject.mockReturnValueOnce({
dashboard: '.gitlab/dashboard/my_dashboard.yml',
duration_seconds: `${seconds}`,
});
expect(timeRangeFromUrl()).toEqual(rollingRange);
});
it('returns null when no time range parameters are given', () => {
const params = {
dashboard: '.gitlab/dashboards/custom_dashboard.yml',
param1: 'value1',
param2: 'value2',
};
queryToObject.mockReturnValueOnce(params);
expect(timeRangeFromUrl()).toBe(null);
});
});
describe('removeTimeRangeParams', () => {
const { removeTimeRangeParams } = monitoringUtils;
it('removes `start` and `end` time range parameters from the URL', () => {
removeParams.mockReturnValueOnce(mockPath);
expect(removeTimeRangeParams(`${mockPath}?start=${range.start}&end=${range.end}`)).toEqual(
mockPath,
);
});
});
describe('timeRangeToUrl', () => {
const { timeRangeToUrl } = monitoringUtils;
it('adds `start` and `end` parameters to the URL for a fixed range', () => {
const toUrl = `${mockPath}?start=${range.start}&end=${range.end}`;
const fromUrl = mockPath;
removeParams.mockReturnValueOnce(fromUrl);
mergeUrlParams.mockReturnValueOnce(toUrl);
expect(timeRangeToUrl(range)).toEqual(toUrl);
expect(mergeUrlParams).toHaveBeenCalledWith(range, fromUrl);
});
it('adds a `duration_seconds` parameter to the URL for a rolling range', () => {
const { seconds } = rollingRange.duration;
const toUrl = `${mockPath}?duration_seconds=${seconds}`;
const fromUrl = mockPath;
removeParams.mockReturnValueOnce(fromUrl);
mergeUrlParams.mockReturnValueOnce(toUrl);
expect(timeRangeToUrl(rollingRange)).toEqual(toUrl);
expect(mergeUrlParams).toHaveBeenCalledWith({ duration_seconds: `${seconds}` }, fromUrl);
});
});
});
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BatchWorkerContext do
subject(:batch_context) do
described_class.new(
%w(hello world),
arguments_proc: -> (word) { word },
context_proc: -> (word) { { user: build_stubbed(:user, username: word) } }
)
end
describe "#arguments" do
it "returns all the expected arguments in arrays" do
expect(batch_context.arguments).to eq([%w(hello), %w(world)])
end
end
describe "#context_for" do
it "returns the correct application context for the arguments" do
context = batch_context.context_for(%w(world))
expect(context).to be_a(Gitlab::ApplicationContext)
expect(context.to_lazy_hash[:user].call).to eq("world")
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
let(:worker_class) do
Class.new do
def self.name
'TestWithContextWorker'
end
include ApplicationWorker
def self.job_for_args(args)
jobs.find { |job| job['args'] == args }
end
def perform(*args)
end
end
end
before do
stub_const('TestWithContextWorker', worker_class)
end
describe "#call" do
it 'applies a context for jobs scheduled in batch' do
user_per_job = { 'job1' => build_stubbed(:user, username: 'user-1'),
'job2' => build_stubbed(:user, username: 'user-2') }
TestWithContextWorker.bulk_perform_async_with_contexts(
%w(job1 job2),
arguments_proc: -> (name) { [name, 1, 2, 3] },
context_proc: -> (name) { { user: user_per_job[name] } }
)
job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3])
expect(job1['meta.user']).to eq(user_per_job['job1'].username)
expect(job2['meta.user']).to eq(user_per_job['job2'].username)
end
end
end
......@@ -110,6 +110,14 @@ describe Gitlab::SidekiqMiddleware do
let(:queue) { 'default' }
let(:redis_pool) { Sidekiq.redis_pool }
let(:middleware_expected_args) { [worker_class_arg, job, queue, redis_pool] }
let(:expected_middlewares) do
[
Gitlab::SidekiqStatus::ClientMiddleware,
Gitlab::SidekiqMiddleware::ClientMetrics,
Gitlab::SidekiqMiddleware::WorkerContext::Client,
Labkit::Middleware::Sidekiq::Client
]
end
before do
described_class.client_configurator.call(chain)
......@@ -120,8 +128,9 @@ describe Gitlab::SidekiqMiddleware do
# this will prevent the full middleware chain from being executed.
# This test ensures that this does not happen
it "invokes the chain" do
expect_any_instance_of(Gitlab::SidekiqStatus::ClientMiddleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
expect_any_instance_of(Labkit::Middleware::Sidekiq::Client).to receive(:call).with(*middleware_expected_args).once.and_call_original
expected_middlewares.each do |middleware|
expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.ordered.and_call_original
end
expect { |b| chain.invoke(worker_class_arg, job, queue, redis_pool, &b) }.to yield_control.once
end
......
# frozen_string_literal: true
module DbCleaner
def delete_from_all_tables!(except: nil)
def delete_from_all_tables!(except: [])
except << 'ar_internal_metadata'
DatabaseCleaner.clean_with(:deletion, cache_tables: false, except: except)
end
......
......@@ -5,7 +5,11 @@ require 'spec_helper'
describe WorkerContext do
let(:worker) do
Class.new do
include WorkerContext
def self.name
"TestWorker"
end
include ApplicationWorker
end
end
......@@ -24,6 +28,78 @@ describe WorkerContext do
end
end
shared_examples 'tracking bulk scheduling contexts' do
describe "context contents" do
before do
# stub clearing the contexts, so we can check what's inside
allow(worker).to receive(:batch_context=).and_call_original
allow(worker).to receive(:batch_context=).with(nil)
end
it 'keeps track of the context per key to schedule' do
subject
expect(worker.context_for_arguments(["hello"])).to be_a(Gitlab::ApplicationContext)
end
it 'does not share contexts across threads' do
t1_context = nil
t2_context = nil
Thread.new do
subject
t1_context = worker.context_for_arguments(["hello"])
end.join
Thread.new do
t2_context = worker.context_for_arguments(["hello"])
end.join
expect(t1_context).to be_a(Gitlab::ApplicationContext)
expect(t2_context).to be_nil
end
end
it 'clears the contexts' do
subject
expect(worker.__send__(:batch_context)).to be_nil
end
end
describe '.bulk_perform_async_with_contexts' do
subject do
worker.bulk_perform_async_with_contexts(%w(hello world),
context_proc: -> (_) { { user: build_stubbed(:user) } },
arguments_proc: -> (word) { word })
end
it 'calls bulk_perform_async with the arguments' do
expect(worker).to receive(:bulk_perform_async).with([["hello"], ["world"]])
subject
end
it_behaves_like 'tracking bulk scheduling contexts'
end
describe '.bulk_perform_in_with_contexts' do
subject do
worker.bulk_perform_in_with_contexts(10.minutes,
%w(hello world),
context_proc: -> (_) { { user: build_stubbed(:user) } },
arguments_proc: -> (word) { word })
end
it 'calls bulk_perform_in with the arguments and delay' do
expect(worker).to receive(:bulk_perform_in).with(10.minutes, [["hello"], ["world"]])
subject
end
it_behaves_like 'tracking bulk scheduling contexts'
end
describe '#with_context' do
it 'allows modifying context when the job is running' do
worker.new.with_context(user: build_stubbed(:user, username: 'jane-doe')) do
......