Commit fd3a95f0 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 6a7005fe
...@@ -233,7 +233,7 @@ export default { ...@@ -233,7 +233,7 @@ export default {
:key="assignee.id" :key="assignee.id"
:link-href="assigneeUrl(assignee)" :link-href="assigneeUrl(assignee)"
:img-alt="avatarUrlTitle(assignee)" :img-alt="avatarUrlTitle(assignee)"
:img-src="assignee.avatar" :img-src="assignee.avatar || assignee.avatar_url"
:img-size="24" :img-size="24"
class="js-no-trigger" class="js-no-trigger"
tooltip-placement="bottom" tooltip-placement="bottom"
......
import dateformat from 'dateformat'; import dateformat from 'dateformat';
import { pick, omit, isEqual, isEmpty } from 'lodash';
import { secondsToMilliseconds } from './datetime_utility'; import { secondsToMilliseconds } from './datetime_utility';
const MINIMUM_DATE = new Date(0); const MINIMUM_DATE = new Date(0);
...@@ -221,3 +222,99 @@ export function getRangeType(range) { ...@@ -221,3 +222,99 @@ export function getRangeType(range) {
*/ */
export const convertToFixedRange = dateTimeRange => export const convertToFixedRange = dateTimeRange =>
handlers[getRangeType(dateTimeRange)](dateTimeRange); handlers[getRangeType(dateTimeRange)](dateTimeRange);
/**
 * Returns a copy of the given time range containing only the
 * properties relevant to time range calculation.
 *
 * Kept properties are:
 * - 'start'
 * - 'end'
 * - 'anchor'
 * - 'duration'
 * - 'direction': dropped when it equals the default direction.
 *
 * @param {Object} timeRange - A time range object
 * @returns {Object} Pruned copy of the time range
 */
const pruneTimeRange = timeRange => {
  const pruned = pick(timeRange, ['start', 'end', 'anchor', 'duration', 'direction']);
  return pruned.direction === DEFAULT_DIRECTION ? omit(pruned, 'direction') : pruned;
};
/**
 * Compares two time ranges for equality, considering only the
 * properties relevant to time range calculation.
 *
 * @param {Object} timeRange - A time range object
 * @param {Object} other - Time range object to compare with.
 * @returns {Boolean} true if the time ranges are equal, false otherwise
 */
export const isEqualTimeRanges = (timeRange, other) =>
  isEqual(pruneTimeRange(timeRange), pruneTimeRange(other));
/**
 * Searches an array of time ranges for one equivalent to the given
 * range, comparing only the properties relevant to time range
 * calculation.
 *
 * @param {Object} timeRange - Time range to search for (needle)
 * @param {Array} timeRanges - Array of time ranges (haystack)
 * @returns {Object|undefined} The matching time range, if any
 */
export const findTimeRange = (timeRange, timeRanges) =>
  timeRanges.find(candidate => isEqualTimeRanges(candidate, timeRange));
// Time Ranges as URL Parameters Utils

/**
 * List of possible URL parameter names that encode a time range.
 *
 * `duration_seconds` is the flattened form of `duration: { seconds }`.
 */
export const timeRangeParamNames = ['start', 'end', 'anchor', 'duration_seconds', 'direction'];
/**
 * Converts a valid time range into a flat object of key-value pairs.
 *
 * The `duration` object is flattened (e.g. `duration.seconds` becomes
 * `duration_seconds`) to avoid having nested objects.
 *
 * @param {Object} timeRange - A time range
 * @returns {Object} key-value pairs object that can be used as parameters in a URL.
 */
export const timeRangeToParams = timeRange => {
  const params = pruneTimeRange(timeRange);
  if (!timeRange.duration) {
    return params;
  }
  // Flatten each duration unit into a `duration_<unit>` string parameter.
  const durationParams = Object.entries(timeRange.duration).reduce(
    (acc, [unit, value]) => ({ ...acc, [`duration_${unit}`]: value.toString() }),
    {},
  );
  return { ...durationParams, ...omit(params, 'duration') };
};
/**
 * Converts a valid set of flat params to a time range object.
 *
 * Parameters that are not part of a time range are ignored, and
 * flattened duration parameters (e.g. `duration_seconds`) are
 * unflattened back into a `duration` object.
 *
 * @param {Object} params - key-value pairs object.
 * @returns {Object|null} A time range, or null when the params define none.
 */
export const timeRangeFromParams = params => {
  const relevantParams = pick(params, timeRangeParamNames);
  const timeRange = Object.entries(relevantParams).reduce((acc, [name, value]) => {
    if (name.startsWith('duration_')) {
      // unflatten duration
      const unit = name.slice('duration_'.length);
      return { ...acc, duration: { ...acc.duration, [unit]: parseInt(value, 10) } };
    }
    return { [name]: value, ...acc };
  }, {});
  const pruned = pruneTimeRange(timeRange);
  return isEmpty(pruned) ? null : pruned;
};
import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
import {
timeRangeParamNames,
timeRangeFromParams,
timeRangeToParams,
} from '~/lib/utils/datetime_range';
/** /**
* This method is used to validate if the graph data format for a chart component * This method is used to validate if the graph data format for a chart component
* that needs a time series as a response from a prometheus query (query_range) is * that needs a time series as a response from a prometheus query (query_range) is
...@@ -93,4 +100,35 @@ export const graphDataValidatorForAnomalyValues = graphData => { ...@@ -93,4 +100,35 @@ export const graphDataValidatorForAnomalyValues = graphData => {
); );
}; };
/**
 * Returns a time range from URL query parameters.
 *
 * @param {String} search - URL query string, defaults to
 * `window.location.search`.
 * @returns {Object} The time range defined by the query string,
 * or null if it does not define one.
 */
export const timeRangeFromUrl = (search = window.location.search) => {
  const params = queryToObject(search);
  return timeRangeFromParams(params);
};
/**
 * Returns a URL with all time range parameters removed.
 *
 * @param {String} url - URL to strip, defaults to `window.location.href`.
 * @returns {String} New URL without time range parameters.
 */
export const removeTimeRangeParams = (url = window.location.href) =>
  removeParams(timeRangeParamNames, url);
/**
 * Returns a URL that encodes the given time range, based on an
 * existing URL.
 *
 * @param {Object} timeRange - Time range to encode as query parameters.
 * @param {String} url - Base URL, defaults to `window.location.href`.
 * @returns {String} New URL for the given time range.
 */
export const timeRangeToUrl = (timeRange, url = window.location.href) => {
  const baseUrl = removeTimeRangeParams(url);
  return mergeUrlParams(timeRangeToParams(timeRange), baseUrl);
};
export default {}; export default {};
...@@ -64,7 +64,7 @@ A new serializer should inherit from a `BaseSerializer` class. It is necessary ...@@ -64,7 +64,7 @@ A new serializer should inherit from a `BaseSerializer` class. It is necessary
to specify which serialization entity will be used to serialize a resource. to specify which serialization entity will be used to serialize a resource.
```ruby ```ruby
class MyResourceSerializer < BaseSerialize class MyResourceSerializer < BaseSerializer
entity MyResourceEntity entity MyResourceEntity
end end
``` ```
......
...@@ -12,8 +12,46 @@ module WorkerContext ...@@ -12,8 +12,46 @@ module WorkerContext
@worker_context || superclass_context @worker_context || superclass_context
end end
# Schedules multiple jobs at once (via `bulk_perform_async`) while
# recording an application context for each job.
#
# objects        - collection used to derive each job's arguments and context
# arguments_proc - called per object, returns that job's arguments
# context_proc   - called per object, returns that job's context hash
def bulk_perform_async_with_contexts(objects, arguments_proc:, context_proc:)
  with_batch_contexts(objects, arguments_proc, context_proc) do |arguments|
    bulk_perform_async(arguments)
  end
end
# Schedules multiple delayed jobs at once (via `bulk_perform_in`) while
# recording an application context for each job. `delay` is the time
# before the jobs run.
def bulk_perform_in_with_contexts(delay, objects, arguments_proc:, context_proc:)
  with_batch_contexts(objects, arguments_proc, context_proc) do |arguments|
    bulk_perform_in(delay, arguments)
  end
end
# Looks up the context stored for a job's arguments during the current
# batch scheduling. Returns nil when no batch is being scheduled on
# this thread.
def context_for_arguments(args)
  batch_context&.context_for(args)
end
private private
# Thread-local key under which the batch context is stored.
# NOTE(review): `name` is evaluated when this module is defined, so the
# key appears to be shared by every including worker class — confirm
# this is intended if batches from different workers could interleave
# on one thread.
BATCH_CONTEXT_KEY = "#{name}_batch_context"

# Reads the batch context for the current thread (nil outside a batch).
def batch_context
  Thread.current[BATCH_CONTEXT_KEY]
end

# Stores (or clears, when given nil) the batch context for the current thread.
def batch_context=(value)
  Thread.current[BATCH_CONTEXT_KEY] = value
end
# Builds a batch context for the objects and yields the per-job
# arguments. The context lives in a thread local so the Sidekiq client
# middleware can attach it to the scheduled jobs, and it is always
# cleared afterwards (`ensure`), even when scheduling raises.
def with_batch_contexts(objects, arguments_proc, context_proc)
  self.batch_context = Gitlab::BatchWorkerContext.new(
    objects,
    arguments_proc: arguments_proc,
    context_proc: context_proc
  )
  yield(batch_context.arguments)
ensure
  self.batch_context = nil
end
def superclass_context def superclass_context
return unless superclass.include?(WorkerContext) return unless superclass.include?(WorkerContext)
......
---
title: MVC for assignees avatar disappearing when opening issue sidebar in board
merge_request:
author: Oregand
type: fixed
# Project import/export administration **(CORE ONLY)** # Project import/export administration **(CORE ONLY)**
>**Note:** > - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/3050) in GitLab 8.9.
> > - From GitLab 11.3, import/export can use object storage automatically.
> - [Introduced][ce-3050] in GitLab 8.9.
> - Importing will not be possible if the import instance version is lower See also:
> than that of the exporter.
> - For existing installations, the project import option has to be enabled in - [Project import/export documentation](../../user/project/settings/import_export.md).
> application settings (`/admin/application_settings`) under 'Import sources'. - [Project import/export API](../../api/project_import_export.md).
> - The exports are stored in a temporary [shared directory][tmp] and are deleted
> every 24 hours by a specific worker. The GitLab import/export version can be checked by using the following command:
> - ImportExport can use object storage automatically starting from GitLab 11.3
The GitLab Import/Export version can be checked by using:
```shell ```shell
# Omnibus installations # Omnibus installations
...@@ -21,7 +18,7 @@ sudo gitlab-rake gitlab:import_export:version ...@@ -21,7 +18,7 @@ sudo gitlab-rake gitlab:import_export:version
bundle exec rake gitlab:import_export:version RAILS_ENV=production bundle exec rake gitlab:import_export:version RAILS_ENV=production
``` ```
The current list of DB tables that will get exported can be listed by using: The current list of DB tables that will be exported can be listed by using the following command:
```shell ```shell
# Omnibus installations # Omnibus installations
...@@ -31,5 +28,13 @@ sudo gitlab-rake gitlab:import_export:data ...@@ -31,5 +28,13 @@ sudo gitlab-rake gitlab:import_export:data
bundle exec rake gitlab:import_export:data RAILS_ENV=production bundle exec rake gitlab:import_export:data RAILS_ENV=production
``` ```
[ce-3050]: https://gitlab.com/gitlab-org/gitlab-foss/issues/3050 ## Important notes
[tmp]: ../../development/shared_files.md
Note the following:
- Importing is not possible if the version of the import instance is older than that of the exporter.
- The project import option must be enabled in
application settings (`/admin/application_settings`) under **Import sources**, which is available
under **{admin}** **Admin Area >** **{settings}** **Settings > Visibility and access controls**.
- The exports are stored in a temporary [shared directory](../../development/shared_files.md)
and are deleted every 24 hours by a specific worker.
# Project import/export API # Project import/export API
> [Introduced][ce-41899] in GitLab 10.6. > [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/41899) in GitLab 10.6.
See also the [project import/export documentation](../user/project/settings/import_export.md). See also:
- [Project import/export documentation](../user/project/settings/import_export.md).
- [Project import/export administration rake tasks](../administration/raketasks/project_import_export.md). **(CORE ONLY)**
## Schedule an export ## Schedule an export
Start a new export. Start a new export.
The endpoint also accepts an `upload` param. This param is a hash that contains The endpoint also accepts an `upload` parameter. This parameter is a hash that contains
all the necessary information to upload the exported project to a web server or all the necessary information to upload the exported project to a web server or
to any S3-compatible platform. At the moment we only support binary to any S3-compatible platform. At the moment we only support binary
data file uploads to the final server. data file uploads to the final server.
If the `upload` params is present, `upload[url]` param is required. From GitLab 10.7, the `upload[url]` parameter is required if the `upload` parameter is present.
(**Note:** This feature was introduced in GitLab 10.7)
```text ```text
POST /projects/:id/export POST /projects/:id/export
...@@ -56,8 +58,14 @@ GET /projects/:id/export ...@@ -56,8 +58,14 @@ GET /projects/:id/export
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/export curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/export
``` ```
Status can be one of `none`, `started`, `after_export_action` or `finished`. The Status can be one of:
`after_export_action` state represents that the export process has been completed successfully and
- `none`
- `started`
- `after_export_action`
- `finished`
The `after_export_action` state represents that the export process has been completed successfully and
the platform is performing some actions on the resulted file. For example, sending the platform is performing some actions on the resulted file. For example, sending
an email notifying the user to download the file, uploading the exported file an email notifying the user to download the file, uploading the exported file
to a web server, etc. to a web server, etc.
...@@ -178,7 +186,13 @@ GET /projects/:id/import ...@@ -178,7 +186,13 @@ GET /projects/:id/import
curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/import curl --header "PRIVATE-TOKEN: <your_access_token>" https://gitlab.example.com/api/v4/projects/1/import
``` ```
Status can be one of `none`, `scheduled`, `failed`, `started`, or `finished`. Status can be one of:
- `none`
- `scheduled`
- `failed`
- `started`
- `finished`
If the status is `failed`, it will include the import error message under `import_error`. If the status is `failed`, it will include the import error message under `import_error`.
...@@ -194,5 +208,3 @@ If the status is `failed`, it will include the import error message under `impor ...@@ -194,5 +208,3 @@ If the status is `failed`, it will include the import error message under `impor
"import_status": "started" "import_status": "started"
} }
``` ```
[ce-41899]: https://gitlab.com/gitlab-org/gitlab-foss/issues/41899
# Project import/export # Project import/export
>**Notes:**
>
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/3050) in GitLab 8.9. > - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/3050) in GitLab 8.9.
> - Importing will not be possible if the import instance version differs from > - From GitLab 10.0, administrators can disable the project export option on the GitLab instance.
> that of the exporter.
> - For GitLab admins, please read through Existing projects running on any GitLab instance or GitLab.com can be exported with all their related
> [Project import/export administration](../../../administration/raketasks/project_import_export.md). data and be moved into a new GitLab instance.
> - For existing installations, the project import option has to be enabled in
> application settings (`/admin/application_settings`) under 'Import sources'. See also:
> Ask your administrator if you don't see the **GitLab export** button when
> creating a new project. - [Project import/export API](../../../api/project_import_export.md).
> - Starting with GitLab 10.0, administrators can disable the project export option - [Project import/export administration rake tasks](../../../administration/raketasks/project_import_export.md). **(CORE ONLY)**
> on the GitLab instance in application settings (`/admin/application_settings`)
> under 'Visibility and Access Controls'. ## Important notes
> - You can find some useful raketasks if you are an administrator in the
> [import_export](../../../administration/raketasks/project_import_export.md) raketask. Note the following:
> - The exports are stored in a temporary [shared directory](../../../development/shared_files.md)
> and are deleted every 24 hours by a specific worker. - Importing is not possible if the import instance version differs from
> - Group members will get exported as project members, as long as the user has that of the exporter.
> maintainer or admin access to the group where the exported project lives. An admin - The project import option must be enabled in
> in the import side is required to map the users, based on email or username. application settings (`/admin/application_settings`) under **Import sources**, which is
> Otherwise, a supplementary comment is left to mention the original author and available under **{admin}** **Admin Area >** **{settings}** **Settings > Visibility and access controls**.
> the MRs, notes or issues will be owned by the importer. Ask your administrator if you don't see the **GitLab export** button when
> - Project members with owner access will get imported as maintainers. creating a new project.
> - Control project Import/Export with the [API](../../../api/project_import_export.md). - The exports are stored in a temporary [shared directory](../../../development/shared_files.md)
> - If an imported project contains merge requests originated from forks, and are deleted every 24 hours by a specific worker.
> then new branches associated with such merge requests will be created - Group members are exported as project members, as long as the user has
> within a project during the import/export. Thus, the number of branches maintainer or admin access to the group where the exported project lives. An admin
> in the exported project could be bigger than in the original project. in the import side is required to map the users, based on email or username.
Otherwise, a supplementary comment is left to mention the original author and
Existing projects running on any GitLab instance or GitLab.com can be exported the MRs, notes, or issues will be owned by the importer.
with all their related data and be moved into a new GitLab instance. - Project members with owner access will be imported as maintainers.
- If an imported project contains merge requests originating from forks,
then new branches associated with such merge requests will be created
within a project during the import/export. Thus, the number of branches
in the exported project could be bigger than in the original project.
## Version history ## Version history
The following table lists updates to Import/Export:
| GitLab version | Import/Export version | | GitLab version | Import/Export version |
| ---------------- | --------------------- | | ---------------- | --------------------- |
| 11.1 to current | 0.2.4 | | 11.1 to current | 0.2.4 |
...@@ -52,9 +56,8 @@ with all their related data and be moved into a new GitLab instance. ...@@ -52,9 +56,8 @@ with all their related data and be moved into a new GitLab instance.
| 8.9.5 | 0.1.1 | | 8.9.5 | 0.1.1 |
| 8.9.0 | 0.1.0 | | 8.9.0 | 0.1.0 |
> The table reflects what GitLab version we updated the Import/Export version at. For example, 8.10.3 and 8.11 will have the same Import/Export version (0.1.3)
> For instance, 8.10.3 and 8.11 will have the same Import/Export version (0.1.3) and the exports between them will be compatible.
> and the exports between them will be compatible.
## Exported contents ## Exported contents
...@@ -88,7 +91,7 @@ For more details on the specific data persisted in a project export, see the ...@@ -88,7 +91,7 @@ For more details on the specific data persisted in a project export, see the
1. Go to your project's homepage. 1. Go to your project's homepage.
1. Click **Settings** in the sidebar. 1. Click **{settings}** **Settings** in the sidebar.
1. Scroll down to find the **Export project** button: 1. Scroll down to find the **Export project** button:
......
# frozen_string_literal: true

module Gitlab
  # Maps job arguments to application contexts for a batch of objects,
  # used when bulk-scheduling Sidekiq jobs with per-job contexts.
  class BatchWorkerContext
    def initialize(objects, arguments_proc:, context_proc:)
      @objects = objects
      @arguments_proc = arguments_proc
      @context_proc = context_proc
    end

    # All argument arrays for the batch, one per object.
    def arguments
      context_by_arguments.keys
    end

    # The context recorded for a specific argument array, or nil.
    def context_for(arguments)
      context_by_arguments[arguments]
    end

    private

    attr_reader :objects, :arguments_proc, :context_proc

    # Lazily builds the arguments => context mapping, one entry per object.
    # Arguments are normalized to arrays with Array.wrap.
    def context_by_arguments
      @context_by_arguments ||= objects.each_with_object({}) do |object, mapping|
        args = Array.wrap(arguments_proc.call(object))
        mapping[args] = Gitlab::ApplicationContext.new(context_proc.call(object))
      end
    end
  end
end
...@@ -29,6 +29,7 @@ module Gitlab ...@@ -29,6 +29,7 @@ module Gitlab
lambda do |chain| lambda do |chain|
chain.add Gitlab::SidekiqStatus::ClientMiddleware chain.add Gitlab::SidekiqStatus::ClientMiddleware
chain.add Gitlab::SidekiqMiddleware::ClientMetrics chain.add Gitlab::SidekiqMiddleware::ClientMetrics
chain.add Gitlab::SidekiqMiddleware::WorkerContext::Client # needs to be before the Labkit middleware
chain.add Labkit::Middleware::Sidekiq::Client chain.add Labkit::Middleware::Sidekiq::Client
end end
end end
......
# frozen_string_literal: true

module Gitlab
  module SidekiqMiddleware
    # Shared helper for the WorkerContext client/server middlewares.
    module WorkerContext
      private

      # Runs the block inside the given context when one is present;
      # otherwise just runs the block directly.
      def wrap_in_optional_context(context_or_nil, &block)
        context_or_nil ? context_or_nil.use(&block) : yield
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module SidekiqMiddleware
    module WorkerContext
      # Sidekiq client middleware: when a job is scheduled as part of a
      # batch with contexts, wraps the scheduling call in that job's
      # recorded context.
      class Client
        include Gitlab::SidekiqMiddleware::WorkerContext

        def call(worker_class_or_name, job, _queue, _redis_pool, &block)
          worker_class = worker_class_or_name.to_s.safe_constantize

          # Mailers can't be constantized like this
          return yield if worker_class.nil?
          return yield unless worker_class.include?(::ApplicationWorker)

          wrap_in_optional_context(worker_class.context_for_arguments(job['args']), &block)
        end
      end
    end
  end
end
...@@ -4,6 +4,8 @@ module Gitlab ...@@ -4,6 +4,8 @@ module Gitlab
module SidekiqMiddleware module SidekiqMiddleware
module WorkerContext module WorkerContext
class Server class Server
include Gitlab::SidekiqMiddleware::WorkerContext
def call(worker, job, _queue, &block) def call(worker, job, _queue, &block)
worker_class = worker.class worker_class = worker.class
...@@ -13,14 +15,6 @@ module Gitlab ...@@ -13,14 +15,6 @@ module Gitlab
# Use the context defined on the class level as a base context # Use the context defined on the class level as a base context
wrap_in_optional_context(worker_class.get_worker_context, &block) wrap_in_optional_context(worker_class.get_worker_context, &block)
end end
private
def wrap_in_optional_context(context, &block)
return yield unless context
context.use(&block)
end
end end
end end
end end
......
...@@ -111,7 +111,7 @@ describe HelpController do ...@@ -111,7 +111,7 @@ describe HelpController do
it 'renders the raw file' do it 'renders the raw file' do
get :show, get :show,
params: { params: {
path: 'user/project/img/labels_default_v12_1' path: 'fixtures/gitlab_tanuki'
}, },
format: :png format: :png
expect(response).to be_successful expect(response).to be_successful
......
...@@ -97,6 +97,9 @@ describe('Issue card component', () => { ...@@ -97,6 +97,9 @@ describe('Issue card component', () => {
issue: { issue: {
...wrapper.props('issue'), ...wrapper.props('issue'),
assignees: [user], assignees: [user],
updateData(newData) {
Object.assign(this, newData);
},
}, },
}); });
...@@ -118,6 +121,28 @@ describe('Issue card component', () => { ...@@ -118,6 +121,28 @@ describe('Issue card component', () => {
it('renders avatar', () => { it('renders avatar', () => {
expect(wrapper.find('.board-card-assignee img').exists()).toBe(true); expect(wrapper.find('.board-card-assignee img').exists()).toBe(true);
}); });
// Regression test: assignees coming from the API may only provide
// `avatar_url` (no `avatar`), and the avatar image should still render.
it('renders the avatar using avatar_url property', done => {
  // Replace the assignee with one that has `avatar_url` but no `avatar`.
  wrapper.props('issue').updateData({
    ...wrapper.props('issue'),
    assignees: [
      {
        id: '1',
        name: 'test',
        state: 'active',
        username: 'test_name',
        avatar_url: 'test_image_from_avatar_url',
      },
    ],
  });

  // Wait for the re-render triggered by the data update before asserting.
  wrapper.vm.$nextTick(() => {
    expect(wrapper.find('.board-card-assignee img').attributes('src')).toBe(
      'test_image_from_avatar_url?width=24',
    );
    done();
  });
});
}); });
describe('assignee default avatar', () => { describe('assignee default avatar', () => {
......
import * as monitoringUtils from '~/monitoring/utils'; import * as monitoringUtils from '~/monitoring/utils';
import { queryToObject, mergeUrlParams, removeParams } from '~/lib/utils/url_utility';
import { import {
mockHost,
mockProjectDir,
graphDataPrometheusQuery, graphDataPrometheusQuery,
graphDataPrometheusQueryRange, graphDataPrometheusQueryRange,
anomalyMockGraphData, anomalyMockGraphData,
} from './mock_data'; } from './mock_data';
jest.mock('~/lib/utils/url_utility');
const mockPath = `${mockHost}${mockProjectDir}/-/environments/29/metrics`;
const generatedLink = 'http://chart.link.com';
const chartTitle = 'Some metric chart';
const range = {
start: '2019-01-01T00:00:00.000Z',
end: '2019-01-10T00:00:00.000Z',
};
const rollingRange = {
duration: { seconds: 120 },
};
describe('monitoring/utils', () => { describe('monitoring/utils', () => {
const generatedLink = 'http://chart.link.com'; afterEach(() => {
const chartTitle = 'Some metric chart'; mergeUrlParams.mockReset();
queryToObject.mockReset();
});
describe('trackGenerateLinkToChartEventOptions', () => { describe('trackGenerateLinkToChartEventOptions', () => {
it('should return Cluster Monitoring options if located on Cluster Health Dashboard', () => { it('should return Cluster Monitoring options if located on Cluster Health Dashboard', () => {
...@@ -117,4 +139,75 @@ describe('monitoring/utils', () => { ...@@ -117,4 +139,75 @@ describe('monitoring/utils', () => {
expect(monitoringUtils.graphDataValidatorForAnomalyValues(fourMetrics)).toBe(false); expect(monitoringUtils.graphDataValidatorForAnomalyValues(fourMetrics)).toBe(false);
}); });
}); });
describe('timeRangeFromUrl', () => {
  const { timeRangeFromUrl } = monitoringUtils;

  it('returns a fixed range when the query contains `start` and `end` parameters', () => {
    queryToObject.mockReturnValueOnce(range);
    expect(timeRangeFromUrl()).toEqual(range);
  });

  it('returns a rolling range when the query contains a `duration_seconds` parameter', () => {
    const { seconds } = rollingRange.duration;

    queryToObject.mockReturnValueOnce({
      dashboard: '.gitlab/dashboard/my_dashboard.yml',
      duration_seconds: `${seconds}`,
    });

    expect(timeRangeFromUrl()).toEqual(rollingRange);
  });

  it('returns null when the query contains no time range parameters', () => {
    // The params must flow through the mocked `queryToObject`; passing
    // them as arguments to `timeRangeFromUrl` would never reach the
    // function's parsing logic.
    queryToObject.mockReturnValueOnce({
      dashboard: '.gitlab/dashboards/custom_dashboard.yml',
      param1: 'value1',
      param2: 'value2',
    });

    expect(timeRangeFromUrl()).toBe(null);
  });
});
describe('removeTimeRangeParams', () => {
  const { removeTimeRangeParams } = monitoringUtils;

  it('removes the time range parameters from a URL with `start` and `end`', () => {
    removeParams.mockReturnValueOnce(mockPath);

    expect(removeTimeRangeParams(`${mockPath}?start=${range.start}&end=${range.end}`)).toEqual(
      mockPath,
    );
  });
});
describe('timeRangeToUrl', () => {
  const { timeRangeToUrl } = monitoringUtils;

  it('converts a fixed range with `start` and `end` into URL parameters', () => {
    const toUrl = `${mockPath}?start=${range.start}&end=${range.end}`;
    const fromUrl = mockPath;

    removeParams.mockReturnValueOnce(fromUrl);
    mergeUrlParams.mockReturnValueOnce(toUrl);

    expect(timeRangeToUrl(range)).toEqual(toUrl);
    expect(mergeUrlParams).toHaveBeenCalledWith(range, fromUrl);
  });

  it('converts a rolling range with `duration_seconds` into URL parameters', () => {
    const { seconds } = rollingRange.duration;
    const toUrl = `${mockPath}?duration_seconds=${seconds}`;
    const fromUrl = mockPath;

    removeParams.mockReturnValueOnce(fromUrl);
    mergeUrlParams.mockReturnValueOnce(toUrl);

    expect(timeRangeToUrl(rollingRange)).toEqual(toUrl);
    expect(mergeUrlParams).toHaveBeenCalledWith({ duration_seconds: `${seconds}` }, fromUrl);
  });
});
}); });
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::BatchWorkerContext do
  subject(:batch_context) do
    described_class.new(
      %w(hello world),
      arguments_proc: -> (word) { word },
      context_proc: -> (word) { { user: build_stubbed(:user, username: word) } }
    )
  end

  describe "#arguments" do
    it "returns all the expected arguments in arrays" do
      # Scalar per-object arguments are wrapped into arrays (Array.wrap).
      expect(batch_context.arguments).to eq([%w(hello), %w(world)])
    end
  end

  describe "#context_for" do
    it "returns the correct application context for the arguments" do
      context = batch_context.context_for(%w(world))

      expect(context).to be_a(Gitlab::ApplicationContext)
      # The context resolves the user lazily; calling the stored lambda
      # should yield the username derived from the batch object.
      expect(context.to_lazy_hash[:user].call).to eq("world")
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::SidekiqMiddleware::WorkerContext::Client do
  let(:worker_class) do
    Class.new do
      def self.name
        'TestWithContextWorker'
      end

      include ApplicationWorker

      # Finds the enqueued (fake) job whose arguments match `args`.
      def self.job_for_args(args)
        jobs.find { |job| job['args'] == args }
      end

      def perform(*args)
      end
    end
  end

  before do
    # The anonymous class needs a stable constant name so the client
    # middleware can constantize it from the job payload.
    stub_const('TestWithContextWorker', worker_class)
  end

  describe "#call" do
    it 'applies a context for jobs scheduled in batch' do
      user_per_job = { 'job1' => build_stubbed(:user, username: 'user-1'),
                       'job2' => build_stubbed(:user, username: 'user-2') }

      TestWithContextWorker.bulk_perform_async_with_contexts(
        %w(job1 job2),
        arguments_proc: -> (name) { [name, 1, 2, 3] },
        context_proc: -> (name) { { user: user_per_job[name] } }
      )

      job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3])
      job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3])

      # Each job's payload should be stamped with the context metadata
      # for its own user, not a shared one.
      expect(job1['meta.user']).to eq(user_per_job['job1'].username)
      expect(job2['meta.user']).to eq(user_per_job['job2'].username)
    end
  end
end
...@@ -110,6 +110,14 @@ describe Gitlab::SidekiqMiddleware do ...@@ -110,6 +110,14 @@ describe Gitlab::SidekiqMiddleware do
let(:queue) { 'default' } let(:queue) { 'default' }
let(:redis_pool) { Sidekiq.redis_pool } let(:redis_pool) { Sidekiq.redis_pool }
let(:middleware_expected_args) { [worker_class_arg, job, queue, redis_pool] } let(:middleware_expected_args) { [worker_class_arg, job, queue, redis_pool] }
# The exact client middlewares, in installation order; keep in sync
# with Gitlab::SidekiqMiddleware.client_configurator.
let(:expected_middlewares) do
  [
    Gitlab::SidekiqStatus::ClientMiddleware,
    Gitlab::SidekiqMiddleware::ClientMetrics,
    Gitlab::SidekiqMiddleware::WorkerContext::Client,
    Labkit::Middleware::Sidekiq::Client
  ]
end
before do before do
described_class.client_configurator.call(chain) described_class.client_configurator.call(chain)
...@@ -120,8 +128,9 @@ describe Gitlab::SidekiqMiddleware do ...@@ -120,8 +128,9 @@ describe Gitlab::SidekiqMiddleware do
# this will prevent the full middleware chain from being executed. # this will prevent the full middleware chain from being executed.
# This test ensures that this does not happen # This test ensures that this does not happen
it "invokes the chain" do it "invokes the chain" do
expect_any_instance_of(Gitlab::SidekiqStatus::ClientMiddleware).to receive(:call).with(*middleware_expected_args).once.and_call_original expected_middlewares do |middleware|
expect_any_instance_of(Labkit::Middleware::Sidekiq::Client).to receive(:call).with(*middleware_expected_args).once.and_call_original expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.ordered.and_call_original
end
expect { |b| chain.invoke(worker_class_arg, job, queue, redis_pool, &b) }.to yield_control.once expect { |b| chain.invoke(worker_class_arg, job, queue, redis_pool, &b) }.to yield_control.once
end end
......
# frozen_string_literal: true # frozen_string_literal: true
module DbCleaner module DbCleaner
def delete_from_all_tables!(except: nil) def delete_from_all_tables!(except: [])
except << 'ar_internal_metadata'
DatabaseCleaner.clean_with(:deletion, cache_tables: false, except: except) DatabaseCleaner.clean_with(:deletion, cache_tables: false, except: except)
end end
......
...@@ -5,7 +5,11 @@ require 'spec_helper' ...@@ -5,7 +5,11 @@ require 'spec_helper'
describe WorkerContext do describe WorkerContext do
let(:worker) do let(:worker) do
Class.new do Class.new do
include WorkerContext def self.name
"TestWorker"
end
include ApplicationWorker
end end
end end
...@@ -24,6 +28,78 @@ describe WorkerContext do ...@@ -24,6 +28,78 @@ describe WorkerContext do
end end
end end
shared_examples 'tracking bulk scheduling contexts' do
  describe "context contents" do
    before do
      # stub clearing the contexts, so we can check what's inside
      allow(worker).to receive(:batch_context=).and_call_original
      allow(worker).to receive(:batch_context=).with(nil)
    end

    it 'keeps track of the context per key to schedule' do
      subject

      expect(worker.context_for_arguments(["hello"])).to be_a(Gitlab::ApplicationContext)
    end

    it 'does not share contexts across threads' do
      t1_context = nil
      t2_context = nil

      # Schedule in one thread, then read from another: the batch
      # context lives in a thread local, so the second thread must not
      # see the first thread's context.
      Thread.new do
        subject
        t1_context = worker.context_for_arguments(["hello"])
      end.join
      Thread.new do
        t2_context = worker.context_for_arguments(["hello"])
      end.join

      expect(t1_context).to be_a(Gitlab::ApplicationContext)
      expect(t2_context).to be_nil
    end
  end

  it 'clears the contexts' do
    subject

    # Without the stubs above, scheduling must leave no context behind.
    expect(worker.__send__(:batch_context)).to be_nil
  end
end
describe '.bulk_perform_async_with_contexts' do
  subject do
    worker.bulk_perform_async_with_contexts(%w(hello world),
                                            context_proc: -> (_) { { user: build_stubbed(:user) } },
                                            arguments_proc: -> (word) { word })
  end

  it 'calls bulk_perform_async with the arguments' do
    # Scalar per-object arguments are wrapped into arrays before scheduling.
    expect(worker).to receive(:bulk_perform_async).with([["hello"], ["world"]])

    subject
  end

  it_behaves_like 'tracking bulk scheduling contexts'
end
describe '.bulk_perform_in_with_contexts' do
  subject do
    worker.bulk_perform_in_with_contexts(10.minutes,
                                         %w(hello world),
                                         context_proc: -> (_) { { user: build_stubbed(:user) } },
                                         arguments_proc: -> (word) { word })
  end

  it 'calls bulk_perform_in with the arguments and delay' do
    # The delay is forwarded first, followed by the wrapped arguments.
    expect(worker).to receive(:bulk_perform_in).with(10.minutes, [["hello"], ["world"]])

    subject
  end

  it_behaves_like 'tracking bulk scheduling contexts'
end
describe '#with_context' do describe '#with_context' do
it 'allows modifying context when the job is running' do it 'allows modifying context when the job is running' do
worker.new.with_context(user: build_stubbed(:user, username: 'jane-doe')) do worker.new.with_context(user: build_stubbed(:user, username: 'jane-doe')) do
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment