Commit 39f12a7f authored by Illya Klymov

Add pagination and filtering to group import

* Add a status line showing how many groups are available
* Basic pagination
* Filtering
parent 3c037436
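A minimal sketch of the new data flow, assuming hypothetical header values: the REST pagination headers returned by the source instance become the pageInfo of a client-side { nodes, pageInfo } connection, and the table derives its "Showing X-Y of Z" range from it.

// Hypothetical values; the header names match the ones forwarded by the controller below.
const headers = { 'x-page': '2', 'x-per-page': '20', 'x-total': '38', 'x-total-pages': '2' };

const pageInfo = {
  page: parseInt(headers['x-page'], 10),
  perPage: parseInt(headers['x-per-page'], 10),
  total: parseInt(headers['x-total'], 10),
  totalPages: parseInt(headers['x-total-pages'], 10),
};

// Range shown in the status line, mirroring the paginationInfo computed below.
const nodesOnPage = 1;
const start = (pageInfo.page - 1) * pageInfo.perPage + 1; // 21
const end = start + nodesOnPage - 1; // 21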
<script>
import { GlLoadingIcon } from '@gitlab/ui';
import {
GlEmptyState,
GlIcon,
GlLink,
GlLoadingIcon,
GlSearchBoxByClick,
GlSprintf,
} from '@gitlab/ui';
import { s__ } from '~/locale';
import bulkImportSourceGroupsQuery from '../graphql/queries/bulk_import_source_groups.query.graphql';
import availableNamespacesQuery from '../graphql/queries/available_namespaces.query.graphql';
import setTargetNamespaceMutation from '../graphql/mutations/set_target_namespace.mutation.graphql';
import setNewNameMutation from '../graphql/mutations/set_new_name.mutation.graphql';
import importGroupMutation from '../graphql/mutations/import_group.mutation.graphql';
import ImportTableRow from './import_table_row.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
const mapApolloMutations = (mutations) =>
Object.fromEntries(
......@@ -22,16 +31,81 @@ const mapApolloMutations = (mutations) =>
export default {
components: {
GlEmptyState,
GlIcon,
GlLink,
GlLoadingIcon,
GlSearchBoxByClick,
GlSprintf,
ImportTableRow,
PaginationLinks,
},
props: {
sourceUrl: {
type: String,
required: true,
},
},
data() {
return {
filter: '',
page: 1,
};
},
apollo: {
bulkImportSourceGroups: bulkImportSourceGroupsQuery,
bulkImportSourceGroups: {
query: bulkImportSourceGroupsQuery,
variables() {
return { page: this.page, filter: this.filter };
},
},
availableNamespaces: availableNamespacesQuery,
},
computed: {
hasGroups() {
return this.bulkImportSourceGroups?.nodes?.length > 0;
},
hasEmptyFilter() {
return this.filter.length > 0 && !this.hasGroups;
},
statusMessage() {
return this.filter.length === 0
? s__('BulkImport|Showing %{start}-%{end} of %{total} from %{link}')
: s__(
'BulkImport|Showing %{start}-%{end} of %{total} matching filter "%{filter}" from %{link}',
);
},
paginationInfo() {
const { page, perPage, total } = this.bulkImportSourceGroups?.pageInfo ?? {
page: 1,
perPage: 0,
total: 0,
};
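// Example: page 2 with perPage 20 and a single node on the page
// yields start = 21, end = 21 ("Showing 21-21 of <total>").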
const start = (page - 1) * perPage + 1;
const end = start + (this.bulkImportSourceGroups?.nodes?.length ?? 0) - 1;
return { start, end, total };
},
},
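// Changing the filter resets pagination; because variables() above is reactive,
// Apollo refetches bulkImportSourceGroups with the updated page and filter.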
watch: {
filter() {
this.page = 1;
},
},
methods: {
setPage(page) {
this.page = page;
},
...mapApolloMutations({
setTargetNamespace: setTargetNamespaceMutation,
setNewName: setNewNameMutation,
......@@ -43,36 +117,74 @@ export default {
<template>
<div>
<gl-loading-icon v-if="$apollo.loading" size="md" class="gl-mt-5" />
<div v-else-if="bulkImportSourceGroups.length">
<table class="gl-w-full">
<thead class="gl-border-solid gl-border-gray-200 gl-border-0 gl-border-b-1">
<th class="gl-py-4 import-jobs-from-col">{{ s__('BulkImport|From source group') }}</th>
<th class="gl-py-4 import-jobs-to-col">{{ s__('BulkImport|To new group') }}</th>
<th class="gl-py-4 import-jobs-status-col">{{ __('Status') }}</th>
<th class="gl-py-4 import-jobs-cta-col"></th>
</thead>
<tbody>
<template v-for="group in bulkImportSourceGroups">
<import-table-row
:key="group.id"
:group="group"
:available-namespaces="availableNamespaces"
@update-target-namespace="
setTargetNamespace({
variables: { sourceGroupId: group.id, targetNamespace: $event },
})
"
@update-new-name="
setNewName({
variables: { sourceGroupId: group.id, newName: $event },
})
"
@import-group="importGroup({ variables: { sourceGroupId: group.id } })"
/>
<div
class="gl-py-5 gl-border-solid gl-border-gray-200 gl-border-0 gl-border-b-1 gl-display-flex gl-align-items-center"
>
<span>
<gl-sprintf v-if="!$apollo.loading" :message="statusMessage">
<template #start>
<strong>{{ paginationInfo.start }}</strong>
</template>
</tbody>
</table>
<template #end>
<strong>{{ paginationInfo.end }}</strong>
</template>
<template #total>
<strong>{{ n__('%d group', '%d groups', paginationInfo.total) }}</strong>
</template>
<template #filter>
<strong>{{ filter }}</strong>
</template>
<template #link>
<gl-link class="gl-display-inline-block" :href="sourceUrl" target="_blank">
{{ sourceUrl }} <gl-icon name="external-link" class="vertical-align-middle" />
</gl-link>
</template>
</gl-sprintf>
</span>
<gl-search-box-by-click class="gl-ml-auto" @submit="filter = $event" @clear="filter = ''" />
</div>
<gl-loading-icon v-if="$apollo.loading" size="md" class="gl-mt-5" />
<template v-else>
<gl-empty-state v-if="hasEmptyFilter" :title="__('Sorry, your filter produced no results')" />
<gl-empty-state
v-else-if="!hasGroups"
:title="s__('BulkImport|No groups available for import')"
/>
<div v-else class="gl-display-flex gl-flex-direction-column gl-align-items-center">
<table class="gl-w-full">
<thead class="gl-border-solid gl-border-gray-200 gl-border-0 gl-border-b-1">
<th class="gl-py-4 import-jobs-from-col">{{ s__('BulkImport|From source group') }}</th>
<th class="gl-py-4 import-jobs-to-col">{{ s__('BulkImport|To new group') }}</th>
<th class="gl-py-4 import-jobs-status-col">{{ __('Status') }}</th>
<th class="gl-py-4 import-jobs-cta-col"></th>
</thead>
<tbody>
<template v-for="group in bulkImportSourceGroups.nodes">
<import-table-row
:key="group.id"
:group="group"
:available-namespaces="availableNamespaces"
@update-target-namespace="
setTargetNamespace({
variables: { sourceGroupId: group.id, targetNamespace: $event },
})
"
@update-new-name="
setNewName({
variables: { sourceGroupId: group.id, newName: $event },
})
"
@import-group="importGroup({ variables: { sourceGroupId: group.id } })"
/>
</template>
</tbody>
</table>
<pagination-links
:change="setPage"
:page-info="bulkImportSourceGroups.pageInfo"
class="gl-flex gl-mt-3"
/>
</div>
</template>
</div>
</template>
import axios from '~/lib/utils/axios_utils';
import { parseIntPagination, normalizeHeaders } from '~/lib/utils/common_utils';
import createDefaultClient from '~/lib/graphql';
import { s__ } from '~/locale';
import createFlash from '~/flash';
......@@ -8,8 +9,10 @@ import { SourceGroupsManager } from './services/source_groups_manager';
import { StatusPoller } from './services/status_poller';
export const clientTypenames = {
BulkImportSourceGroupConnection: 'ClientBulkImportSourceGroupConnection',
BulkImportSourceGroup: 'ClientBulkImportSourceGroup',
AvailableNamespace: 'ClientAvailableNamespace',
BulkImportPageInfo: 'ClientBulkImportPageInfo',
};
export function createResolvers({ endpoints }) {
......@@ -17,22 +20,39 @@ export function createResolvers({ endpoints }) {
return {
Query: {
async bulkImportSourceGroups(_, __, { client }) {
async bulkImportSourceGroups(_, vars, { client }) {
const {
data: { availableNamespaces },
} = await client.query({ query: availableNamespacesQuery });
return axios.get(endpoints.status).then(({ data }) => {
return data.importable_data.map((group) => ({
__typename: clientTypenames.BulkImportSourceGroup,
...group,
status: STATUSES.NONE,
import_target: {
new_name: group.full_path,
target_namespace: availableNamespaces[0].full_path,
return axios
.get(endpoints.status, {
params: {
page: vars.page,
per_page: vars.perPage,
filter: vars.filter,
},
}));
});
})
.then(({ headers, data }) => {
const pagination = parseIntPagination(normalizeHeaders(headers));
return {
__typename: clientTypenames.BulkImportSourceGroupConnection,
nodes: data.importable_data.map((group) => ({
__typename: clientTypenames.BulkImportSourceGroup,
...group,
status: STATUSES.NONE,
import_target: {
new_name: group.full_path,
target_namespace: availableNamespaces[0].full_path,
},
})),
pageInfo: {
__typename: clientTypenames.BulkImportPageInfo,
...pagination,
},
};
});
},
availableNamespaces: () =>
......
#import "../fragments/bulk_import_source_group_item.fragment.graphql"
query bulkImportSourceGroups {
bulkImportSourceGroups @client {
...BulkImportSourceGroupItem
query bulkImportSourceGroups($page: Int = 1, $perPage: Int = 20, $filter: String = "") {
bulkImportSourceGroups(page: $page, filter: $filter, perPage: $perPage) @client {
nodes {
...BulkImportSourceGroupItem
}
pageInfo {
page
perPage
total
totalPages
}
}
}
......@@ -46,7 +46,10 @@ export class StatusPoller {
const { bulkImportSourceGroups } = this.client.readQuery({
query: bulkImportSourceGroupsQuery,
});
const groupsInProgress = bulkImportSourceGroups.filter((g) => g.status === STATUSES.STARTED);
const groupsInProgress = bulkImportSourceGroups.nodes.filter(
(g) => g.status === STATUSES.STARTED,
);
if (groupsInProgress.length) {
const { data: results } = await this.client.query({
query: generateGroupsQuery(groupsInProgress),
......
......@@ -10,7 +10,12 @@ Vue.use(VueApollo);
export function mountImportGroupsApp(mountElement) {
if (!mountElement) return undefined;
const { statusPath, availableNamespacesPath, createBulkImportPath } = mountElement.dataset;
const {
statusPath,
availableNamespacesPath,
createBulkImportPath,
sourceUrl,
} = mountElement.dataset;
const apolloProvider = new VueApollo({
defaultClient: createApolloClient({
endpoints: {
......@@ -25,7 +30,11 @@ export function mountImportGroupsApp(mountElement) {
el: mountElement,
apolloProvider,
render(createElement) {
return createElement(ImportTable);
return createElement(ImportTable, {
props: {
sourceUrl,
},
});
},
});
}
......@@ -22,7 +22,13 @@ class Import::BulkImportsController < ApplicationController
def status
respond_to do |format|
format.json do
render json: { importable_data: serialized_importable_data }
data = importable_data
pagination_headers.each do |header|
response.set_header(header, data.headers[header])
end
render json: { importable_data: serialized_data(data.parsed_response) }
end
format.html do
@source_url = session[url_key]
......@@ -44,8 +50,12 @@ class Import::BulkImportsController < ApplicationController
private
def serialized_importable_data
serializer.represent(importable_data, {}, Import::BulkImportEntity)
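# Pagination headers from the source GitLab instance that are forwarded
# verbatim on our own JSON response in the status action above.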
def pagination_headers
%w[x-next-page x-page x-per-page x-prev-page x-total x-total-pages]
end
def serialized_data(data)
serializer.represent(data, {}, Import::BulkImportEntity)
end
def serializer
......@@ -53,7 +63,7 @@ class Import::BulkImportsController < ApplicationController
end
def importable_data
client.get('groups', query_params).parsed_response
client.get('groups', query_params)
end
# Default query string params used to fetch groups from GitLab source instance
......@@ -74,7 +84,9 @@ class Import::BulkImportsController < ApplicationController
def client
@client ||= BulkImports::Clients::Http.new(
uri: session[url_key],
token: session[access_token_key]
token: session[access_token_key],
per_page: params[:per_page],
page: params[:page]
)
end
......
......@@ -4,9 +4,8 @@
%h1.gl-my-0.gl-py-4.gl-font-size-h1.gl-border-solid.gl-border-gray-200.gl-border-0.gl-border-b-1
= s_('BulkImport|Import groups from GitLab')
%p.gl-my-0.gl-py-5.gl-border-solid.gl-border-gray-200.gl-border-0.gl-border-b-1
= s_('BulkImport|Importing groups from %{link}').html_safe % { link: external_link(@source_url, @source_url) }
#import-groups-mount-element{ data: { status_path: status_import_bulk_imports_path(format: :json),
available_namespaces_path: import_available_namespaces_path(format: :json),
create_bulk_import_path: import_bulk_imports_path(format: :json) } }
create_bulk_import_path: import_bulk_imports_path(format: :json),
source_url: @source_url } }
---
title: 'Add pagination and filtering to group imports'
merge_request: 52340
author:
type: changed
......@@ -200,6 +200,11 @@ msgid_plural "%d fixed test results"
msgstr[0] ""
msgstr[1] ""
msgid "%d group"
msgid_plural "%d groups"
msgstr[0] ""
msgstr[1] ""
msgid "%d group selected"
msgid_plural "%d groups selected"
msgstr[0] ""
......@@ -4885,10 +4890,16 @@ msgstr ""
msgid "BulkImport|Import groups from GitLab"
msgstr ""
msgid "BulkImport|Importing groups from %{link}"
msgid "BulkImport|Importing the group failed"
msgstr ""
msgid "BulkImport|No groups available for import"
msgstr ""
msgid "BulkImport|Importing the group failed"
msgid "BulkImport|Showing %{start}-%{end} of %{total} from %{link}"
msgstr ""
msgid "BulkImport|Showing %{start}-%{end} of %{total} matching filter \"%{filter}\" from %{link}"
msgstr ""
msgid "BulkImport|To new group"
......
......@@ -59,7 +59,14 @@ RSpec.describe Import::BulkImportsController do
parsed_response: [
{ 'id' => 1, 'full_name' => 'group1', 'full_path' => 'full/path/group1', 'web_url' => 'http://demo.host/full/path/group1' },
{ 'id' => 2, 'full_name' => 'group2', 'full_path' => 'full/path/group2', 'web_url' => 'http://demo.host/full/path/group2' }
]
],
headers: {
'x-next-page' => '2',
'x-page' => '1',
'x-per-page' => '20',
'x-total' => '37',
'x-total-pages' => '2'
}
)
end
......@@ -81,6 +88,17 @@ RSpec.describe Import::BulkImportsController do
expect(json_response).to eq({ importable_data: client_response.parsed_response }.as_json)
end
it 'forwards pagination headers' do
get :status, format: :json
expect(response.headers['x-per-page']).to eq client_response.headers['x-per-page']
expect(response.headers['x-page']).to eq client_response.headers['x-page']
expect(response.headers['x-next-page']).to eq client_response.headers['x-next-page']
expect(response.headers['x-prev-page']).to eq client_response.headers['x-prev-page']
expect(response.headers['x-total']).to eq client_response.headers['x-total']
expect(response.headers['x-total-pages']).to eq client_response.headers['x-total-pages']
end
context 'when filtering' do
it 'returns filtered result' do
filter = 'test'
......
......@@ -23,8 +23,8 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
source_url = 'https://gitlab.com'
pat = 'demo-pat'
stub_path = 'stub-group'
stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=30&top_level_only=true&min_access_level=40" % { url: source_url }).to_return(
total = 37
stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=20&top_level_only=true&min_access_level=40&search=" % { url: source_url }).to_return(
body: [{
id: 2595438,
web_url: 'https://gitlab.com/groups/auto-breakfast',
......@@ -33,7 +33,14 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
full_name: 'Stub',
full_path: stub_path
}].to_json,
headers: { 'Content-Type' => 'application/json' }
headers: {
'Content-Type' => 'application/json',
'X-Next-Page' => 2,
'X-Page' => 1,
'X-Per-Page' => 20,
'X-Total' => total,
'X-Total-Pages' => 2
}
)
expect(page).to have_content 'Import groups from another instance of GitLab'
......@@ -44,7 +51,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js do
click_on 'Connect instance'
expect(page).to have_content 'Importing groups from %{url}' % { url: source_url }
expect(page).to have_content 'Showing 1-1 of %{total} groups from %{url}' % { url: source_url, total: total }
expect(page).to have_content stub_path
end
end
......
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import { GlLoadingIcon } from '@gitlab/ui';
import { GlEmptyState, GlLoadingIcon, GlSearchBoxByClick, GlSprintf } from '@gitlab/ui';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
import ImportTableRow from '~/import_entities/import_groups/components/import_table_row.vue';
......@@ -8,6 +8,7 @@ import ImportTable from '~/import_entities/import_groups/components/import_table
import setTargetNamespaceMutation from '~/import_entities/import_groups/graphql/mutations/set_target_namespace.mutation.graphql';
import setNewNameMutation from '~/import_entities/import_groups/graphql/mutations/set_new_name.mutation.graphql';
import importGroupMutation from '~/import_entities/import_groups/graphql/mutations/import_group.mutation.graphql';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import { STATUSES } from '~/import_entities/constants';
......@@ -20,6 +21,9 @@ describe('import table', () => {
let wrapper;
let apolloProvider;
const FAKE_GROUP = generateFakeEntry({ id: 1, status: STATUSES.NONE });
const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
const createComponent = ({ bulkImportSourceGroups }) => {
apolloProvider = createMockApollo([], {
Query: {
......@@ -34,6 +38,12 @@ describe('import table', () => {
});
wrapper = shallowMount(ImportTable, {
propsData: {
sourceUrl: 'https://demo.host',
},
stubs: {
GlSprintf,
},
localVue,
apolloProvider,
});
......@@ -62,13 +72,28 @@ describe('import table', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
});
it('renders message about empty state when no groups are available for import', async () => {
createComponent({
bulkImportSourceGroups: () => ({
nodes: [],
pageInfo: FAKE_PAGE_INFO,
}),
});
await waitForPromises();
expect(wrapper.find(GlEmptyState).props().title).toBe('No groups available for import');
});
it('renders import row for each group in response', async () => {
const FAKE_GROUPS = [
generateFakeEntry({ id: 1, status: STATUSES.NONE }),
generateFakeEntry({ id: 2, status: STATUSES.FINISHED }),
];
createComponent({
bulkImportSourceGroups: () => FAKE_GROUPS,
bulkImportSourceGroups: () => ({
nodes: FAKE_GROUPS,
pageInfo: FAKE_PAGE_INFO,
}),
});
await waitForPromises();
......@@ -76,11 +101,9 @@ describe('import table', () => {
});
describe('converts row events to mutation invocations', () => {
const FAKE_GROUP = generateFakeEntry({ id: 1, status: STATUSES.NONE });
beforeEach(() => {
createComponent({
bulkImportSourceGroups: () => [FAKE_GROUP],
bulkImportSourceGroups: () => ({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO }),
});
return waitForPromises();
});
......@@ -100,4 +123,115 @@ describe('import table', () => {
});
});
});
describe('pagination', () => {
const bulkImportSourceGroupsQueryMock = jest
.fn()
.mockResolvedValue({ nodes: [FAKE_GROUP], pageInfo: FAKE_PAGE_INFO });
beforeEach(() => {
createComponent({
bulkImportSourceGroups: bulkImportSourceGroupsQueryMock,
});
return waitForPromises();
});
it('correctly passes pagination info from query', () => {
expect(wrapper.find(PaginationLinks).props().pageInfo).toStrictEqual(FAKE_PAGE_INFO);
});
it('updates page when page change is requested', async () => {
const REQUESTED_PAGE = 2;
wrapper.find(PaginationLinks).props().change(REQUESTED_PAGE);
await waitForPromises();
expect(bulkImportSourceGroupsQueryMock).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({ page: REQUESTED_PAGE }),
expect.anything(),
expect.anything(),
);
});
it('updates status text when page is changed', async () => {
const REQUESTED_PAGE = 2;
bulkImportSourceGroupsQueryMock.mockResolvedValue({
nodes: [FAKE_GROUP],
pageInfo: {
page: 2,
total: 38,
perPage: 20,
totalPages: 2,
},
});
wrapper.find(PaginationLinks).props().change(REQUESTED_PAGE);
await waitForPromises();
expect(wrapper.text()).toContain('Showing 21-21 of 38');
});
});
describe('filters', () => {
const bulkImportSourceGroupsQueryMock = jest
.fn()
.mockResolvedValue({ nodes: [], pageInfo: FAKE_PAGE_INFO });
beforeEach(() => {
createComponent({
bulkImportSourceGroups: bulkImportSourceGroupsQueryMock,
});
return waitForPromises();
});
const findFilterInput = () => wrapper.find(GlSearchBoxByClick);
it('properly passes filter to graphql query when search box is submitted', async () => {
createComponent({
bulkImportSourceGroups: bulkImportSourceGroupsQueryMock,
});
await waitForPromises();
const FILTER_VALUE = 'foo';
findFilterInput().vm.$emit('submit', FILTER_VALUE);
await waitForPromises();
expect(bulkImportSourceGroupsQueryMock).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({ filter: FILTER_VALUE }),
expect.anything(),
expect.anything(),
);
});
it('updates status string when search box is submitted', async () => {
createComponent({
bulkImportSourceGroups: bulkImportSourceGroupsQueryMock,
});
await waitForPromises();
const FILTER_VALUE = 'foo';
findFilterInput().vm.$emit('submit', FILTER_VALUE);
await waitForPromises();
expect(wrapper.text()).toContain('Showing 1-0 of 40 groups matching filter "foo"');
});
it('properly resets filter in graphql query when search box is cleared', async () => {
const FILTER_VALUE = 'foo';
findFilterInput().vm.$emit('submit', FILTER_VALUE);
await waitForPromises();
bulkImportSourceGroupsQueryMock.mockClear();
await apolloProvider.defaultClient.resetStore();
findFilterInput().vm.$emit('clear');
await waitForPromises();
expect(bulkImportSourceGroupsQueryMock).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({ filter: '' }),
expect.anything(),
expect.anything(),
);
});
});
});
......@@ -79,33 +79,56 @@ describe('Bulk import resolvers', () => {
axiosMockAdapter
.onGet(FAKE_ENDPOINTS.availableNamespaces)
.reply(httpStatus.OK, availableNamespacesFixture);
const response = await client.query({ query: bulkImportSourceGroupsQuery });
results = response.data.bulkImportSourceGroups;
});
it('mirrors REST endpoint response fields', () => {
const MIRRORED_FIELDS = ['id', 'full_name', 'full_path', 'web_url'];
expect(
results.every((r, idx) =>
MIRRORED_FIELDS.every(
(field) => r[field] === statusEndpointFixture.importable_data[idx][field],
describe('when called', () => {
beforeEach(async () => {
const response = await client.query({ query: bulkImportSourceGroupsQuery });
results = response.data.bulkImportSourceGroups.nodes;
});
it('mirrors REST endpoint response fields', () => {
const MIRRORED_FIELDS = ['id', 'full_name', 'full_path', 'web_url'];
expect(
results.every((r, idx) =>
MIRRORED_FIELDS.every(
(field) => r[field] === statusEndpointFixture.importable_data[idx][field],
),
),
),
).toBe(true);
});
).toBe(true);
});
it('populates each result instance with status field default to none', () => {
expect(results.every((r) => r.status === STATUSES.NONE)).toBe(true);
});
it('populates each result instance with status field default to none', () => {
expect(results.every((r) => r.status === STATUSES.NONE)).toBe(true);
});
it('populates each result instance with import_target defaulted to first available namespace', () => {
expect(
results.every(
(r) => r.import_target.target_namespace === availableNamespacesFixture[0].full_path,
),
).toBe(true);
it('populates each result instance with import_target defaulted to first available namespace', () => {
expect(
results.every(
(r) => r.import_target.target_namespace === availableNamespacesFixture[0].full_path,
),
).toBe(true);
});
});
it.each`
variable | queryParam | value
${'filter'} | ${'filter'} | ${'demo'}
${'perPage'} | ${'per_page'} | ${30}
${'page'} | ${'page'} | ${3}
`(
'properly passes GraphQL variable $variable as REST $queryParam query parameter',
async ({ variable, queryParam, value }) => {
await client.query({
query: bulkImportSourceGroupsQuery,
variables: { [variable]: value },
});
const restCall = axiosMockAdapter.history.get.find(
(q) => q.url === FAKE_ENDPOINTS.status,
);
expect(restCall.params[queryParam]).toBe(value);
},
);
});
});
......@@ -117,20 +140,28 @@ describe('Bulk import resolvers', () => {
client.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
bulkImportSourceGroups: [
{
__typename: clientTypenames.BulkImportSourceGroup,
id: GROUP_ID,
status: STATUSES.NONE,
web_url: 'https://fake.host/1',
full_path: 'fake_group_1',
full_name: 'fake_name_1',
import_target: {
target_namespace: 'root',
new_name: 'group1',
bulkImportSourceGroups: {
nodes: [
{
__typename: clientTypenames.BulkImportSourceGroup,
id: GROUP_ID,
status: STATUSES.NONE,
web_url: 'https://fake.host/1',
full_path: 'fake_group_1',
full_name: 'fake_name_1',
import_target: {
target_namespace: 'root',
new_name: 'group1',
},
},
],
pageInfo: {
page: 1,
perPage: 20,
total: 37,
totalPages: 2,
},
],
},
},
});
......@@ -140,7 +171,7 @@ describe('Bulk import resolvers', () => {
fetchPolicy: 'cache-only',
})
.subscribe(({ data }) => {
results = data.bulkImportSourceGroups;
results = data.bulkImportSourceGroups.nodes;
});
});
......@@ -174,7 +205,9 @@ describe('Bulk import resolvers', () => {
});
await waitForPromises();
const { bulkImportSourceGroups: intermediateResults } = client.readQuery({
const {
bulkImportSourceGroups: { nodes: intermediateResults },
} = client.readQuery({
query: bulkImportSourceGroupsQuery,
});
......
......@@ -4,6 +4,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import createFlash from '~/flash';
import { StatusPoller } from '~/import_entities/import_groups/graphql/services/status_poller';
import { clientTypenames } from '~/import_entities/import_groups/graphql/client_factory';
import bulkImportSourceGroupsQuery from '~/import_entities/import_groups/graphql/queries/bulk_import_source_groups.query.graphql';
import { STATUSES } from '~/import_entities/constants';
import { SourceGroupsManager } from '~/import_entities/import_groups/graphql/services/source_groups_manager';
......@@ -17,6 +18,7 @@ jest.mock('~/import_entities/import_groups/graphql/services/source_groups_manage
}));
const TEST_POLL_INTERVAL = 1000;
const FAKE_PAGE_INFO = { page: 1, perPage: 20, total: 40, totalPages: 2 };
describe('Bulk import status poller', () => {
let poller;
......@@ -25,6 +27,25 @@ describe('Bulk import status poller', () => {
const listQueryCacheCalls = () =>
clientMock.readQuery.mock.calls.filter((call) => call[0].query === bulkImportSourceGroupsQuery);
const generateFakeGroups = (statuses) =>
statuses.map((status, idx) => generateFakeEntry({ status, id: idx }));
const writeFakeGroupsQuery = (nodes) => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
bulkImportSourceGroups: {
__typename: clientTypenames.BulkImportSourceGroupConnection,
nodes,
pageInfo: {
__typename: clientTypenames.BulkImportPageInfo,
...FAKE_PAGE_INFO,
},
},
},
});
};
beforeEach(() => {
clientMock = createMockClient({
cache: new InMemoryCache({
......@@ -42,10 +63,7 @@ describe('Bulk import status poller', () => {
describe('general behavior', () => {
beforeEach(() => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: { bulkImportSourceGroups: [] },
});
writeFakeGroupsQuery([]);
});
it('does not perform polling when constructed', () => {
......@@ -94,14 +112,7 @@ describe('Bulk import status poller', () => {
});
it('does not query server when no groups have STARTED status', async () => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
bulkImportSourceGroups: [STATUSES.NONE, STATUSES.FINISHED].map((status, idx) =>
generateFakeEntry({ status, id: idx }),
),
},
});
writeFakeGroupsQuery(generateFakeGroups([STATUSES.NONE, STATUSES.FINISHED]));
jest.spyOn(clientMock, 'query');
poller.startPolling();
......@@ -111,44 +122,23 @@ describe('Bulk import status poller', () => {
describe('when there are groups which have STARTED status', () => {
const TARGET_NAMESPACE = 'root';
const STARTED_GROUP_1 = {
const STARTED_GROUP_1 = generateFakeEntry({
status: STATUSES.STARTED,
id: 'started1',
import_target: {
target_namespace: TARGET_NAMESPACE,
new_name: 'group1',
},
};
});
const STARTED_GROUP_2 = {
const STARTED_GROUP_2 = generateFakeEntry({
status: STATUSES.STARTED,
id: 'started2',
import_target: {
target_namespace: TARGET_NAMESPACE,
new_name: 'group2',
},
};
});
const NOT_STARTED_GROUP = {
const NOT_STARTED_GROUP = generateFakeEntry({
status: STATUSES.NONE,
id: 'not_started',
import_target: {
target_namespace: TARGET_NAMESPACE,
new_name: 'group3',
},
};
});
it('query server only for groups with STATUSES.STARTED', async () => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
bulkImportSourceGroups: [
STARTED_GROUP_1,
NOT_STARTED_GROUP,
STARTED_GROUP_2,
].map((group) => generateFakeEntry(group)),
},
});
writeFakeGroupsQuery([STARTED_GROUP_1, NOT_STARTED_GROUP, STARTED_GROUP_2]);
clientMock.query = jest.fn().mockResolvedValue({ data: {} });
poller.startPolling();
......@@ -166,14 +156,7 @@ describe('Bulk import status poller', () => {
});
it('updates statuses only for groups in response', async () => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
bulkImportSourceGroups: [STARTED_GROUP_1, STARTED_GROUP_2].map((group) =>
generateFakeEntry(group),
),
},
});
writeFakeGroupsQuery([STARTED_GROUP_1, STARTED_GROUP_2]);
clientMock.query = jest.fn().mockResolvedValue({ data: { group0: {} } });
poller.startPolling();
......@@ -188,14 +171,7 @@ describe('Bulk import status poller', () => {
describe('when error occurs', () => {
beforeEach(() => {
clientMock.cache.writeQuery({
query: bulkImportSourceGroupsQuery,
data: {
bulkImportSourceGroups: [STARTED_GROUP_1, STARTED_GROUP_2].map((group) =>
generateFakeEntry(group),
),
},
});
writeFakeGroupsQuery([STARTED_GROUP_1, STARTED_GROUP_2]);
clientMock.query = jest.fn().mockRejectedValue(new Error('dummy error'));
poller.startPolling();
......