Commit 0b12a531 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 92f95cca
@@ -629,7 +629,7 @@ GEM
     nakayoshi_fork (0.0.4)
     nap (1.1.0)
     nenv (0.3.0)
-    net-ldap (0.16.0)
+    net-ldap (0.16.2)
     net-ntp (2.1.3)
     net-ssh (5.2.0)
     netrc (0.11.0)
......
@@ -115,12 +115,10 @@ export default {
       <div role="rowheader" class="table-mobile-header">{{ s__('DeployKeys|Deploy key') }}</div>
       <div class="table-mobile-content qa-key">
         <strong class="title qa-key-title"> {{ deployKey.title }} </strong>
-        <div class="fingerprint qa-key-fingerprint">
+        <div class="fingerprint" data-qa-selector="key_md5_fingerprint">
           {{ __('MD5') }}:{{ deployKey.fingerprint }}
         </div>
-        <div class="fingerprint qa-key-fingerprint">
-          {{ __('SHA256') }}:{{ deployKey.fingerprint_sha256 }}
-        </div>
+        <div class="fingerprint">{{ __('SHA256') }}:{{ deployKey.fingerprint_sha256 }}</div>
       </div>
     </div>
     <div class="table-section section-30 section-wrap">
......
@@ -95,6 +95,7 @@ export default {
     return {
       treeWidth,
+      diffFilesLength: 0,
     };
   },
   computed: {
@@ -241,7 +242,8 @@ export default {
     fetchData(toggleTree = true) {
       if (this.glFeatures.diffsBatchLoad) {
         this.fetchDiffFilesMeta()
-          .then(() => {
+          .then(({ real_size }) => {
+            this.diffFilesLength = parseInt(real_size, 10);
             if (toggleTree) this.hideTreeListIfJustOneFile();

             this.startDiffRendering();
@@ -264,7 +266,8 @@ export default {
           });
       } else {
         this.fetchDiffFiles()
-          .then(() => {
+          .then(({ real_size }) => {
+            this.diffFilesLength = parseInt(real_size, 10);
             if (toggleTree) {
               this.hideTreeListIfJustOneFile();
             }
@@ -351,6 +354,7 @@ export default {
       :merge-request-diff="mergeRequestDiff"
       :target-branch="targetBranch"
       :is-limited-container="isLimitedContainer"
+      :diff-files-length="diffFilesLength"
     />
     <hidden-files-warning
......
@@ -42,9 +42,13 @@ export default {
       required: false,
       default: false,
     },
+    diffFilesLength: {
+      type: Number,
+      required: true,
+    },
   },
   computed: {
-    ...mapGetters('diffs', ['hasCollapsedFile', 'diffFilesLength']),
+    ...mapGetters('diffs', ['hasCollapsedFile']),
     ...mapState('diffs', [
       'commit',
       'showTreeList',
......
@@ -64,6 +64,7 @@ export const fetchDiffFiles = ({ state, commit }) => {
   const urlParams = {
     w: state.showWhitespace ? '0' : '1',
   };
+  let returnData;

   if (state.useSingleDiffStyle) {
     urlParams.view = state.diffViewType;
@@ -87,9 +88,13 @@ export const fetchDiffFiles = ({ state, commit }) => {
       worker.postMessage(state.diffFiles);

+      returnData = res.data;
       return Vue.nextTick();
     })
-    .then(handleLocationHash)
+    .then(() => {
+      handleLocationHash();
+      return returnData;
+    })
     .catch(() => worker.terminate());
 };
@@ -147,6 +152,7 @@ export const fetchDiffFilesMeta = ({ commit, state }) => {
       prepareDiffData(data);
       worker.postMessage(data.diff_files);

+      return data;
     })
     .catch(() => worker.terminate());
 };
......
@@ -95,8 +95,6 @@ export const allBlobs = (state, getters) =>
     return acc;
   }, []);

-export const diffFilesLength = state => state.diffFiles.length;
-
 export const getCommentFormForDiffFile = state => fileHash =>
   state.commentForms.find(form => form.fileHash === fileHash);
......
@@ -179,16 +179,19 @@ export default {
     const mapDiscussions = (line, extraCheck = () => true) => ({
       ...line,
       discussions: extraCheck()
-        ? line.discussions
+        ? line.discussions &&
+          line.discussions
             .filter(() => !line.discussions.some(({ id }) => discussion.id === id))
             .concat(lineCheck(line) ? discussion : line.discussions)
         : [],
     });

     const setDiscussionsExpanded = line => {
-      const isLineNoteTargeted = line.discussions.some(
-        disc => disc.notes && disc.notes.find(note => hash === `note_${note.id}`),
-      );
+      const isLineNoteTargeted =
+        line.discussions &&
+        line.discussions.some(
+          disc => disc.notes && disc.notes.find(note => hash === `note_${note.id}`),
+        );

       return {
         ...line,
......
@@ -67,8 +67,8 @@ export default {
       if (this.entryModal.type === modalTypes.rename) {
         if (this.entries[this.entryName] && !this.entries[this.entryName].deleted) {
           flash(
-            sprintf(s__('The name %{entryName} is already taken in this directory.'), {
-              entryName: this.entryName,
+            sprintf(s__('The name "%{name}" is already taken in this directory.'), {
+              name: this.entryName,
             }),
             'alert',
             document,
@@ -81,22 +81,11 @@ export default {
         const entryName = parentPath.pop();
         parentPath = parentPath.join('/');

-        const createPromise =
-          parentPath && !this.entries[parentPath]
-            ? this.createTempEntry({ name: parentPath, type: 'tree' })
-            : Promise.resolve();
-
-        createPromise
-          .then(() =>
-            this.renameEntry({
-              path: this.entryModal.entry.path,
-              name: entryName,
-              parentPath,
-            }),
-          )
-          .catch(() =>
-            flash(__('Error creating a new path'), 'alert', document, null, false, true),
-          );
+        this.renameEntry({
+          path: this.entryModal.entry.path,
+          name: entryName,
+          parentPath,
+        });
       }
     } else {
       this.createTempEntry({
......
@@ -53,60 +53,55 @@ export const setResizingStatus = ({ commit }, resizing) => {
 export const createTempEntry = (
   { state, commit, dispatch },
   { name, type, content = '', base64 = false, binary = false, rawPath = '' },
-) =>
-  new Promise(resolve => {
-    const fullName = name.slice(-1) !== '/' && type === 'tree' ? `${name}/` : name;
-
-    if (state.entries[name] && !state.entries[name].deleted) {
-      flash(
-        `The name "${name.split('/').pop()}" is already taken in this directory.`,
-        'alert',
-        document,
-        null,
-        false,
-        true,
-      );
-
-      resolve();
-
-      return null;
-    }
-
-    const data = decorateFiles({
-      data: [fullName],
-      projectId: state.currentProjectId,
-      branchId: state.currentBranchId,
-      type,
-      tempFile: true,
-      content,
-      base64,
-      binary,
-      rawPath,
-    });
-    const { file, parentPath } = data;
-
-    commit(types.CREATE_TMP_ENTRY, {
-      data,
-      projectId: state.currentProjectId,
-      branchId: state.currentBranchId,
-    });
-
-    if (type === 'blob') {
-      commit(types.TOGGLE_FILE_OPEN, file.path);
-      commit(types.ADD_FILE_TO_CHANGED, file.path);
-      dispatch('setFileActive', file.path);
-      dispatch('triggerFilesChange');
-      dispatch('burstUnusedSeal');
-    }
-
-    if (parentPath && !state.entries[parentPath].opened) {
-      commit(types.TOGGLE_TREE_OPEN, parentPath);
-    }
-
-    resolve(file);
-
-    return null;
-  });
+) => {
+  const fullName = name.slice(-1) !== '/' && type === 'tree' ? `${name}/` : name;
+
+  if (state.entries[name] && !state.entries[name].deleted) {
+    flash(
+      sprintf(__('The name "%{name}" is already taken in this directory.'), {
+        name: name.split('/').pop(),
+      }),
+      'alert',
+      document,
+      null,
+      false,
+      true,
+    );
+
+    return;
+  }
+
+  const data = decorateFiles({
+    data: [fullName],
+    projectId: state.currentProjectId,
+    branchId: state.currentBranchId,
+    type,
+    tempFile: true,
+    content,
+    base64,
+    binary,
+    rawPath,
+  });
+  const { file, parentPath } = data;
+
+  commit(types.CREATE_TMP_ENTRY, {
+    data,
+    projectId: state.currentProjectId,
+    branchId: state.currentBranchId,
+  });
+
+  if (type === 'blob') {
+    commit(types.TOGGLE_FILE_OPEN, file.path);
+    commit(types.ADD_FILE_TO_CHANGED, file.path);
+    dispatch('setFileActive', file.path);
+    dispatch('triggerFilesChange');
+    dispatch('burstUnusedSeal');
+  }
+
+  if (parentPath && !state.entries[parentPath].opened) {
+    commit(types.TOGGLE_TREE_OPEN, parentPath);
+  }
+};

 export const scrollToTab = () => {
   Vue.nextTick(() => {
@@ -211,8 +206,9 @@ export const deleteEntry = ({ commit, dispatch, state }, path) => {
   const entry = state.entries[path];
   const { prevPath, prevName, prevParentPath } = entry;
   const isTree = entry.type === 'tree';
+  const prevEntry = prevPath && state.entries[prevPath];

-  if (prevPath) {
+  if (prevPath && (!prevEntry || prevEntry.deleted)) {
     dispatch('renameEntry', {
       path,
       name: prevName,
@@ -245,6 +241,11 @@ export const resetOpenFiles = ({ commit }) => commit(types.RESET_OPEN_FILES);
 export const renameEntry = ({ dispatch, commit, state }, { path, name, parentPath }) => {
   const entry = state.entries[path];
   const newPath = parentPath ? `${parentPath}/${name}` : name;
+  const existingParent = parentPath && state.entries[parentPath];
+
+  if (parentPath && (!existingParent || existingParent.deleted)) {
+    dispatch('createTempEntry', { name: parentPath, type: 'tree' });
+  }

   commit(types.RENAME_ENTRY, { path, name, parentPath });
......
@@ -83,8 +83,11 @@ export const showBranchNotFoundError = ({ dispatch }, branchId) => {
   });
 };

-export const showEmptyState = ({ commit, state }, { projectId, branchId }) => {
+export const showEmptyState = ({ commit, state, dispatch }, { projectId, branchId }) => {
   const treePath = `${projectId}/${branchId}`;

+  dispatch('setCurrentBranchId', branchId);
+
   commit(types.CREATE_TREE, { treePath });
   commit(types.TOGGLE_LOADING, {
     entry: state.trees[treePath],
......
 import initSettingsPanels from '~/settings_panels';
 import projectSelect from '~/project_select';
+import selfMonitor from '~/self_monitor';

 document.addEventListener('DOMContentLoaded', () => {
+  if (gon.features && gon.features.selfMonitoringProject) {
+    selfMonitor();
+  }
   // Initialize expandable settings panels
   initSettingsPanels();
   projectSelect();
......
<script>
import Vue from 'vue';
import { GlFormGroup, GlButton, GlModal, GlToast, GlToggle } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';
import { __, s__, sprintf } from '~/locale';
import { visitUrl, getBaseURL } from '~/lib/utils/url_utility';
Vue.use(GlToast);
export default {
components: {
GlFormGroup,
GlButton,
GlModal,
GlToggle,
},
formLabels: {
createProject: __('Create Project'),
},
data() {
return {
modalId: 'delete-self-monitor-modal',
};
},
computed: {
...mapState('selfMonitoring', [
'projectEnabled',
'projectCreated',
'showAlert',
'projectPath',
'loading',
'alertContent',
]),
selfMonitorEnabled: {
get() {
return this.projectEnabled;
},
set(projectEnabled) {
this.setSelfMonitor(projectEnabled);
},
},
selfMonitorProjectFullUrl() {
return `${getBaseURL()}/${this.projectPath}`;
},
selfMonitoringFormText() {
if (this.projectCreated) {
return sprintf(
s__(
'SelfMonitoring|Enabling this feature creates a %{projectLinkStart}project%{projectLinkEnd} that can be used to monitor the health of your instance.',
),
{
projectLinkStart: `<a href="${this.selfMonitorProjectFullUrl}">`,
projectLinkEnd: '</a>',
},
false,
);
}
return s__(
'SelfMonitoring|Enabling this feature creates a project that can be used to monitor the health of your instance.',
);
},
},
watch: {
selfMonitorEnabled() {
this.saveChangesSelfMonitorProject();
},
showAlert() {
let toastOptions = {
onComplete: () => {
this.resetAlert();
},
};
if (this.showAlert) {
if (this.alertContent.actionName && this.alertContent.actionName.length > 0) {
toastOptions = {
...toastOptions,
action: {
text: this.alertContent.actionText,
onClick: (_, toastObject) => {
this[this.alertContent.actionName]();
toastObject.goAway(0);
},
},
};
}
this.$toast.show(this.alertContent.message, toastOptions);
}
},
},
methods: {
...mapActions('selfMonitoring', [
'setSelfMonitor',
'createProject',
'deleteProject',
'resetAlert',
]),
hideSelfMonitorModal() {
this.$root.$emit('bv::hide::modal', this.modalId);
this.setSelfMonitor(true);
},
showSelfMonitorModal() {
this.$root.$emit('bv::show::modal', this.modalId);
},
saveChangesSelfMonitorProject() {
if (this.projectCreated && !this.projectEnabled) {
this.showSelfMonitorModal();
} else {
this.createProject();
}
},
viewSelfMonitorProject() {
visitUrl(this.selfMonitorProjectFullUrl);
},
},
};
</script>
<template>
<section class="settings no-animate js-self-monitoring-settings">
<div class="settings-header">
<h4 class="js-section-header">
{{ s__('SelfMonitoring|Self monitoring') }}
</h4>
<gl-button class="js-settings-toggle">{{ __('Expand') }}</gl-button>
<p class="js-section-sub-header">
{{ s__('SelfMonitoring|Enable or disable instance self monitoring') }}
</p>
</div>
<div class="settings-content">
<form name="self-monitoring-form">
<p v-html="selfMonitoringFormText"></p>
<gl-form-group :label="$options.formLabels.createProject" label-for="self-monitor-toggle">
<gl-toggle
v-model="selfMonitorEnabled"
:is-loading="loading"
name="self-monitor-toggle"
/>
</gl-form-group>
</form>
</div>
<gl-modal
:title="s__('SelfMonitoring|Disable self monitoring?')"
:modal-id="modalId"
:ok-title="__('Delete project')"
:cancel-title="__('Cancel')"
ok-variant="danger"
@ok="deleteProject"
@cancel="hideSelfMonitorModal"
>
<div>
{{
s__(
'SelfMonitoring|Disabling this feature will delete the self monitoring project. Are you sure you want to delete the project?',
)
}}
</div>
</gl-modal>
</section>
</template>
import Vue from 'vue';
import store from './store';
import SelfMonitorForm from './components/self_monitor_form.vue';
export default () => {
const el = document.querySelector('.js-self-monitoring-settings');
let selfMonitorProjectCreated;
if (el) {
selfMonitorProjectCreated = el.dataset.selfMonitoringProjectExists;
// eslint-disable-next-line no-new
new Vue({
el,
store: store({
projectEnabled: selfMonitorProjectCreated,
...el.dataset,
}),
render(createElement) {
return createElement(SelfMonitorForm);
},
});
}
};
import { __, s__ } from '~/locale';
import axios from '~/lib/utils/axios_utils';
import statusCodes from '~/lib/utils/http_status';
import { backOff } from '~/lib/utils/common_utils';
import * as types from './mutation_types';
const TWO_MINUTES = 120000;
function backOffRequest(makeRequestCallback) {
return backOff((next, stop) => {
makeRequestCallback()
.then(resp => {
if (resp.status === statusCodes.ACCEPTED) {
next();
} else {
stop(resp);
}
})
.catch(stop);
}, TWO_MINUTES);
}
export const setSelfMonitor = ({ commit }, enabled) => commit(types.SET_ENABLED, enabled);
export const createProject = ({ dispatch }) => dispatch('requestCreateProject');
export const resetAlert = ({ commit }) => commit(types.SET_SHOW_ALERT, false);
export const requestCreateProject = ({ dispatch, state, commit }) => {
commit(types.SET_LOADING, true);
axios
.post(state.createProjectEndpoint)
.then(resp => {
if (resp.status === statusCodes.ACCEPTED) {
dispatch('requestCreateProjectStatus', resp.data.job_id);
}
})
.catch(error => {
dispatch('requestCreateProjectError', error);
});
};
export const requestCreateProjectStatus = ({ dispatch, state }, jobId) => {
backOffRequest(() => axios.get(state.createProjectStatusEndpoint, { params: { job_id: jobId } }))
.then(resp => {
if (resp.status === statusCodes.OK) {
dispatch('requestCreateProjectSuccess', resp.data);
}
})
.catch(error => {
dispatch('requestCreateProjectError', error);
});
};
export const requestCreateProjectSuccess = ({ commit }, selfMonitorData) => {
commit(types.SET_LOADING, false);
commit(types.SET_PROJECT_URL, selfMonitorData.project_full_path);
commit(types.SET_ALERT_CONTENT, {
message: s__('SelfMonitoring|Self monitoring project has been successfully created.'),
actionText: __('View project'),
actionName: 'viewSelfMonitorProject',
});
commit(types.SET_SHOW_ALERT, true);
commit(types.SET_PROJECT_CREATED, true);
};
export const requestCreateProjectError = ({ commit }, error) => {
const { response } = error;
const message = response.data && response.data.message ? response.data.message : '';
commit(types.SET_ALERT_CONTENT, {
message: `${__('There was an error saving your changes.')} ${message}`,
});
commit(types.SET_SHOW_ALERT, true);
commit(types.SET_LOADING, false);
};
export const deleteProject = ({ dispatch }) => dispatch('requestDeleteProject');
export const requestDeleteProject = ({ dispatch, state, commit }) => {
commit(types.SET_LOADING, true);
axios
.delete(state.deleteProjectEndpoint)
.then(resp => {
if (resp.status === statusCodes.ACCEPTED) {
dispatch('requestDeleteProjectStatus', resp.data.job_id);
}
})
.catch(error => {
dispatch('requestDeleteProjectError', error);
});
};
export const requestDeleteProjectStatus = ({ dispatch, state }, jobId) => {
backOffRequest(() => axios.get(state.deleteProjectStatusEndpoint, { params: { job_id: jobId } }))
.then(resp => {
if (resp.status === statusCodes.OK) {
dispatch('requestDeleteProjectSuccess', resp.data);
}
})
.catch(error => {
dispatch('requestDeleteProjectError', error);
});
};
export const requestDeleteProjectSuccess = ({ commit }) => {
commit(types.SET_PROJECT_URL, '');
commit(types.SET_PROJECT_CREATED, false);
commit(types.SET_ALERT_CONTENT, {
message: s__('SelfMonitoring|Self monitoring project has been successfully deleted.'),
actionText: __('Undo'),
actionName: 'createProject',
});
commit(types.SET_SHOW_ALERT, true);
commit(types.SET_LOADING, false);
};
export const requestDeleteProjectError = ({ commit }, error) => {
const { response } = error;
const message = response.data && response.data.message ? response.data.message : '';
commit(types.SET_ALERT_CONTENT, {
message: `${__('There was an error saving your changes.')} ${message}`,
});
commit(types.SET_LOADING, false);
};
import Vue from 'vue';
import Vuex from 'vuex';
import createState from './state';
import * as actions from './actions';
import mutations from './mutations';
Vue.use(Vuex);
export const createStore = initialState =>
new Vuex.Store({
modules: {
selfMonitoring: {
namespaced: true,
state: createState(initialState),
actions,
mutations,
},
},
});
export default createStore;
export const SET_ENABLED = 'SET_ENABLED';
export const SET_PROJECT_CREATED = 'SET_PROJECT_CREATED';
export const SET_SHOW_ALERT = 'SET_SHOW_ALERT';
export const SET_PROJECT_URL = 'SET_PROJECT_URL';
export const SET_LOADING = 'SET_LOADING';
export const SET_ALERT_CONTENT = 'SET_ALERT_CONTENT';
import * as types from './mutation_types';
export default {
[types.SET_ENABLED](state, enabled) {
state.projectEnabled = enabled;
},
[types.SET_PROJECT_CREATED](state, created) {
state.projectCreated = created;
},
[types.SET_SHOW_ALERT](state, show) {
state.showAlert = show;
},
[types.SET_PROJECT_URL](state, url) {
state.projectPath = url;
},
[types.SET_LOADING](state, loading) {
state.loading = loading;
},
[types.SET_ALERT_CONTENT](state, content) {
state.alertContent = content;
},
};
import { parseBoolean } from '~/lib/utils/common_utils';
export default (initialState = {}) => ({
projectEnabled: parseBoolean(initialState.projectEnabled) || false,
projectCreated: parseBoolean(initialState.selfMonitorProjectCreated) || false,
createProjectEndpoint: initialState.createSelfMonitoringProjectPath || '',
deleteProjectEndpoint: initialState.deleteSelfMonitoringProjectPath || '',
createProjectStatusEndpoint: initialState.statusCreateSelfMonitoringProjectPath || '',
deleteProjectStatusEndpoint: initialState.statusDeleteSelfMonitoringProjectPath || '',
selfMonitorProjectPath: initialState.selfMonitoringProjectFullPath || '',
showAlert: false,
projectPath: '',
loading: false,
alertContent: {},
});
@@ -281,7 +281,7 @@ module MarkupHelper
     context.reverse_merge!(
       current_user: (current_user if defined?(current_user)),

-      # RelativeLinkFilter
+      # RepositoryLinkFilter and UploadLinkFilter
       commit: @commit,
       project_wiki: @project_wiki,
       ref: @ref,
......
@@ -47,6 +47,9 @@
     .settings-content
       = render 'performance_bar'

+- if Feature.enabled?(:self_monitoring_project)
+  .js-self-monitoring-settings{ data: self_monitoring_project_data }
+
 %section.settings.as-usage.no-animate#js-usage-settings{ class: ('expanded' if expanded_by_default?) }
   .settings-header#usage-statistics
     %h4
......
---
title: Fix discarding renamed directories in Web IDE
merge_request: 22943
author:
type: fixed
---
title: 'Fix: WebIDE doesn''t work on empty repositories again'
merge_request: 22950
author:
type: fixed
---
title: Fix MR diffs file count increments while batch loading
merge_request: 21764
author:
type: fixed
---
title: Add audit events to the adding members to project or group API endpoint
merge_request: 21633
author:
type: changed
---
title: Add more indexes for other order_by options (Projects API)
merge_request: 22784
author:
type: performance
---
title: Add API endpoint for creating a Geo node
merge_request: 22392
author: Rajendra Kadam
type: added
---
title: Update the Net-LDAP gem to 0.16.2
merge_request:
author:
type: other
---
title: Avoid making Gitaly calls when some Markdown text links to an uploaded file
merge_request: 22631
author:
type: performance
@@ -208,7 +208,7 @@ class Gitlab::Seeder::CycleAnalytics
       job = merge_request.head_pipeline.builds.where.not(environment: nil).last
       job.success!

-      pipeline.update_status
+      job.pipeline.update_status
     end
   end
 end
......
# frozen_string_literal: true
class AddIndexesForProjectsApi < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
COLUMNS = %i(created_at last_activity_at updated_at name path)
def up
COLUMNS.each do |column|
add_concurrent_index :projects, [column, :id], where: 'visibility_level = 20', order: { id: :desc }, name: "index_projects_api_vis20_#{column}_id_desc"
add_concurrent_index :projects, [column, :id], where: 'visibility_level = 20', name: "index_projects_api_vis20_#{column}"
end
remove_concurrent_index_by_name :projects, 'index_projects_on_visibility_level_created_at_id_desc'
remove_concurrent_index_by_name :projects, 'index_projects_on_visibility_level_created_at_desc_id_desc'
end
def down
add_concurrent_index :projects, %i(visibility_level created_at id), order: { id: :desc }, name: 'index_projects_on_visibility_level_created_at_id_desc'
add_concurrent_index :projects, %i(visibility_level created_at id), order: { created_at: :desc, id: :desc }, name: 'index_projects_on_visibility_level_created_at_desc_id_desc'
COLUMNS.each do |column|
remove_concurrent_index_by_name :projects, "index_projects_api_vis20_#{column}_id_desc"
remove_concurrent_index_by_name :projects, "index_projects_api_vis20_#{column}"
end
end
end
@@ -10,7 +10,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.

-ActiveRecord::Schema.define(version: 2020_01_08_233040) do
+ActiveRecord::Schema.define(version: 2020_01_10_144316) do

   # These are extensions that must be enabled in order to support this database
   enable_extension "pg_trgm"
@@ -3353,6 +3353,8 @@ ActiveRecord::Schema.define(version: 2020_01_08_233040) do
     t.boolean "autoclose_referenced_issues"
     t.string "suggestion_commit_message", limit: 255
     t.index "lower((name)::text)", name: "index_projects_on_lower_name"
+    t.index ["created_at", "id"], name: "index_projects_api_vis20_created_at", where: "(visibility_level = 20)"
+    t.index ["created_at", "id"], name: "index_projects_api_vis20_created_at_id_desc", order: { id: :desc }, where: "(visibility_level = 20)"
     t.index ["created_at", "id"], name: "index_projects_on_created_at_and_id"
     t.index ["creator_id"], name: "index_projects_on_creator_id"
     t.index ["description"], name: "index_projects_on_description_trigram", opclass: :gin_trgm_ops, using: :gin
@@ -3360,6 +3362,8 @@ ActiveRecord::Schema.define(version: 2020_01_08_233040) do
     t.index ["id"], name: "index_on_id_partial_with_legacy_storage", where: "((storage_version < 2) OR (storage_version IS NULL))"
     t.index ["id"], name: "index_projects_on_id_partial_for_visibility", unique: true, where: "(visibility_level = ANY (ARRAY[10, 20]))"
     t.index ["id"], name: "index_projects_on_mirror_and_mirror_trigger_builds_both_true", where: "((mirror IS TRUE) AND (mirror_trigger_builds IS TRUE))"
+    t.index ["last_activity_at", "id"], name: "index_projects_api_vis20_last_activity_at", where: "(visibility_level = 20)"
+    t.index ["last_activity_at", "id"], name: "index_projects_api_vis20_last_activity_at_id_desc", order: { id: :desc }, where: "(visibility_level = 20)"
     t.index ["last_activity_at"], name: "index_projects_on_last_activity_at"
     t.index ["last_repository_check_at"], name: "index_projects_on_last_repository_check_at", where: "(last_repository_check_at IS NOT NULL)"
     t.index ["last_repository_check_failed"], name: "index_projects_on_last_repository_check_failed"
@@ -3368,8 +3372,12 @@ ActiveRecord::Schema.define(version: 2020_01_08_233040) do
     t.index ["marked_for_deletion_by_user_id"], name: "index_projects_on_marked_for_deletion_by_user_id", where: "(marked_for_deletion_by_user_id IS NOT NULL)"
     t.index ["mirror_last_successful_update_at"], name: "index_projects_on_mirror_last_successful_update_at"
     t.index ["mirror_user_id"], name: "index_projects_on_mirror_user_id"
+    t.index ["name", "id"], name: "index_projects_api_vis20_name", where: "(visibility_level = 20)"
+    t.index ["name", "id"], name: "index_projects_api_vis20_name_id_desc", order: { id: :desc }, where: "(visibility_level = 20)"
     t.index ["name"], name: "index_projects_on_name_trigram", opclass: :gin_trgm_ops, using: :gin
     t.index ["namespace_id"], name: "index_projects_on_namespace_id"
+    t.index ["path", "id"], name: "index_projects_api_vis20_path", where: "(visibility_level = 20)"
+    t.index ["path", "id"], name: "index_projects_api_vis20_path_id_desc", order: { id: :desc }, where: "(visibility_level = 20)"
     t.index ["path"], name: "index_projects_on_path"
     t.index ["path"], name: "index_projects_on_path_trigram", opclass: :gin_trgm_ops, using: :gin
     t.index ["pending_delete"], name: "index_projects_on_pending_delete"
@@ -3379,8 +3387,8 @@ ActiveRecord::Schema.define(version: 2020_01_08_233040) do
     t.index ["runners_token"], name: "index_projects_on_runners_token"
     t.index ["runners_token_encrypted"], name: "index_projects_on_runners_token_encrypted"
     t.index ["star_count"], name: "index_projects_on_star_count"
-    t.index ["visibility_level", "created_at", "id"], name: "index_projects_on_visibility_level_created_at_desc_id_desc", order: { created_at: :desc, id: :desc }
-    t.index ["visibility_level", "created_at", "id"], name: "index_projects_on_visibility_level_created_at_id_desc", order: { id: :desc }
+    t.index ["updated_at", "id"], name: "index_projects_api_vis20_updated_at", where: "(visibility_level = 20)"
+    t.index ["updated_at", "id"], name: "index_projects_api_vis20_updated_at_id_desc", order: { id: :desc }, where: "(visibility_level = 20)"
   end

   create_table "prometheus_alert_events", force: :cascade do |t|
......
@@ -298,7 +298,7 @@ log data to build up in `pg_xlog`. Removing the unused slots can reduce the amou
 1. Start a PostgreSQL console session:

    ```sh
-   sudo gitlab-psql gitlabhq_production
+   sudo gitlab-psql
    ```

    Note: **Note:** Using `gitlab-rails dbconsole` will not work, because managing replication slots requires superuser permissions.
......
@@ -3,6 +3,54 @@
 In order to interact with Geo node endpoints, you need to authenticate yourself
 as an admin.
## Create a new Geo node
Creates a new Geo node.
```
POST /geo_nodes
```
| Attribute | Type | Required | Description |
| ----------------------------| ------- | -------- | -----------------------------------------------------------------|
| `primary` | boolean | no | Specifying whether this node will be primary. Defaults to false. |
| `enabled` | boolean | no | Flag indicating if the Geo node is enabled. Defaults to true. |
| `name` | string | yes | The unique identifier for the Geo node. Must match `geo_node_name` if it is set in `gitlab.rb`, otherwise it must match `external_url` |
| `url` | string | yes | The user-facing URL for the Geo node. |
| `internal_url` | string | no | The URL defined on the primary node that secondary nodes should use to contact it. Returns `url` if not set. |
| `files_max_capacity` | integer | no | Control the maximum concurrency of LFS/attachment backfill for this secondary node. Defaults to 10. |
| `repos_max_capacity` | integer | no | Control the maximum concurrency of repository backfill for this secondary node. Defaults to 25. |
| `verification_max_capacity` | integer | no | Control the maximum concurrency of repository verification for this node. Defaults to 100. |
| `container_repositories_max_capacity` | integer | no | Control the maximum concurrency of container repository sync for this node. Defaults to 10. |
| `sync_object_storage` | boolean | no | Flag indicating if the secondary Geo node will replicate blobs in Object Storage. Defaults to false. |
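
An example request is shown below. It is illustrative only: the node name, the primary and secondary URLs, and `<your_access_token>` are placeholders that you should replace with values for your own nodes.

```shell
# Illustrative request; replace the token and URLs with your own values.
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" \
     --data "name=Test Node 1" \
     --data "url=https://secondary.example.com/" \
     "https://primary.example.com/api/v4/geo_nodes"
```
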
Example response:
```json
{
"id": 3,
"name": "Test Node 1",
"url": "https://secondary.example.com/",
"internal_url": "https://secondary.example.com/",
"primary": false,
"enabled": true,
"current": false,
"files_max_capacity": 10,
"repos_max_capacity": 25,
"verification_max_capacity": 100,
"container_repositories_max_capacity": 10,
"sync_object_storage": false,
"clone_protocol": "http",
"web_edit_url": "https://primary.example.com/admin/geo/nodes/3/edit",
"web_geo_projects_url": "http://secondary.example.com/admin/geo/projects",
"_links": {
"self": "https://primary.example.com/api/v4/geo_nodes/3",
"status": "https://primary.example.com/api/v4/geo_nodes/3/status",
"repair": "https://primary.example.com/api/v4/geo_nodes/3/repair"
}
}
```
 ## Retrieve configuration about all Geo nodes

 ```
......
@@ -21,6 +21,7 @@ The goal of the Package group is to build a set of features that, within three y
 | Format | Use case |
 | ------ | ------ |
 | [Bower](https://gitlab.com/gitlab-org/gitlab/issues/36888) | Boost your front end development by hosting your own Bower components. |
+| [Cargo](https://gitlab.com/gitlab-org/gitlab/issues/33060) | Cargo is the Rust package manager. Build, publish and share Rust packages |
 | [Chef](https://gitlab.com/gitlab-org/gitlab/issues/36889) | Configuration management with Chef using all the benefits of a repository manager. |
 | [CocoaPods](https://gitlab.com/gitlab-org/gitlab/issues/36890) | Speed up development with Xcode and CocoaPods. |
 | [Conda](https://gitlab.com/gitlab-org/gitlab/issues/36891) | Secure and private local Conda repositories. |
......
@@ -199,9 +199,60 @@ include:
   - template: SAST.gitlab-ci.yml

 variables:
+  SAST_DISABLE_DIND: "true"
   SCAN_KUBERNETES_MANIFESTS: "true"
 ```
#### Pre-compilation
If your project requires custom build configurations, it can be preferable to avoid
compilation during your SAST execution and instead pass all job artifacts from an
earlier stage within the pipeline.
To pass your project's dependencies as artifacts, the dependencies must be included
in the project's working directory and specified using the `artifacts:path` configuration.
If all dependencies are present, the `-compile=false` flag can be provided to the
analyzer and compilation will be skipped:
```yaml
image: maven:3.6-jdk-8-alpine
stages:
- build
- test
include:
template: SAST.gitlab-ci.yml
variables:
SAST_DISABLE_DIND: "true"
build:
stage: build
script:
- mvn package -Dmaven.repo.local=./.m2/repository
artifacts:
paths:
- .m2/
- target/
spotbugs-sast:
dependencies: build
script:
- /analyzer run -compile=false
variables:
MAVEN_REPO_PATH: ./.m2/repository
artifacts:
reports:
sast: gl-sast-report.json
```
NOTE: **Note:**
The path to the vendored directory must be specified explicitly to allow
the analyzer to recognize the compiled artifacts. This configuration can vary per
analyzer but in the case of Java above, `MAVEN_REPO_PATH` can be used.
See [Analyzer settings](#analyzer-settings) for the complete list of available options.
 ### Available variables

 SAST can be [configured](#customizing-the-sast-settings) using environment variables.
......
@@ -73,20 +73,20 @@ If you have 2FA enabled, you need to use a [personal access token](../../profile
 ### Authenticating with an OAuth token

 To authenticate with an [OAuth token](../../../api/oauth2.md#resource-owner-password-credentials-flow)
-or [personal access token](../../profile/personal_access_tokens.md), add a corresponding section to your `.npmrc` file:
+or [personal access token](../../profile/personal_access_tokens.md), set your NPM configuration:

-```ini
-; Set URL for your scoped packages.
-; For example package with name `@foo/bar` will use this URL for download
-@foo:registry=https://gitlab.com/api/v4/packages/npm/
+```bash
+# Set URL for your scoped packages.
+# For example package with name `@foo/bar` will use this URL for download
+npm config set @foo:registry https://gitlab.com/api/v4/packages/npm/

-; Add the token for the scoped packages URL. This will allow you to download
-; `@foo/` packages from private projects.
-//gitlab.com/api/v4/packages/npm/:_authToken=<your_token>
+# Add the token for the scoped packages URL. This will allow you to download
+# `@foo/` packages from private projects.
+npm config set '//gitlab.com/api/v4/projects/<your_project_id>/packages/npm/:_authToken' "<your_token>"

-; Add token for uploading to the registry. Replace <your_project_id>
-; with the project you want your package to be uploaded to.
-//gitlab.com/api/v4/projects/<your_project_id>/packages/npm/:_authToken=<your_token>
+# Add token for uploading to the registry. Replace <your_project_id>
+# with the project you want your package to be uploaded to.
+npm config set '//gitlab.com/api/v4/packages/npm/:_authToken' "<your_token>"
 ```

 Replace `<your_project_id>` with your project ID which can be found on the home page
@@ -103,13 +103,11 @@ If you encounter an error message with [Yarn](https://yarnpkg.com/en/), see the
 ### Using variables to avoid hard-coding auth token values

-To avoid hard-coding the `authToken` value, you may use a variables in its place.
-In your `.npmrc` file, you would add:
+To avoid hard-coding the `authToken` value, you may use a variables in its place:

-```ini
-@foo:registry=https://gitlab.com/api/v4/packages/npm/
-//gitlab.com/api/v4/packages/npm/:_authToken=${NPM_TOKEN}
-//gitlab.com/api/v4/projects/<your_project_id>/packages/npm/:_authToken=${NPM_TOKEN}
+```bash
+npm config set '//gitlab.com/api/v4/projects/<your_project_id>/packages/npm/:_authToken' "${NPM_TOKEN}"
+npm config set '//gitlab.com/api/v4/packages/npm/:_authToken' "${NPM_TOKEN}"
 ```

 Then, you could run `npm publish` either locally or via GitLab CI/CD:
@@ -227,6 +225,14 @@ And the `.npmrc` file should look like:
 @foo:registry=https://gitlab.com/api/v4/packages/npm/
 ```
### `npm install` returns `Error: Failed to replace env in config: ${NPM_TOKEN}`
You do not need a token to run `npm install` unless your project is private (the token is only required to publish). If the `.npmrc` file was checked in with a reference to `$NPM_TOKEN`, you can remove it. If you prefer to leave the reference in, you'll need to set a value prior to running `npm install` or set the value using [GitLab environment variables](./../../../ci/variables/README.md):
```bash
NPM_TOKEN=<your_token> npm install
```
 ## NPM dependencies metadata

 > [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/11867) in GitLab Premium 12.6.
......
@@ -41,6 +41,10 @@ module API
         GroupMembersFinder.new(group).execute
       end

+      def create_member(current_user, user, source, params)
+        source.add_user(user, params[:access_level], current_user: current_user, expires_at: params[:expires_at])
+      end
+
       def present_members(members)
         present members, with: Entities::Member, current_user: current_user
       end
......
@@ -101,12 +101,12 @@ module API
         user = User.find_by_id(params[:user_id])
         not_found!('User') unless user

-        member = source.add_user(user, params[:access_level], current_user: current_user, expires_at: params[:expires_at])
+        member = create_member(current_user, user, source, params)

         if !member
           not_allowed! # This currently can only be reached in EE
         elsif member.persisted? && member.valid?
-          present_members member
+          present_members(member)
         else
           render_validation_error!(member)
         end
......
# frozen_string_literal: true
require 'uri'
module Banzai
module Filter
class BaseRelativeLinkFilter < HTML::Pipeline::Filter
include Gitlab::Utils::StrongMemoize
protected
def linkable_attributes
strong_memoize(:linkable_attributes) do
attrs = []
attrs += doc.search('a:not(.gfm)').map do |el|
el.attribute('href')
end
attrs += doc.search('img:not(.gfm), video:not(.gfm), audio:not(.gfm)').flat_map do |el|
[el.attribute('src'), el.attribute('data-src')]
end
attrs.reject do |attr|
attr.blank? || attr.value.start_with?('//')
end
end
end
def relative_url_root
Gitlab.config.gitlab.relative_url_root.presence || '/'
end
def project
context[:project]
end
private
def unescape_and_scrub_uri(uri)
Addressable::URI.unescape(uri).scrub
end
end
end
end
@@ -4,19 +4,17 @@ require 'uri'

 module Banzai
   module Filter
-    # HTML filter that "fixes" relative links to uploads or files in a repository.
+    # HTML filter that "fixes" relative links to files in a repository.
     #
     # Context options:
     #   :commit
-    #   :group
     #   :current_user
     #   :project
     #   :project_wiki
     #   :ref
     #   :requested_path
-    class RelativeLinkFilter < HTML::Pipeline::Filter
-      include Gitlab::Utils::StrongMemoize
-
+    #   :system_note
+    class RepositoryLinkFilter < BaseRelativeLinkFilter
       def call
         return doc if context[:system_note]
@@ -26,7 +24,9 @@ module Banzai
         load_uri_types

         linkable_attributes.each do |attr|
-          process_link_attr(attr)
+          if linkable_files? && repo_visible_to_user?
+            process_link_to_repository_attr(attr)
+          end
         end

         doc
@@ -35,8 +35,8 @@ module Banzai
       protected

       def load_uri_types
-        return unless linkable_files?
         return unless linkable_attributes.present?
+        return unless linkable_files?
         return {} unless repository

         @uri_types = request_path.present? ? get_uri_types([request_path]) : {}
@@ -57,24 +57,6 @@ module Banzai
         end
       end

-      def linkable_attributes
-        strong_memoize(:linkable_attributes) do
-          attrs = []
-
-          attrs += doc.search('a:not(.gfm)').map do |el|
-            el.attribute('href')
-          end
-
-          attrs += doc.search('img, video, audio').flat_map do |el|
-            [el.attribute('src'), el.attribute('data-src')]
-          end
-
-          attrs.reject do |attr|
-            attr.blank? || attr.value.start_with?('//')
-          end
-        end
-      end
-
       def get_uri_types(paths)
         return {} if paths.empty?

@@ -107,39 +89,6 @@ module Banzai
       rescue URI::Error, Addressable::URI::InvalidURIError
       end

-      def process_link_attr(html_attr)
-        if html_attr.value.start_with?('/uploads/')
-          process_link_to_upload_attr(html_attr)
-        elsif linkable_files? && repo_visible_to_user?
-          process_link_to_repository_attr(html_attr)
-        end
-      end
-
-      def process_link_to_upload_attr(html_attr)
-        path_parts = [unescape_and_scrub_uri(html_attr.value)]
-
-        if project
-          path_parts.unshift(relative_url_root, project.full_path)
-        elsif group
-          path_parts.unshift(relative_url_root, 'groups', group.full_path, '-')
-        else
-          path_parts.unshift(relative_url_root)
-        end
-
-        begin
-          path = Addressable::URI.escape(File.join(*path_parts))
-        rescue Addressable::URI::InvalidURIError
-          return
-        end
-
-        html_attr.value =
-          if context[:only_path]
-            path
-          else
-            Addressable::URI.join(Gitlab.config.gitlab.base_url, path).to_s
-          end
-      end
-
       def process_link_to_repository_attr(html_attr)
         uri = URI(html_attr.value)

@@ -239,10 +188,6 @@ module Banzai
         @current_commit ||= context[:commit] || repository.commit(ref)
       end

-      def relative_url_root
-        Gitlab.config.gitlab.relative_url_root.presence || '/'
-      end
-
       def repo_visible_to_user?
         project && Ability.allowed?(current_user, :download_code, project)
       end
@@ -251,14 +196,6 @@ module Banzai
         context[:ref] || project.default_branch
       end

-      def group
-        context[:group]
-      end
-
-      def project
-        context[:project]
-      end
-
       def current_user
         context[:current_user]
       end
@@ -266,12 +203,6 @@ module Banzai
       def repository
         @repository ||= project&.repository
       end
-
-      private
-
-      def unescape_and_scrub_uri(uri)
-        Addressable::URI.unescape(uri).scrub
-      end
     end
   end
 end
# frozen_string_literal: true
require 'uri'
module Banzai
module Filter
# HTML filter that "fixes" links to uploads.
#
# Context options:
# :group
# :only_path
# :project
# :system_note
class UploadLinkFilter < BaseRelativeLinkFilter
def call
return doc if context[:system_note]
linkable_attributes.each do |attr|
process_link_to_upload_attr(attr)
end
doc
end
protected
def process_link_to_upload_attr(html_attr)
return unless html_attr.value.start_with?('/uploads/')
path_parts = [unescape_and_scrub_uri(html_attr.value)]
if project
path_parts.unshift(relative_url_root, project.full_path)
elsif group
path_parts.unshift(relative_url_root, 'groups', group.full_path, '-')
else
path_parts.unshift(relative_url_root)
end
begin
path = Addressable::URI.escape(File.join(*path_parts))
rescue Addressable::URI::InvalidURIError
return
end
html_attr.value =
if context[:only_path]
path
else
Addressable::URI.join(Gitlab.config.gitlab.base_url, path).to_s
end
html_attr.parent.add_class('gfm')
end
def group
context[:group]
end
end
end
end
@@ -16,7 +16,10 @@ module Banzai
         [
           Filter::ReferenceRedactorFilter,
           Filter::InlineMetricsRedactorFilter,
-          Filter::RelativeLinkFilter,
+          # UploadLinkFilter must come before RepositoryLinkFilter to
+          # prevent unnecessary Gitaly calls from being made.
+          Filter::UploadLinkFilter,
+          Filter::RepositoryLinkFilter,
           Filter::IssuableStateFilter,
           Filter::SuggestionFilter
         ]
......
# frozen_string_literal: true
module Banzai
module Pipeline
class RelativeLinkPipeline < BasePipeline
def self.filters
FilterArray[
Filter::RelativeLinkFilter
]
end
end
end
end
@@ -24,12 +24,14 @@ module Gitlab
         fingerprints = []

         Key.where(id: start_id..stop_id, fingerprint_sha256: nil).find_each do |regular_key|
-          fingerprint = Base64.decode64(generate_ssh_public_key(regular_key.key))
-
-          fingerprints << {
-            id: regular_key.id,
-            fingerprint_sha256: ActiveRecord::Base.connection.escape_bytea(fingerprint)
-          }
+          if fingerprint = generate_ssh_public_key(regular_key.key)
+            bytea = ActiveRecord::Base.connection.escape_bytea(Base64.decode64(fingerprint))
+
+            fingerprints << {
+              id: regular_key.id,
+              fingerprint_sha256: bytea
+            }
+          end
         end

         Gitlab::Database.bulk_insert(TEMP_TABLE, fingerprints)
@@ -48,7 +50,7 @@ module Gitlab
       private

       def generate_ssh_public_key(regular_key)
-        Gitlab::SSHPublicKey.new(regular_key).fingerprint("SHA256").gsub("SHA256:", "")
+        Gitlab::SSHPublicKey.new(regular_key).fingerprint("SHA256")&.gsub("SHA256:", "")
       end

       def execute(query)
......
@@ -5217,6 +5217,9 @@ msgstr ""
 msgid "Create New Domain"
 msgstr ""

+msgid "Create Project"
+msgstr ""
+
 msgid "Create a GitLab account first, and then connect it to your %{label} account."
 msgstr ""

@@ -5849,6 +5852,9 @@ msgstr ""
 msgid "Delete pipeline"
 msgstr ""

+msgid "Delete project"
+msgstr ""
+
 msgid "Delete snippet"
 msgstr ""

@@ -7209,9 +7215,6 @@ msgstr ""
 msgid "Error Tracking"
 msgstr ""

-msgid "Error creating a new path"
-msgstr ""
-
 msgid "Error creating epic"
 msgstr ""

@@ -16412,6 +16415,30 @@ msgstr ""
 msgid "Self-monitoring project was not deleted. Please check logs for any error messages"
 msgstr ""

+msgid "SelfMonitoring|Disable self monitoring?"
+msgstr ""
+
+msgid "SelfMonitoring|Disabling this feature will delete the self monitoring project. Are you sure you want to delete the project?"
+msgstr ""
+
+msgid "SelfMonitoring|Enable or disable instance self monitoring"
+msgstr ""
+
+msgid "SelfMonitoring|Enabling this feature creates a %{projectLinkStart}project%{projectLinkEnd} that can be used to monitor the health of your instance."
+msgstr ""
+
+msgid "SelfMonitoring|Enabling this feature creates a project that can be used to monitor the health of your instance."
+msgstr ""
+
+msgid "SelfMonitoring|Self monitoring"
+msgstr ""
+
+msgid "SelfMonitoring|Self monitoring project has been successfully created."
+msgstr ""
+
+msgid "SelfMonitoring|Self monitoring project has been successfully deleted."
+msgstr ""
+
 msgid "Send a separate email notification to Developers."
 msgstr ""

@@ -18263,7 +18290,7 @@ msgstr ""
 msgid "The merge request can now be merged."
 msgstr ""

-msgid "The name %{entryName} is already taken in this directory."
+msgid "The name \"%{name}\" is already taken in this directory."
 msgstr ""

 msgid "The number of changes to be fetched from GitLab when cloning a repository. This can speed up Pipelines execution. Keep empty or set to 0 to disable shallow clone by default and make GitLab CI fetch all branches and tags each time."
@@ -20444,6 +20471,9 @@ msgstr ""
 msgid "View previous app"
 msgstr ""

+msgid "View project"
+msgstr ""
+
 msgid "View project labels"
 msgstr ""
......
@@ -18,7 +18,7 @@ module QA
         view 'app/assets/javascripts/deploy_keys/components/key.vue' do
           element :key
           element :key_title
-          element :key_fingerprint
+          element :key_md5_fingerprint
         end

         def add_key
@@ -33,17 +33,17 @@ module QA
           fill_in 'deploy_key_key', with: key
         end

-        def find_fingerprint(title)
+        def find_md5_fingerprint(title)
           within_project_deploy_keys do
             find_element(:key, text: title)
-              .find(element_selector_css(:key_fingerprint)).text
+              .find(element_selector_css(:key_md5_fingerprint)).text.delete_prefix('MD5:')
           end
         end

-        def has_key?(title, fingerprint)
+        def has_key?(title, md5_fingerprint)
           within_project_deploy_keys do
             find_element(:key, text: title)
-              .has_css?(element_selector_css(:key_fingerprint), text: fingerprint)
+              .has_css?(element_selector_css(:key_md5_fingerprint), text: "MD5:#{md5_fingerprint}")
           end
         end

@@ -53,12 +53,6 @@ module QA
           end
         end

-        def key_fingerprint
-          within_project_deploy_keys do
-            find_element(:key_fingerprint).text
-          end
-        end
-
         private

         def within_project_deploy_keys
......
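Note on the page object above: the element text rendered by key.vue is of the form "MD5:<digest>", so find_md5_fingerprint strips that label with String#delete_prefix before returning the value. A minimal illustrative Ruby snippet (not part of this commit) of that behaviour:

  text = 'MD5:aa:bb:cc:dd'        # text of the key_md5_fingerprint element
  text.delete_prefix('MD5:')      # => "aa:bb:cc:dd"
  text.delete_prefix('SHA256:')   # => "MD5:aa:bb:cc:dd" (prefix absent, string unchanged)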
...@@ -61,7 +61,7 @@ module QA ...@@ -61,7 +61,7 @@ module QA
end end
# Click the select element again to close the dropdown # Click the select element again to close the dropdown
click_element :protected_branch_select click_element :"allowed_to_#{action}_select"
end end
end end
end end
......
...@@ -5,10 +5,10 @@ module QA ...@@ -5,10 +5,10 @@ module QA
class DeployKey < Base class DeployKey < Base
attr_accessor :title, :key attr_accessor :title, :key
attribute :fingerprint do attribute :md5_fingerprint do
Page::Project::Settings::Repository.perform do |setting| Page::Project::Settings::Repository.perform do |setting|
setting.expand_deploy_keys do |key| setting.expand_deploy_keys do |key|
key.find_fingerprint(title) key.find_md5_fingerprint(title)
end end
end end
end end
......
...@@ -7,7 +7,7 @@ module QA ...@@ -7,7 +7,7 @@ module QA
attr_accessor :title attr_accessor :title
def_delegators :key, :private_key, :public_key, :fingerprint def_delegators :key, :private_key, :public_key, :md5_fingerprint
def key def key
@key ||= Runtime::Key::RSA.new @key ||= Runtime::Key::RSA.new
......
...@@ -4,7 +4,7 @@ module QA ...@@ -4,7 +4,7 @@ module QA
module Runtime module Runtime
module Key module Key
class Base class Base
attr_reader :name, :bits, :private_key, :public_key, :fingerprint attr_reader :name, :bits, :private_key, :public_key, :md5_fingerprint
def initialize(name, bits) def initialize(name, bits)
@name = name @name = name
...@@ -29,7 +29,7 @@ module QA ...@@ -29,7 +29,7 @@ module QA
def populate_key_data(path) def populate_key_data(path)
@private_key = ::File.binread(path) @private_key = ::File.binread(path)
@public_key = ::File.binread("#{path}.pub") @public_key = ::File.binread("#{path}.pub")
@fingerprint = @md5_fingerprint =
`ssh-keygen -l -E md5 -f #{path} | cut -d' ' -f2 | cut -d: -f2-`.chomp `ssh-keygen -l -E md5 -f #{path} | cut -d' ' -f2 | cut -d: -f2-`.chomp
end end
end end
......
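The shell pipeline above works as follows: ssh-keygen -l -E md5 -f <path> prints one line of the form "<bits> MD5:<digest> <comment> (<type>)"; cut -d' ' -f2 keeps the second space-separated field ("MD5:<digest>") and cut -d: -f2- drops the leading "MD5:" label, so @md5_fingerprint ends up holding only the colon-separated digest. The same parsing can be sketched in plain Ruby without the cut calls (md5_fingerprint_of is a hypothetical helper, not part of this commit):

  # Reduce "2048 MD5:aa:bb:cc:dd user@host (RSA)" to "aa:bb:cc:dd".
  def md5_fingerprint_of(path)
    line = `ssh-keygen -l -E md5 -f #{path}`.chomp
    line.split(' ')[1].delete_prefix('MD5:')
  end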
...@@ -13,7 +13,7 @@ module QA ...@@ -13,7 +13,7 @@ module QA
end end
expect(page).to have_content("Title: #{key_title}") expect(page).to have_content("Title: #{key_title}")
expect(page).to have_content(key.fingerprint) expect(page).to have_content(key.md5_fingerprint)
Page::Main::Menu.perform(&:click_settings_link) Page::Main::Menu.perform(&:click_settings_link)
Page::Profile::Menu.perform(&:click_ssh_keys) Page::Profile::Menu.perform(&:click_ssh_keys)
...@@ -23,7 +23,7 @@ module QA ...@@ -23,7 +23,7 @@ module QA
end end
expect(page).not_to have_content("Title: #{key_title}") expect(page).not_to have_content("Title: #{key_title}")
expect(page).not_to have_content(key.fingerprint) expect(page).not_to have_content(key.md5_fingerprint)
end end
end end
end end
......
...@@ -15,11 +15,11 @@ module QA ...@@ -15,11 +15,11 @@ module QA
resource.key = deploy_key_value resource.key = deploy_key_value
end end
expect(deploy_key.fingerprint).to eq key.fingerprint expect(deploy_key.md5_fingerprint).to eq key.md5_fingerprint
Page::Project::Settings::Repository.perform do |setting| Page::Project::Settings::Repository.perform do |setting|
setting.expand_deploy_keys do |keys| setting.expand_deploy_keys do |keys|
expect(keys).to have_key(deploy_key_title, key.fingerprint) expect(keys).to have_key(deploy_key_title, key.md5_fingerprint)
end end
end end
end end
......
...@@ -208,6 +208,8 @@ describe 'GitLab Markdown', :aggregate_failures do ...@@ -208,6 +208,8 @@ describe 'GitLab Markdown', :aggregate_failures do
@group = @feat.group @group = @feat.group
end end
let(:project) { @feat.project } # Shadow this so matchers can use it
context 'default pipeline' do context 'default pipeline' do
before do before do
@html = markdown(@feat.raw_markdown) @html = markdown(@feat.raw_markdown)
...@@ -216,8 +218,12 @@ describe 'GitLab Markdown', :aggregate_failures do ...@@ -216,8 +218,12 @@ describe 'GitLab Markdown', :aggregate_failures do
it_behaves_like 'all pipelines' it_behaves_like 'all pipelines'
it 'includes custom filters' do it 'includes custom filters' do
aggregate_failures 'RelativeLinkFilter' do aggregate_failures 'UploadLinkFilter' do
expect(doc).to parse_relative_links expect(doc).to parse_upload_links
end
aggregate_failures 'RepositoryLinkFilter' do
expect(doc).to parse_repository_links
end end
aggregate_failures 'EmojiFilter' do aggregate_failures 'EmojiFilter' do
...@@ -277,8 +283,12 @@ describe 'GitLab Markdown', :aggregate_failures do ...@@ -277,8 +283,12 @@ describe 'GitLab Markdown', :aggregate_failures do
it_behaves_like 'all pipelines' it_behaves_like 'all pipelines'
it 'includes custom filters' do it 'includes custom filters' do
aggregate_failures 'RelativeLinkFilter' do aggregate_failures 'UploadLinkFilter' do
expect(doc).not_to parse_relative_links expect(doc).to parse_upload_links
end
aggregate_failures 'RepositoryLinkFilter' do
expect(doc).not_to parse_repository_links
end end
aggregate_failures 'EmojiFilter' do aggregate_failures 'EmojiFilter' do
......
...@@ -111,7 +111,13 @@ Markdown should be usable inside a link. Let's try! ...@@ -111,7 +111,13 @@ Markdown should be usable inside a link. Let's try!
- [**text**](#link-strong) - [**text**](#link-strong)
- [`text`](#link-code) - [`text`](#link-code)
### RelativeLinkFilter ### UploadLinkFilter
Linking to an upload in this project should work:
[Relative Upload Link](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
![Relative Upload Image](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
### RepositoryLinkFilter
Linking to a file relative to this project's repository should work. Linking to a file relative to this project's repository should work.
......
...@@ -28,6 +28,7 @@ describe('CompareVersions', () => { ...@@ -28,6 +28,7 @@ describe('CompareVersions', () => {
propsData: { propsData: {
mergeRequestDiffs: diffsMockData, mergeRequestDiffs: diffsMockData,
mergeRequestDiff: diffsMockData[0], mergeRequestDiff: diffsMockData[0],
diffFilesLength: 0,
targetBranch, targetBranch,
...props, ...props,
}, },
......
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`self monitor component When the self monitor project has not been created default state to match the default snapshot 1`] = `
<section
class="settings no-animate js-self-monitoring-settings"
>
<div
class="settings-header"
>
<h4
class="js-section-header"
>
Self monitoring
</h4>
<gl-button-stub
class="js-settings-toggle"
>
Expand
</gl-button-stub>
<p
class="js-section-sub-header"
>
Enable or disable instance self monitoring
</p>
</div>
<div
class="settings-content"
>
<form
name="self-monitoring-form"
>
<p>
Enabling this feature creates a project that can be used to monitor the health of your instance.
</p>
<gl-form-group-stub
label="Create Project"
label-for="self-monitor-toggle"
>
<gl-toggle-stub
labeloff="Toggle Status: OFF"
labelon="Toggle Status: ON"
name="self-monitor-toggle"
/>
</gl-form-group-stub>
</form>
</div>
<gl-modal-stub
cancel-title="Cancel"
modalclass=""
modalid="delete-self-monitor-modal"
ok-title="Delete project"
ok-variant="danger"
title="Disable self monitoring?"
titletag="h4"
>
<div>
Disabling this feature will delete the self monitoring project. Are you sure you want to delete the project?
</div>
</gl-modal-stub>
</section>
`;
import { shallowMount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import SelfMonitor from '~/self_monitor/components/self_monitor_form.vue';
import { createStore } from '~/self_monitor/store';
describe('self monitor component', () => {
let wrapper;
let store;
describe('When the self monitor project has not been created', () => {
beforeEach(() => {
store = createStore({
projectEnabled: false,
selfMonitorProjectCreated: false,
createSelfMonitoringProjectPath: '/create',
deleteSelfMonitoringProjectPath: '/delete',
});
});
afterEach(() => {
if (wrapper.destroy) {
wrapper.destroy();
}
});
describe('default state', () => {
it('to match the default snapshot', () => {
wrapper = shallowMount(SelfMonitor, { store });
expect(wrapper.element).toMatchSnapshot();
});
});
it('renders header text', () => {
wrapper = shallowMount(SelfMonitor, { store });
expect(wrapper.find('.js-section-header').text()).toBe('Self monitoring');
});
describe('expand/collapse button', () => {
it('renders as an expand button by default', () => {
wrapper = shallowMount(SelfMonitor, { store });
const button = wrapper.find(GlButton);
expect(button.text()).toBe('Expand');
});
});
describe('sub-header', () => {
it('renders descriptive text', () => {
wrapper = shallowMount(SelfMonitor, { store });
expect(wrapper.find('.js-section-sub-header').text()).toContain(
'Enable or disable instance self monitoring',
);
});
});
describe('settings-content', () => {
it('renders the form description without a link', () => {
wrapper = shallowMount(SelfMonitor, { store });
expect(wrapper.vm.selfMonitoringFormText).toContain(
'Enabling this feature creates a project that can be used to monitor the health of your instance.',
);
});
it('renders the form description with a link', () => {
store = createStore({
projectEnabled: true,
selfMonitorProjectCreated: true,
createSelfMonitoringProjectPath: '/create',
deleteSelfMonitoringProjectPath: '/delete',
});
wrapper = shallowMount(SelfMonitor, { store });
expect(wrapper.vm.selfMonitoringFormText).toContain('<a href="http://localhost/">');
});
});
});
});
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import statusCodes from '~/lib/utils/http_status';
import * as actions from '~/self_monitor/store/actions';
import * as types from '~/self_monitor/store/mutation_types';
import createState from '~/self_monitor/store/state';
describe('self monitor actions', () => {
let state;
let mock;
beforeEach(() => {
state = createState();
mock = new MockAdapter(axios);
});
describe('setSelfMonitor', () => {
it('commits the SET_ENABLED mutation', done => {
testAction(
actions.setSelfMonitor,
null,
state,
[{ type: types.SET_ENABLED, payload: null }],
[],
done,
);
});
});
describe('resetAlert', () => {
it('commits the SET_ENABLED mutation', done => {
testAction(
actions.resetAlert,
null,
state,
[{ type: types.SET_SHOW_ALERT, payload: false }],
[],
done,
);
});
});
describe('requestCreateProject', () => {
describe('success', () => {
beforeEach(() => {
state.createProjectEndpoint = '/create';
state.createProjectStatusEndpoint = '/create_status';
mock.onPost(state.createProjectEndpoint).reply(statusCodes.ACCEPTED, {
job_id: '123',
});
mock.onGet(state.createProjectStatusEndpoint).reply(statusCodes.OK, {
project_full_path: '/self-monitor-url',
});
});
it('dispatches status request with job data', done => {
testAction(
actions.requestCreateProject,
null,
state,
[
{
type: types.SET_LOADING,
payload: true,
},
],
[
{
type: 'requestCreateProjectStatus',
payload: '123',
},
],
done,
);
});
it('dispatches success with project path', done => {
testAction(
actions.requestCreateProjectStatus,
null,
state,
[],
[
{
type: 'requestCreateProjectSuccess',
payload: { project_full_path: '/self-monitor-url' },
},
],
done,
);
});
});
describe('error', () => {
beforeEach(() => {
state.createProjectEndpoint = '/create';
mock.onPost(state.createProjectEndpoint).reply(500);
});
it('dispatches error', done => {
testAction(
actions.requestCreateProject,
null,
state,
[
{
type: types.SET_LOADING,
payload: true,
},
],
[
{
type: 'requestCreateProjectError',
payload: new Error('Request failed with status code 500'),
},
],
done,
);
});
});
describe('requestCreateProjectSuccess', () => {
it('should commit the received data', done => {
testAction(
actions.requestCreateProjectSuccess,
{ project_full_path: '/self-monitor-url' },
state,
[
{ type: types.SET_LOADING, payload: false },
{ type: types.SET_PROJECT_URL, payload: '/self-monitor-url' },
{
type: types.SET_ALERT_CONTENT,
payload: {
actionName: 'viewSelfMonitorProject',
actionText: 'View project',
message: 'Self monitoring project has been successfully created.',
},
},
{ type: types.SET_SHOW_ALERT, payload: true },
{ type: types.SET_PROJECT_CREATED, payload: true },
],
[],
done,
);
});
});
});
describe('deleteSelfMonitorProject', () => {
describe('success', () => {
beforeEach(() => {
state.deleteProjectEndpoint = '/delete';
state.deleteProjectStatusEndpoint = '/delete-status';
mock.onDelete(state.deleteProjectEndpoint).reply(statusCodes.ACCEPTED, {
job_id: '456',
});
mock.onGet(state.deleteProjectStatusEndpoint).reply(statusCodes.OK, {
status: 'success',
});
});
it('dispatches status request with job data', done => {
testAction(
actions.requestDeleteProject,
null,
state,
[
{
type: types.SET_LOADING,
payload: true,
},
],
[
{
type: 'requestDeleteProjectStatus',
payload: '456',
},
],
done,
);
});
it('dispatches success with status', done => {
testAction(
actions.requestDeleteProjectStatus,
null,
state,
[],
[
{
type: 'requestDeleteProjectSuccess',
payload: { status: 'success' },
},
],
done,
);
});
});
describe('error', () => {
beforeEach(() => {
state.deleteProjectEndpoint = '/delete';
mock.onDelete(state.deleteProjectEndpoint).reply(500);
});
it('dispatches error', done => {
testAction(
actions.requestDeleteProject,
null,
state,
[
{
type: types.SET_LOADING,
payload: true,
},
],
[
{
type: 'requestDeleteProjectError',
payload: new Error('Request failed with status code 500'),
},
],
done,
);
});
});
describe('requestDeleteProjectSuccess', () => {
it('should commit mutations to remove previously set data', done => {
testAction(
actions.requestDeleteProjectSuccess,
null,
state,
[
{ type: types.SET_PROJECT_URL, payload: '' },
{ type: types.SET_PROJECT_CREATED, payload: false },
{
type: types.SET_ALERT_CONTENT,
payload: {
actionName: 'createProject',
actionText: 'Undo',
message: 'Self monitoring project has been successfully deleted.',
},
},
{ type: types.SET_SHOW_ALERT, payload: true },
{ type: types.SET_LOADING, payload: false },
],
[],
done,
);
});
});
});
});
import mutations from '~/self_monitor/store/mutations';
import createState from '~/self_monitor/store/state';
describe('self monitoring mutations', () => {
let localState;
beforeEach(() => {
localState = createState();
});
describe('SET_ENABLED', () => {
it('sets selfMonitor', () => {
mutations.SET_ENABLED(localState, true);
expect(localState.projectEnabled).toBe(true);
});
});
describe('SET_PROJECT_CREATED', () => {
it('sets projectCreated', () => {
mutations.SET_PROJECT_CREATED(localState, true);
expect(localState.projectCreated).toBe(true);
});
});
describe('SET_SHOW_ALERT', () => {
it('sets showAlert', () => {
mutations.SET_SHOW_ALERT(localState, true);
expect(localState.showAlert).toBe(true);
});
});
describe('SET_PROJECT_URL', () => {
it('sets projectPath', () => {
mutations.SET_PROJECT_URL(localState, '/url/');
expect(localState.projectPath).toBe('/url/');
});
});
describe('SET_LOADING', () => {
it('sets loading', () => {
mutations.SET_LOADING(localState, true);
expect(localState.loading).toBe(true);
});
});
describe('SET_ALERT_CONTENT', () => {
it('sets alertContent', () => {
const alertContent = {
message: 'success',
actionText: 'undo',
actionName: 'createProject',
};
mutations.SET_ALERT_CONTENT(localState, alertContent);
expect(localState.alertContent).toBe(alertContent);
});
});
});
...@@ -77,7 +77,7 @@ describe('diffs/components/app', () => { ...@@ -77,7 +77,7 @@ describe('diffs/components/app', () => {
beforeEach(done => { beforeEach(done => {
const fetchResolver = () => { const fetchResolver = () => {
store.state.diffs.retrievingBatches = false; store.state.diffs.retrievingBatches = false;
return Promise.resolve(); return Promise.resolve({ real_size: 100 });
}; };
spyOn(window, 'requestIdleCallback').and.callFake(fn => fn()); spyOn(window, 'requestIdleCallback').and.callFake(fn => fn());
createComponent(); createComponent();
...@@ -229,6 +229,7 @@ describe('diffs/components/app', () => { ...@@ -229,6 +229,7 @@ describe('diffs/components/app', () => {
}); });
it('calls fetchDiffFiles if diffsBatchLoad is not enabled', done => { it('calls fetchDiffFiles if diffsBatchLoad is not enabled', done => {
expect(wrapper.vm.diffFilesLength).toEqual(0);
wrapper.vm.glFeatures.diffsBatchLoad = false; wrapper.vm.glFeatures.diffsBatchLoad = false;
wrapper.vm.fetchData(false); wrapper.vm.fetchData(false);
...@@ -238,12 +239,14 @@ describe('diffs/components/app', () => { ...@@ -238,12 +239,14 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.fetchDiffFilesMeta).not.toHaveBeenCalled(); expect(wrapper.vm.fetchDiffFilesMeta).not.toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled(); expect(wrapper.vm.fetchDiffFilesBatch).not.toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled(); expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
done(); done();
}); });
}); });
it('calls batch methods if diffsBatchLoad is enabled, and not latest version', done => { it('calls batch methods if diffsBatchLoad is enabled, and not latest version', done => {
expect(wrapper.vm.diffFilesLength).toEqual(0);
wrapper.vm.glFeatures.diffsBatchLoad = true; wrapper.vm.glFeatures.diffsBatchLoad = true;
wrapper.vm.isLatestVersion = () => false; wrapper.vm.isLatestVersion = () => false;
wrapper.vm.fetchData(false); wrapper.vm.fetchData(false);
...@@ -254,11 +257,13 @@ describe('diffs/components/app', () => { ...@@ -254,11 +257,13 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled(); expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled(); expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled(); expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
done(); done();
}); });
}); });
it('calls batch methods if diffsBatchLoad is enabled, and latest version', done => { it('calls batch methods if diffsBatchLoad is enabled, and latest version', done => {
expect(wrapper.vm.diffFilesLength).toEqual(0);
wrapper.vm.glFeatures.diffsBatchLoad = true; wrapper.vm.glFeatures.diffsBatchLoad = true;
wrapper.vm.fetchData(false); wrapper.vm.fetchData(false);
...@@ -268,6 +273,7 @@ describe('diffs/components/app', () => { ...@@ -268,6 +273,7 @@ describe('diffs/components/app', () => {
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled(); expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled(); expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled(); expect(wrapper.vm.unwatchDiscussions).toHaveBeenCalled();
expect(wrapper.vm.diffFilesLength).toEqual(100);
done(); done();
}); });
}); });
......
...@@ -141,6 +141,13 @@ describe('DiffsStoreActions', () => { ...@@ -141,6 +141,13 @@ describe('DiffsStoreActions', () => {
done(); done();
}, },
); );
fetchDiffFiles({ state: { endpoint }, commit: () => null })
.then(data => {
expect(data).toEqual(res);
done();
})
.catch(done.fail);
}); });
}); });
......
...@@ -263,14 +263,6 @@ describe('Diffs Module Getters', () => { ...@@ -263,14 +263,6 @@ describe('Diffs Module Getters', () => {
}); });
}); });
describe('diffFilesLength', () => {
it('returns length of diff files', () => {
localState.diffFiles.push('test', 'test 2');
expect(getters.diffFilesLength(localState)).toBe(2);
});
});
describe('currentDiffIndex', () => { describe('currentDiffIndex', () => {
it('returns index of currently selected diff in diffList', () => { it('returns index of currently selected diff in diffList', () => {
localState.diffFiles = [{ file_hash: '111' }, { file_hash: '222' }, { file_hash: '333' }]; localState.diffFiles = [{ file_hash: '111' }, { file_hash: '222' }, { file_hash: '333' }];
......
...@@ -52,19 +52,6 @@ describe('new file modal component', () => { ...@@ -52,19 +52,6 @@ describe('new file modal component', () => {
expect(templateFilesEl instanceof Element).toBeTruthy(); expect(templateFilesEl instanceof Element).toBeTruthy();
} }
}); });
describe('createEntryInStore', () => {
it('$emits create', () => {
spyOn(vm, 'createTempEntry');
vm.submitForm();
expect(vm.createTempEntry).toHaveBeenCalledWith({
name: 'testing',
type,
});
});
});
}); });
}); });
...@@ -145,31 +132,19 @@ describe('new file modal component', () => { ...@@ -145,31 +132,19 @@ describe('new file modal component', () => {
vm = createComponentWithStore(Component, store).$mount(); vm = createComponentWithStore(Component, store).$mount();
const flashSpy = spyOnDependency(modal, 'flash'); const flashSpy = spyOnDependency(modal, 'flash');
vm.submitForm();
expect(flashSpy).toHaveBeenCalled(); expect(flashSpy).not.toHaveBeenCalled();
});
it('calls createTempEntry when target path does not exist', () => {
const store = createStore();
store.state.entryModal = {
type: 'rename',
path: 'test-path/test',
entry: {
name: 'test',
type: 'blob',
path: 'test-path1/test',
},
};
vm = createComponentWithStore(Component, store).$mount();
spyOn(vm, 'createTempEntry').and.callFake(() => Promise.resolve());
vm.submitForm(); vm.submitForm();
expect(vm.createTempEntry).toHaveBeenCalledWith({ expect(flashSpy).toHaveBeenCalledWith(
name: 'test-path1', 'The name "test-path/test" is already taken in this directory.',
type: 'tree', 'alert',
}); jasmine.anything(),
null,
false,
true,
);
}); });
}); });
}); });
...@@ -201,35 +201,30 @@ describe('IDE store project actions', () => { ...@@ -201,35 +201,30 @@ describe('IDE store project actions', () => {
}); });
describe('showEmptyState', () => { describe('showEmptyState', () => {
it('commits proper mutations when supplied error is 404', done => { it('creates a blank tree and sets loading state to false', done => {
testAction( testAction(
showEmptyState, showEmptyState,
{ { projectId: 'abc/def', branchId: 'master' },
err: {
response: {
status: 404,
},
},
projectId: 'abc/def',
branchId: 'master',
},
store.state, store.state,
[ [
{ { type: 'CREATE_TREE', payload: { treePath: 'abc/def/master' } },
type: 'CREATE_TREE',
payload: {
treePath: 'abc/def/master',
},
},
{ {
type: 'TOGGLE_LOADING', type: 'TOGGLE_LOADING',
payload: { payload: { entry: store.state.trees['abc/def/master'], forceValue: false },
entry: store.state.trees['abc/def/master'],
forceValue: false,
},
}, },
], ],
[], jasmine.any(Object),
done,
);
});
it('sets the currentBranchId to the branchId that was passed', done => {
testAction(
showEmptyState,
{ projectId: 'abc/def', branchId: 'master' },
store.state,
jasmine.any(Object),
[{ type: 'setCurrentBranchId', payload: 'master' }],
done, done,
); );
}); });
......
...@@ -206,13 +206,17 @@ describe('Multi-file store actions', () => { ...@@ -206,13 +206,17 @@ describe('Multi-file store actions', () => {
describe('blob', () => { describe('blob', () => {
it('creates temp file', done => { it('creates temp file', done => {
const name = 'test';
store store
.dispatch('createTempEntry', { .dispatch('createTempEntry', {
name: 'test', name,
branchId: 'mybranch', branchId: 'mybranch',
type: 'blob', type: 'blob',
}) })
.then(f => { .then(() => {
const f = store.state.entries[name];
expect(f.tempFile).toBeTruthy(); expect(f.tempFile).toBeTruthy();
expect(store.state.trees['abcproject/mybranch'].tree.length).toBe(1); expect(store.state.trees['abcproject/mybranch'].tree.length).toBe(1);
...@@ -222,13 +226,17 @@ describe('Multi-file store actions', () => { ...@@ -222,13 +226,17 @@ describe('Multi-file store actions', () => {
}); });
it('adds tmp file to open files', done => { it('adds tmp file to open files', done => {
const name = 'test';
store store
.dispatch('createTempEntry', { .dispatch('createTempEntry', {
name: 'test', name,
branchId: 'mybranch', branchId: 'mybranch',
type: 'blob', type: 'blob',
}) })
.then(f => { .then(() => {
const f = store.state.entries[name];
expect(store.state.openFiles.length).toBe(1); expect(store.state.openFiles.length).toBe(1);
expect(store.state.openFiles[0].name).toBe(f.name); expect(store.state.openFiles[0].name).toBe(f.name);
...@@ -238,13 +246,17 @@ describe('Multi-file store actions', () => { ...@@ -238,13 +246,17 @@ describe('Multi-file store actions', () => {
}); });
it('adds tmp file to changed files', done => { it('adds tmp file to changed files', done => {
const name = 'test';
store store
.dispatch('createTempEntry', { .dispatch('createTempEntry', {
name: 'test', name,
branchId: 'mybranch', branchId: 'mybranch',
type: 'blob', type: 'blob',
}) })
.then(f => { .then(() => {
const f = store.state.entries[name];
expect(store.state.changedFiles.length).toBe(1); expect(store.state.changedFiles.length).toBe(1);
expect(store.state.changedFiles[0].name).toBe(f.name); expect(store.state.changedFiles[0].name).toBe(f.name);
...@@ -292,7 +304,9 @@ describe('Multi-file store actions', () => { ...@@ -292,7 +304,9 @@ describe('Multi-file store actions', () => {
type: 'blob', type: 'blob',
}) })
.then(() => { .then(() => {
expect(document.querySelector('.flash-alert')).not.toBeNull(); expect(document.querySelector('.flash-alert')?.textContent.trim()).toEqual(
`The name "${f.name}" is already taken in this directory.`,
);
done(); done();
}) })
...@@ -604,36 +618,98 @@ describe('Multi-file store actions', () => { ...@@ -604,36 +618,98 @@ describe('Multi-file store actions', () => {
); );
}); });
it('if renamed, reverts the rename before deleting', () => { describe('when renamed', () => {
const testEntry = { let testEntry;
path: 'test',
name: 'test',
prevPath: 'lorem/ipsum',
prevName: 'ipsum',
prevParentPath: 'lorem',
};
store.state.entries = { test: testEntry }; beforeEach(() => {
testAction( testEntry = {
deleteEntry, path: 'test',
testEntry.path, name: 'test',
store.state, prevPath: 'test_old',
[], prevName: 'test_old',
[ prevParentPath: '',
{ };
type: 'renameEntry',
payload: { store.state.entries = { test: testEntry };
path: testEntry.path, });
name: testEntry.prevName,
parentPath: testEntry.prevParentPath, describe('and previous does not exist', () => {
}, it('reverts the rename before deleting', done => {
}, testAction(
{ deleteEntry,
type: 'deleteEntry', testEntry.path,
payload: testEntry.prevPath, store.state,
}, [],
], [
); {
type: 'renameEntry',
payload: {
path: testEntry.path,
name: testEntry.prevName,
parentPath: testEntry.prevParentPath,
},
},
{
type: 'deleteEntry',
payload: testEntry.prevPath,
},
],
done,
);
});
});
describe('and previous exists', () => {
beforeEach(() => {
const oldEntry = {
path: testEntry.prevPath,
name: testEntry.prevName,
};
store.state.entries[oldEntry.path] = oldEntry;
});
it('does not revert rename before deleting', done => {
testAction(
deleteEntry,
testEntry.path,
store.state,
[{ type: types.DELETE_ENTRY, payload: testEntry.path }],
[
{ type: 'burstUnusedSeal' },
{ type: 'stageChange', payload: testEntry.path },
{ type: 'triggerFilesChange' },
],
done,
);
});
it('when previous is deleted, it reverts rename before deleting', done => {
store.state.entries[testEntry.prevPath].deleted = true;
testAction(
deleteEntry,
testEntry.path,
store.state,
[],
[
{
type: 'renameEntry',
payload: {
path: testEntry.path,
name: testEntry.prevName,
parentPath: testEntry.prevParentPath,
},
},
{
type: 'deleteEntry',
payload: testEntry.prevPath,
},
],
done,
);
});
});
}); });
it('bursts unused seal', done => { it('bursts unused seal', done => {
...@@ -918,6 +994,103 @@ describe('Multi-file store actions', () => { ...@@ -918,6 +994,103 @@ describe('Multi-file store actions', () => {
.then(done) .then(done)
.catch(done.fail); .catch(done.fail);
}); });
describe('with file in directory', () => {
const parentPath = 'original-dir';
const newParentPath = 'new-dir';
const fileName = 'test.md';
const filePath = `${parentPath}/${fileName}`;
let rootDir;
beforeEach(() => {
const parentEntry = file(parentPath, parentPath, 'tree');
const fileEntry = file(filePath, filePath, 'blob', parentEntry);
rootDir = {
tree: [],
};
Object.assign(store.state, {
entries: {
[parentPath]: {
...parentEntry,
tree: [fileEntry],
},
[filePath]: fileEntry,
},
trees: {
'/': rootDir,
},
});
});
it('creates new directory', done => {
expect(store.state.entries[newParentPath]).toBeUndefined();
store
.dispatch('renameEntry', { path: filePath, name: fileName, parentPath: newParentPath })
.then(() => {
expect(store.state.entries[newParentPath]).toEqual(
jasmine.objectContaining({
path: newParentPath,
type: 'tree',
tree: jasmine.arrayContaining([
store.state.entries[`${newParentPath}/${fileName}`],
]),
}),
);
})
.then(done)
.catch(done.fail);
});
describe('when new directory exists', () => {
let newDir;
beforeEach(() => {
newDir = file(newParentPath, newParentPath, 'tree');
store.state.entries[newDir.path] = newDir;
rootDir.tree.push(newDir);
});
it('inserts in new directory', done => {
expect(newDir.tree).toEqual([]);
store
.dispatch('renameEntry', {
path: filePath,
name: fileName,
parentPath: newParentPath,
})
.then(() => {
expect(newDir.tree).toEqual([store.state.entries[`${newParentPath}/${fileName}`]]);
})
.then(done)
.catch(done.fail);
});
it('when new directory is deleted, it undeletes it', done => {
store.dispatch('deleteEntry', newParentPath);
expect(store.state.entries[newParentPath].deleted).toBe(true);
expect(rootDir.tree.some(x => x.path === newParentPath)).toBe(false);
store
.dispatch('renameEntry', {
path: filePath,
name: fileName,
parentPath: newParentPath,
})
.then(() => {
expect(store.state.entries[newParentPath].deleted).toBe(false);
expect(rootDir.tree.some(x => x.path === newParentPath)).toBe(true);
})
.then(done)
.catch(done.fail);
});
});
});
}); });
}); });
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
describe Banzai::Filter::RelativeLinkFilter do describe Banzai::Filter::RepositoryLinkFilter do
include GitHelpers include GitHelpers
include RepoHelpers include RepoHelpers
...@@ -128,11 +128,6 @@ describe Banzai::Filter::RelativeLinkFilter do ...@@ -128,11 +128,6 @@ describe Banzai::Filter::RelativeLinkFilter do
expect { filter(act) }.not_to raise_error expect { filter(act) }.not_to raise_error
end end
it 'does not raise an exception on URIs containing invalid utf-8 byte sequences in uploads' do
act = link("/uploads/%FF")
expect { filter(act) }.not_to raise_error
end
it 'does not raise an exception on URIs containing invalid utf-8 byte sequences in context requested path' do it 'does not raise an exception on URIs containing invalid utf-8 byte sequences in context requested path' do
expect { filter(link("files/test.md"), requested_path: '%FF') }.not_to raise_error expect { filter(link("files/test.md"), requested_path: '%FF') }.not_to raise_error
end end
...@@ -147,11 +142,6 @@ describe Banzai::Filter::RelativeLinkFilter do ...@@ -147,11 +142,6 @@ describe Banzai::Filter::RelativeLinkFilter do
expect { filter(act) }.not_to raise_error expect { filter(act) }.not_to raise_error
end end
it 'does not raise an exception with a space in the path' do
act = link("/uploads/d18213acd3732630991986120e167e3d/Landscape_8.jpg \nBut here's some more unexpected text :smile:)")
expect { filter(act) }.not_to raise_error
end
it 'ignores ref if commit is passed' do it 'ignores ref if commit is passed' do
doc = filter(link('non/existent.file'), commit: project.commit('empty-branch') ) doc = filter(link('non/existent.file'), commit: project.commit('empty-branch') )
expect(doc.at_css('a')['href']) expect(doc.at_css('a')['href'])
...@@ -350,166 +340,4 @@ describe Banzai::Filter::RelativeLinkFilter do ...@@ -350,166 +340,4 @@ describe Banzai::Filter::RelativeLinkFilter do
include_examples :valid_repository include_examples :valid_repository
end end
context 'with a /upload/ URL' do
# not needed
let(:commit) { nil }
let(:ref) { nil }
let(:requested_path) { nil }
let(:upload_path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
let(:relative_path) { "/#{project.full_path}#{upload_path}" }
context 'to a project upload' do
shared_examples 'rewrite project uploads' do
context 'with an absolute URL' do
let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
let(:only_path) { false }
it 'rewrites the link correctly' do
doc = filter(link(upload_path))
expect(doc.at_css('a')['href']).to eq(absolute_path)
end
end
it 'rebuilds relative URL for a link' do
doc = filter(link(upload_path))
expect(doc.at_css('a')['href']).to eq(relative_path)
doc = filter(nested(link(upload_path)))
expect(doc.at_css('a')['href']).to eq(relative_path)
end
it 'rebuilds relative URL for an image' do
doc = filter(image(upload_path))
expect(doc.at_css('img')['src']).to eq(relative_path)
doc = filter(nested(image(upload_path)))
expect(doc.at_css('img')['src']).to eq(relative_path)
end
it 'does not modify absolute URL' do
doc = filter(link('http://example.com'))
expect(doc.at_css('a')['href']).to eq 'http://example.com'
end
it 'supports unescaped Unicode filenames' do
path = '/uploads/한글.png'
doc = filter(link(path))
expect(doc.at_css('a')['href']).to eq("/#{project.full_path}/uploads/%ED%95%9C%EA%B8%80.png")
end
it 'supports escaped Unicode filenames' do
path = '/uploads/한글.png'
escaped = Addressable::URI.escape(path)
doc = filter(image(escaped))
expect(doc.at_css('img')['src']).to eq("/#{project.full_path}/uploads/%ED%95%9C%EA%B8%80.png")
end
end
context 'without project repository access' do
let(:project) { create(:project, :repository, repository_access_level: ProjectFeature::PRIVATE) }
it_behaves_like 'rewrite project uploads'
end
context 'with project repository access' do
it_behaves_like 'rewrite project uploads'
end
end
context 'to a group upload' do
let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
let(:group) { create(:group) }
let(:project) { nil }
let(:relative_path) { "/groups/#{group.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg" }
context 'with an absolute URL' do
let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
let(:only_path) { false }
it 'rewrites the link correctly' do
doc = filter(upload_link)
expect(doc.at_css('a')['href']).to eq(absolute_path)
end
end
it 'rewrites the link correctly' do
doc = filter(upload_link)
expect(doc.at_css('a')['href']).to eq(relative_path)
end
it 'rewrites the link correctly for subgroup' do
group.update!(parent: create(:group))
doc = filter(upload_link)
expect(doc.at_css('a')['href']).to eq(relative_path)
end
it 'does not modify absolute URL' do
doc = filter(link('http://example.com'))
expect(doc.at_css('a')['href']).to eq 'http://example.com'
end
end
context 'to a personal snippet' do
let(:group) { nil }
let(:project) { nil }
let(:relative_path) { '/uploads/-/system/personal_snippet/6/674e4f07fbf0a7736c3439212896e51a/example.tar.gz' }
context 'with an absolute URL' do
let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
let(:only_path) { false }
it 'rewrites the link correctly' do
doc = filter(link(relative_path))
expect(doc.at_css('a')['href']).to eq(absolute_path)
end
end
context 'with a relative URL root' do
let(:gitlab_root) { '/gitlab' }
let(:absolute_path) { Gitlab.config.gitlab.url + gitlab_root + relative_path }
before do
stub_config_setting(relative_url_root: gitlab_root)
end
context 'with an absolute URL' do
let(:only_path) { false }
it 'rewrites the link correctly' do
doc = filter(link(relative_path))
expect(doc.at_css('a')['href']).to eq(absolute_path)
end
end
it 'rewrites the link correctly' do
doc = filter(link(relative_path))
expect(doc.at_css('a')['href']).to eq(gitlab_root + relative_path)
end
end
it 'rewrites the link correctly' do
doc = filter(link(relative_path))
expect(doc.at_css('a')['href']).to eq(relative_path)
end
it 'does not modify absolute URL' do
doc = filter(link('http://example.com'))
expect(doc.at_css('a')['href']).to eq 'http://example.com'
end
end
end
end end
# frozen_string_literal: true
require 'spec_helper'
describe Banzai::Filter::UploadLinkFilter do
def filter(doc, contexts = {})
contexts.reverse_merge!(
project: project,
group: group,
only_path: only_path
)
described_class.call(doc, contexts)
end
def image(path)
%(<img src="#{path}" />)
end
def video(path)
%(<video src="#{path}"></video>)
end
def audio(path)
%(<audio src="#{path}"></audio>)
end
def link(path)
%(<a href="#{path}">#{path}</a>)
end
def nested(element)
%(<div>#{element}</div>)
end
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
let(:group) { nil }
let(:project_path) { project.full_path }
let(:only_path) { true }
let(:upload_path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
let(:relative_path) { "/#{project.full_path}#{upload_path}" }
context 'to a project upload' do
context 'with an absolute URL' do
let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
let(:only_path) { false }
it 'rewrites the link correctly' do
doc = filter(link(upload_path))
expect(doc.at_css('a')['href']).to eq(absolute_path)
expect(doc.at_css('a').classes).to include('gfm')
end
end
it 'rebuilds relative URL for a link' do
doc = filter(link(upload_path))
expect(doc.at_css('a')['href']).to eq(relative_path)
expect(doc.at_css('a').classes).to include('gfm')
doc = filter(nested(link(upload_path)))
expect(doc.at_css('a')['href']).to eq(relative_path)
expect(doc.at_css('a').classes).to include('gfm')
end
it 'rebuilds relative URL for an image' do
doc = filter(image(upload_path))
expect(doc.at_css('img')['src']).to eq(relative_path)
expect(doc.at_css('img').classes).to include('gfm')
doc = filter(nested(image(upload_path)))
expect(doc.at_css('img')['src']).to eq(relative_path)
expect(doc.at_css('img').classes).to include('gfm')
end
it 'does not modify absolute URL' do
doc = filter(link('http://example.com'))
expect(doc.at_css('a')['href']).to eq 'http://example.com'
expect(doc.at_css('a').classes).not_to include('gfm')
end
it 'supports unescaped Unicode filenames' do
path = '/uploads/한글.png'
doc = filter(link(path))
expect(doc.at_css('a')['href']).to eq("/#{project.full_path}/uploads/%ED%95%9C%EA%B8%80.png")
expect(doc.at_css('a').classes).to include('gfm')
end
it 'supports escaped Unicode filenames' do
path = '/uploads/한글.png'
escaped = Addressable::URI.escape(path)
doc = filter(image(escaped))
expect(doc.at_css('img')['src']).to eq("/#{project.full_path}/uploads/%ED%95%9C%EA%B8%80.png")
expect(doc.at_css('img').classes).to include('gfm')
end
end
context 'to a group upload' do
let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
let_it_be(:group) { create(:group) }
let(:project) { nil }
let(:relative_path) { "/groups/#{group.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg" }
context 'with an absolute URL' do
let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
let(:only_path) { false }
it 'rewrites the link correctly' do
doc = filter(upload_link)
expect(doc.at_css('a')['href']).to eq(absolute_path)
expect(doc.at_css('a').classes).to include('gfm')
end
end
it 'rewrites the link correctly' do
doc = filter(upload_link)
expect(doc.at_css('a')['href']).to eq(relative_path)
expect(doc.at_css('a').classes).to include('gfm')
end
it 'rewrites the link correctly for subgroup' do
group.update!(parent: create(:group))
doc = filter(upload_link)
expect(doc.at_css('a')['href']).to eq(relative_path)
expect(doc.at_css('a').classes).to include('gfm')
end
it 'does not modify absolute URL' do
doc = filter(link('http://example.com'))
expect(doc.at_css('a')['href']).to eq 'http://example.com'
expect(doc.at_css('a').classes).not_to include('gfm')
end
end
context 'to a personal snippet' do
let(:group) { nil }
let(:project) { nil }
let(:relative_path) { '/uploads/-/system/personal_snippet/6/674e4f07fbf0a7736c3439212896e51a/example.tar.gz' }
context 'with an absolute URL' do
let(:absolute_path) { Gitlab.config.gitlab.url + relative_path }
let(:only_path) { false }
it 'rewrites the link correctly' do
doc = filter(link(relative_path))
expect(doc.at_css('a')['href']).to eq(absolute_path)
expect(doc.at_css('a').classes).to include('gfm')
end
end
context 'with a relative URL root' do
let(:gitlab_root) { '/gitlab' }
let(:absolute_path) { Gitlab.config.gitlab.url + gitlab_root + relative_path }
before do
stub_config_setting(relative_url_root: gitlab_root)
end
context 'with an absolute URL' do
let(:only_path) { false }
it 'rewrites the link correctly' do
doc = filter(link(relative_path))
expect(doc.at_css('a')['href']).to eq(absolute_path)
expect(doc.at_css('a').classes).to include('gfm')
end
end
it 'rewrites the link correctly' do
doc = filter(link(relative_path))
expect(doc.at_css('a')['href']).to eq(gitlab_root + relative_path)
expect(doc.at_css('a').classes).to include('gfm')
end
end
it 'rewrites the link correctly' do
doc = filter(link(relative_path))
expect(doc.at_css('a')['href']).to eq(relative_path)
expect(doc.at_css('a').classes).to include('gfm')
end
it 'does not modify absolute URL' do
doc = filter(link('http://example.com'))
expect(doc.at_css('a')['href']).to eq 'http://example.com'
expect(doc.at_css('a').classes).not_to include('gfm')
end
end
context 'invalid input' do
using RSpec::Parameterized::TableSyntax
where(:name, :href) do
'invalid URI' | '://foo'
'invalid UTF-8 byte sequences' | '%FF'
'garbled path' | 'open(/var/tmp/):%20/location%0Afrom:%20/test'
'whitespace' | "d18213acd3732630991986120e167e3d/Landscape_8.jpg\nand more"
end
with_them do
it { expect { filter(link("/uploads/#{href}")) }.not_to raise_error }
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Banzai::Pipeline::PostProcessPipeline do
context 'when a document only has upload links' do
it 'does not make any Gitaly calls', :request_store do
markdown = <<-MARKDOWN.strip_heredoc
[Relative Upload Link](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
![Relative Upload Image](/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg)
MARKDOWN
context = {
project: create(:project, :public, :repository),
ref: 'master'
}
Gitlab::GitalyClient.reset_counts
described_class.call(markdown, context)
expect(Gitlab::GitalyClient.get_request_count).to eq(0)
end
end
end
...@@ -37,6 +37,25 @@ describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, :migra ...@@ -37,6 +37,25 @@ describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, :migra
expect(key_2.fingerprint_sha256).to eq('zMNbLekgdjtcgDv8VSC0z5lpdACMG3Q4PUoIz5+H2jM') expect(key_2.fingerprint_sha256).to eq('zMNbLekgdjtcgDv8VSC0z5lpdACMG3Q4PUoIz5+H2jM')
end end
context 'with invalid keys' do
before do
key = Key.find(1017)
# double space after "ssh-rsa" leads to a
# OpenSSL::PKey::PKeyError in Net::SSH::KeyFactory.load_data_public_key
key.update_column(:key, key.key.gsub('ssh-rsa ', 'ssh-rsa '))
end
it 'ignores errors and does not set the fingerprint' do
fingerprint_migrator.perform(1, 10000)
key_1 = Key.find(1017)
key_2 = Key.find(1027)
expect(key_1.fingerprint_sha256).to be_nil
expect(key_2.fingerprint_sha256).not_to be_nil
end
end
it 'migrates all keys' do it 'migrates all keys' do
expect(Key.where(fingerprint_sha256: nil).count).to eq(Key.all.count) expect(Key.where(fingerprint_sha256: nil).count).to eq(Key.all.count)
......
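The new "with invalid keys" context depends on two behaviours: Net::SSH::KeyFactory.load_data_public_key raises on the malformed key text (the doubled space after "ssh-rsa"), and the background migration rescues that error, leaving fingerprint_sha256 nil for the affected row while still migrating the rest. For illustration only (this is not the migration's actual implementation), an OpenSSH-style SHA256 fingerprint without the "SHA256:" prefix can be derived roughly like this:

  require 'net/ssh'
  require 'digest'
  require 'base64'

  # Returns the unpadded base64 SHA256 fingerprint, or nil when the key cannot be parsed
  # (e.g. OpenSSL::PKey::PKeyError raised inside Net::SSH::KeyFactory).
  def sha256_fingerprint(key_text)
    public_key = Net::SSH::KeyFactory.load_data_public_key(key_text)
    Base64.strict_encode64(Digest::SHA256.digest(public_key.to_blob)).sub(/=+\z/, '')
  rescue StandardError
    nil
  end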
...@@ -10,8 +10,21 @@ module MarkdownMatchers ...@@ -10,8 +10,21 @@ module MarkdownMatchers
extend RSpec::Matchers::DSL extend RSpec::Matchers::DSL
include Capybara::Node::Matchers include Capybara::Node::Matchers
# RelativeLinkFilter # UploadLinkFilter
matcher :parse_relative_links do matcher :parse_upload_links do
set_default_markdown_messages
match do |actual|
link = actual.at_css('a:contains("Relative Upload Link")')
image = actual.at_css('img[alt="Relative Upload Image"]')
expect(link['href']).to eq("/#{project.full_path}/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg")
expect(image['data-src']).to eq("/#{project.full_path}/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg")
end
end
# RepositoryLinkFilter
matcher :parse_repository_links do
set_default_markdown_messages set_default_markdown_messages
match do |actual| match do |actual|
......