Commit a47bbf7c authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@13-5-stable-ee

parent 4bafeeda
......@@ -222,7 +222,7 @@ export default {
<a
ref="titleWrapper"
:v-once="!viewDiffsFileByFile"
class="gl-mr-2 gl-text-decoration-none!"
class="gl-mr-2 gl-text-decoration-none! gl-text-truncate"
:href="titleLink"
@click="handleFileNameClick"
>
......
......@@ -54,7 +54,7 @@ export default {
<ide-tree-list>
<template #header>
{{ __('Edit') }}
<div class="ide-tree-actions ml-auto d-flex">
<div class="ide-tree-actions ml-auto d-flex" data-testid="ide-root-actions">
<new-entry-button
:label="__('New file')"
:show-label="false"
......
......@@ -657,6 +657,24 @@ export const secondsToMilliseconds = seconds => seconds * 1000;
*/
export const secondsToDays = seconds => Math.round(seconds / 86400);
/**
 * Converts a numeric UTC offset in seconds to +/- hours,
 * e.g. -32400 => -9 hours
 * e.g. -12600 => -3.5 hours
 *
 * @param {Number} offset UTC offset in seconds as an integer
 *
 * @return {String|Number} the signed offset in hours
*/
export const secondsToHours = offset => {
const parsed = parseInt(offset, 10);
if (Number.isNaN(parsed) || parsed === 0) {
return `0`;
}
const num = offset / 3600;
return parseInt(num, 10) !== num ? num.toFixed(1) : num;
};
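// A quick sketch of the expected return values, derived from the logic above:
//
//   secondsToHours(-32400) // => -9     (whole hours stay numeric)
//   secondsToHours(-12600) // => '-3.5' (fractional offsets become a string via toFixed(1))
//   secondsToHours('abc')  // => '0'    (non-numeric input falls back to '0')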
/**
* Returns the date n days after the date provided
*
......
......@@ -2,6 +2,7 @@
import { GlDropdown, GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
import { __ } from '~/locale';
import autofocusonshow from '~/vue_shared/directives/autofocusonshow';
import { secondsToHours } from '~/lib/utils/datetime_utility';
export default {
name: 'TimezoneDropdown',
......@@ -58,16 +59,8 @@ export default {
isSelected(timezone) {
return this.value === timezone.formattedTimezone;
},
formatUtcOffset(offset) {
const parsed = parseInt(offset, 10);
if (Number.isNaN(parsed) || parsed === 0) {
return `0`;
}
const prefix = offset > 0 ? '+' : '-';
return `${prefix}${Math.abs(offset / 3600)}`;
},
formatTimezone(item) {
return `[UTC ${this.formatUtcOffset(item.offset)}] ${item.name}`;
return `[UTC ${secondsToHours(item.offset)}] ${item.name}`;
},
},
};
......
......@@ -45,6 +45,10 @@ module Ci
def get_store_class(store)
@stores ||= {}
# Can't memoize this because the feature flag may alter this
return fog_store_class.new if store.to_sym == :fog
@stores[store] ||= "Ci::BuildTraceChunks::#{store.capitalize}".constantize.new
end
......@@ -74,6 +78,14 @@ module Ci
def metadata_attributes
attribute_names - %w[raw_data]
end
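# Picks the :fog store implementation. Gated by the ci_trace_new_fog_store
# feature flag (enabled by default) so the legacy connection-based store
# can still be selected at runtime without a restart.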
def fog_store_class
if Feature.enabled?(:ci_trace_new_fog_store, default_enabled: true)
Ci::BuildTraceChunks::Fog
else
Ci::BuildTraceChunks::LegacyFog
end
end
end
def data
......
......@@ -8,13 +8,17 @@ module Ci
end
def data(model)
connection.get_object(bucket_name, key(model))[:body]
files.get(key(model))&.body
rescue Excon::Error::NotFound
# If the object does not exist in the object storage, this method returns nil.
end
def set_data(model, new_data)
connection.put_object(bucket_name, key(model), new_data)
# TODO: Support AWS S3 server-side encryption
files.create({
key: key(model),
body: new_data
})
end
def append_data(model, new_data, offset)
......@@ -43,7 +47,7 @@ module Ci
def delete_keys(keys)
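# files.destroy issues a DELETE for the given key directly; the object
# does not need to be fetched first.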
keys.each do |key|
connection.delete_object(bucket_name, key_raw(*key))
files.destroy(key_raw(*key))
end
end
......@@ -69,6 +73,14 @@ module Ci
@connection ||= ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
end
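# NOTE: directories.new builds a local handle for the bucket without
# issuing an API request, so memoizing it (and its files collection
# below) is cheap and avoids a lookup per operation.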
def fog_directory
@fog_directory ||= connection.directories.new(key: bucket_name)
end
def files
@files ||= fog_directory.files
end
def object_store
Gitlab.config.artifacts.object_store
end
......
# frozen_string_literal: true
module Ci
module BuildTraceChunks
class LegacyFog
def available?
object_store.enabled
end
def data(model)
connection.get_object(bucket_name, key(model))[:body]
rescue Excon::Error::NotFound
# If the object does not exist in the object storage, this method returns nil.
end
def set_data(model, new_data)
connection.put_object(bucket_name, key(model), new_data)
end
def append_data(model, new_data, offset)
if offset > 0
truncated_data = data(model).to_s.byteslice(0, offset)
new_data = truncated_data + new_data
end
set_data(model, new_data)
new_data.bytesize
end
def size(model)
data(model).to_s.bytesize
end
def delete_data(model)
delete_keys([[model.build_id, model.chunk_index]])
end
def keys(relation)
return [] unless available?
relation.pluck(:build_id, :chunk_index)
end
def delete_keys(keys)
keys.each do |key|
connection.delete_object(bucket_name, key_raw(*key))
end
end
private
def key(model)
key_raw(model.build_id, model.chunk_index)
end
def key_raw(build_id, chunk_index)
"tmp/builds/#{build_id.to_i}/chunks/#{chunk_index.to_i}.log"
end
def bucket_name
return unless available?
object_store.remote_directory
end
def connection
return unless available?
@connection ||= ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
end
def object_store
Gitlab.config.artifacts.object_store
end
end
end
end
---
title: Fix IDE issues with special characters
merge_request: 46398
author:
type: fixed
---
title: 'Auto Deploy: fixes issues with fetching other charts from the stable repo'
merge_request: 46531
author:
type: fixed
---
title: Ensure that copy to clipboard button is visible
merge_request: 46466
author:
type: fixed
---
title: Add environment variables to override backup/restore DB settings
merge_request: 45855
author:
type: added
---
name: ci_new_artifact_file_reader
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46552
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/273755
milestone: '13.6'
type: development
group: group::pipeline authoring
default_enabled: false
---
name: ci_trace_new_fog_store
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/46209
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/273405
type: development
group: group::testing
default_enabled: true
......@@ -106,7 +106,7 @@ See the section on [ETag mismatch errors](#etag-mismatch) for more details.
# OPTIONAL: The following lines are only needed if server-side encryption is required
gitlab_rails['object_store']['storage_options'] = {
'server_side_encryption' => '<AES256 or aws:kms>',
'server_side_encryption_kms_key_id' => '<arn:s3:aws:xxx>'
'server_side_encryption_kms_key_id' => '<arn:aws:kms:xxx>'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = '<artifacts>'
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = '<external-diffs>'
......@@ -145,7 +145,7 @@ See the section on [ETag mismatch errors](#etag-mismatch) for more details.
region: <eu-central-1>
storage_options:
server_side_encryption: <AES256 or aws:kms>
server_side_encryption_kms_key_id: <arn:s3:aws:xxx>
server_side_encryption_kms_key_id: <arn:aws:kms:xxx>
objects:
artifacts:
bucket: <artifacts>
......
......@@ -940,9 +940,7 @@ message. Install the [correct GitLab version](https://packages.gitlab.com/gitlab
and then try again.
NOTE: **Note:**
There is a known issue with restore not working with `pgbouncer`. The [workaround is to bypass
`pgbouncer` and connect directly to the primary database node](../administration/postgresql/pgbouncer.md#procedure-for-bypassing-pgbouncer).
[Read more about backup and restore with `pgbouncer`](#backup-and-restore-for-installations-using-pgbouncer).
There is a known issue with restore not working with `pgbouncer`. [Read more about backup and restore with `pgbouncer`](#backup-and-restore-for-installations-using-pgbouncer).
### Restore for Docker image and GitLab Helm chart installations
......@@ -1039,26 +1037,60 @@ practical use.
## Backup and restore for installations using PgBouncer
PgBouncer can cause the following errors when performing backups and restores:
Do NOT back up or restore GitLab through a PgBouncer connection. These
tasks must [bypass PgBouncer and connect directly to the PostgreSQL primary database node](#bypassing-pgbouncer),
or they will cause a GitLab outage.
When the GitLab backup or restore task is used with PgBouncer, the
following error message is shown:
```ruby
ActiveRecord::StatementInvalid: PG::UndefinedTable
```
There is a [known issue](https://gitlab.com/gitlab-org/omnibus-gitlab/-/issues/3470) with restore not working
with `pgbouncer`.
This happens because the task uses `pg_dump`, which [sets a null search
path and explicitly includes the schema in every SQL query](https://gitlab.com/gitlab-org/gitlab/-/issues/23211)
to address [CVE-2018-1058](https://www.postgresql.org/about/news/postgresql-103-968-9512-9417-and-9322-released-1834/).
Since connections are reused with PgBouncer in transaction pooling mode,
PostgreSQL fails to search the default `public` schema. As a result,
this clearing of the search path causes tables and columns to appear
missing.
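For illustration, a hedged sketch of that failure mode, assuming a Rails
console connected through PgBouncer in transaction pooling mode that reuses a
server connection `pg_dump` has already touched:
```ruby
# The pooled server connection still carries the empty search_path that
# pg_dump set, so an unqualified table reference can no longer resolve:
Project.first
# => ActiveRecord::StatementInvalid: PG::UndefinedTable:
#    ERROR:  relation "projects" does not exist
```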
### Bypassing PgBouncer
There are two ways to fix this:
1. [Use environment variables to override the database settings](#environment-variable-overrides) for the backup task.
1. Reconfigure a node to [connect directly to the PostgreSQL primary database node](../administration/postgresql/pgbouncer.md#procedure-for-bypassing-pgbouncer).
#### Environment variable overrides
To work around this issue, the GitLab server will need to bypass `pgbouncer` and
[connect directly to the primary database node](../administration/postgresql/pgbouncer.md#procedure-for-bypassing-pgbouncer)
to perform the database restore.
By default, GitLab uses the database configuration stored in a
configuration file (`database.yml`). However, you can override the database settings
for the backup and restore task by setting environment
variables that are prefixed with `GITLAB_BACKUP_`:
- `GITLAB_BACKUP_PGHOST`
- `GITLAB_BACKUP_PGUSER`
- `GITLAB_BACKUP_PGPORT`
- `GITLAB_BACKUP_PGPASSWORD`
- `GITLAB_BACKUP_PGSSLMODE`
- `GITLAB_BACKUP_PGSSLKEY`
- `GITLAB_BACKUP_PGSSLCERT`
- `GITLAB_BACKUP_PGSSLROOTCERT`
- `GITLAB_BACKUP_PGSSLCRL`
- `GITLAB_BACKUP_PGSSLCOMPRESSION`
For example, to override the database host and port to use 192.168.1.10
and port 5432 with the Omnibus package:
```shell
sudo GITLAB_BACKUP_PGHOST=192.168.1.10 GITLAB_BACKUP_PGPORT=5432 /opt/gitlab/bin/gitlab-backup create
```
There is also a [known issue](https://gitlab.com/gitlab-org/gitlab/-/issues/23211)
with PostgreSQL 9 and running a database backup through PgBouncer that can cause
an outage to GitLab. If you're still on PostgreSQL 9 and upgrading PostgreSQL isn't
an option, workarounds include having a dedicated application node just for backups,
configured to connect directly to the primary database node as noted above. You're
advised to upgrade your PostgreSQL version, though: GitLab 11.11 shipped with PostgreSQL
10.7, which is the recommended version for GitLab 12+.
See the [PostgreSQL documentation](https://www.postgresql.org/docs/12/libpq-envars.html)
for more details on what these parameters do.
## Additional notes
......
......@@ -140,7 +140,14 @@ module Backup
'sslcrl' => 'PGSSLCRL',
'sslcompression' => 'PGSSLCOMPRESSION'
}
args.each { |opt, arg| ENV[arg] = config[opt].to_s if config[opt] }
args.each do |opt, arg|
# This enables the use of different PostgreSQL settings in
# case PgBouncer is used. PgBouncer clears the search path,
# which wreaks havoc on Rails if connections are reused.
override = "GITLAB_BACKUP_#{arg}"
val = ENV[override].presence || config[opt].to_s.presence
ENV[arg] = val if val
end
end
def report_success(success)
......
......@@ -45,6 +45,14 @@ module Gitlab
end
def read_zip_file!(file_path)
if ::Feature.enabled?(:ci_new_artifact_file_reader, job.project, default_enabled: false)
read_with_new_artifact_file_reader(file_path)
else
read_with_legacy_artifact_file_reader(file_path)
end
end
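# Reads the entry through an open file handle (use_open_file) rather than
# first downloading the whole archive to a local path, as the legacy
# reader below does (use_file).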
def read_with_new_artifact_file_reader(file_path)
job.artifacts_file.use_open_file do |file|
zip_file = Zip::File.new(file, false, true)
entry = zip_file.find_entry(file_path)
......@@ -61,6 +69,25 @@ module Gitlab
end
end
def read_with_legacy_artifact_file_reader(file_path)
job.artifacts_file.use_file do |archive_path|
Zip::File.open(archive_path) do |zip_file|
entry = zip_file.find_entry(file_path)
unless entry
raise Error, "Path `#{file_path}` does not exist inside the `#{job.name}` artifacts archive!"
end
if entry.name_is_directory?
raise Error, "Path `#{file_path}` was expected to be a file but it was a directory!"
end
zip_file.get_input_stream(entry) do |is|
is.read
end
end
end
end
def max_archive_size_in_mb
ActiveSupport::NumberHelper.number_to_human_size(MAX_ARCHIVE_SIZE)
end
......
.dast-auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v1.0.5"
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v1.0.7"
dast_environment_deploy:
extends: .dast-auto-deploy
......
.auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v1.0.5"
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v1.0.7"
dependencies: []
review:
......
import Vuex from 'vuex';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import { GlDropdownItem, GlDropdown } from '@gitlab/ui';
import { secondsToHours } from '~/lib/utils/datetime_utility';
import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown.vue';
import createStore from '~/deploy_freeze/store';
......@@ -12,6 +13,11 @@ describe('Deploy freeze timezone dropdown', () => {
let store;
const timezoneDataFixture = getJSONFixture('/api/freeze-periods/timezone_data.json');
const findTzByName = (identifier = '') =>
timezoneDataFixture.find(({ name }) => name.toLowerCase() === identifier.toLowerCase());
const formatTz = ({ offset, name }) => `[UTC ${secondsToHours(offset)}] ${name}`;
const createComponent = (searchTerm, selectedTimezone) => {
store = createStore({
projectId: '8',
......@@ -63,8 +69,9 @@ describe('Deploy freeze timezone dropdown', () => {
});
it('renders only the time zone searched for', () => {
const selectedTz = findTzByName('Alaska');
expect(findAllDropdownItems()).toHaveLength(1);
expect(findDropdownItemByIndex(0).text()).toBe('[UTC -8] Alaska');
expect(findDropdownItemByIndex(0).text()).toBe(formatTz(selectedTz));
});
it('should not display empty results message', () => {
......@@ -72,13 +79,15 @@ describe('Deploy freeze timezone dropdown', () => {
});
describe('Custom events', () => {
const selectedTz = findTzByName('Alaska');
it('should emit input if a time zone is clicked', () => {
findDropdownItemByIndex(0).vm.$emit('click');
expect(wrapper.emitted('input')).toEqual([
[
{
formattedTimezone: '[UTC -8] Alaska',
identifier: 'America/Juneau',
formattedTimezone: formatTz(selectedTz),
identifier: selectedTz.identifier,
},
],
]);
......
......@@ -105,7 +105,8 @@ describe('Monitoring router', () => {
path | currentDashboard
${'/panel/new'} | ${undefined}
${'/dashboard.yml/panel/new'} | ${'dashboard.yml'}
${'/config%2Fprometheus%2Fcommon_metrics.yml/panel/new'} | ${'config%2Fprometheus%2Fcommon_metrics.yml'}
${'/config/prometheus/common_metrics.yml/panel/new'} | ${'config/prometheus/common_metrics.yml'}
${'/config%2Fprometheus%2Fcommon_metrics.yml/panel/new'} | ${'config/prometheus/common_metrics.yml'}
`('"$path" renders page with dashboard "$currentDashboard"', ({ path, currentDashboard }) => {
const wrapper = createWrapper(BASE_PATH, path);
......
import { findAllByText, fireEvent, getByLabelText, screen } from '@testing-library/dom';
const isFileRowOpen = row => row.matches('.is-open');
const isFolderRowOpen = row => row.matches('.folder.is-open');
const getLeftSidebar = () => screen.getByTestId('left-sidebar');
......@@ -24,6 +24,8 @@ const findAndSetEditorValue = async value => {
const findTreeBody = () => screen.findByTestId('ide-tree-body', {}, { timeout: 5000 });
const findRootActions = () => screen.findByTestId('ide-root-actions', {}, { timeout: 7000 });
const findFileRowContainer = (row = null) =>
row ? Promise.resolve(row.parentElement) : findTreeBody();
......@@ -35,7 +37,7 @@ const findFileChild = async (row, name, index = 0) => {
};
const openFileRow = row => {
if (!row || isFileRowOpen(row)) {
if (!row || isFolderRowOpen(row)) {
return;
}
......@@ -74,6 +76,19 @@ const findAndSetFileName = async value => {
createButton.click();
};
const findAndClickRootAction = async name => {
const container = await findRootActions();
const button = getByLabelText(container, name);
button.click();
};
export const openFile = async path => {
const row = await findAndTraverseToPath(path);
openFileRow(row);
};
export const createFile = async (path, content) => {
const parentPath = path
.split('/')
......@@ -81,7 +96,12 @@ export const createFile = async (path, content) => {
.join('/');
const parentRow = await findAndTraverseToPath(parentPath);
clickFileRowAction(parentRow, 'New file');
if (parentRow) {
clickFileRowAction(parentRow, 'New file');
} else {
await findAndClickRootAction('New file');
}
await findAndSetFileName(path);
await findAndSetEditorValue(content);
......
import { TEST_HOST } from 'helpers/test_constants';
import { waitForText } from 'helpers/wait_for_text';
import waitForPromises from 'helpers/wait_for_promises';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
import { createCommitId } from 'test_helpers/factories/commit_id';
import { initIde } from '~/ide';
......@@ -86,4 +87,18 @@ describe('WebIDE', () => {
],
});
});
it('user adds file that starts with +', async () => {
createComponent();
await ideHelper.createFile('+test', 'Hello world!');
await ideHelper.openFile('+test');
// Wait for the Monaco editor to finish its async setup
await waitForPromises();
// Assert that +test is the only open tab
const tabs = Array.from(document.querySelectorAll('.multi-file-tab'));
expect(tabs.map(x => x.textContent.trim())).toEqual(['+test']);
});
});
......@@ -6,7 +6,7 @@ beforeEach(() => {
relative_url_root: '',
};
setTestTimeout(5000);
setTestTimeout(7000);
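// 7s matches the longest helper timeout (findRootActions) used by the
// IDE integration tests.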
jest.useRealTimers();
});
......
......@@ -48,5 +48,26 @@ RSpec.describe Backup::Database do
expect(output).to include(visible_error)
end
end
context 'with PostgreSQL settings defined in the environment' do
let(:cmd) { %W[#{Gem.ruby} -e] + ["$stderr.puts ENV.to_h.select { |k, _| k.start_with?('PG') }"] }
let(:config) { YAML.load_file(File.join(Rails.root, 'config', 'database.yml'))['test'] }
before do
stub_const 'ENV', ENV.to_h.merge({
'GITLAB_BACKUP_PGHOST' => 'test.example.com',
'PGPASSWORD' => 'donotchange'
})
end
it 'overrides default config values' do
subject.restore
expect(output).to include(%("PGHOST"=>"test.example.com"))
expect(output).to include(%("PGPASSWORD"=>"donotchange"))
expect(output).to include(%("PGPORT"=>"#{config['port']}")) if config['port']
expect(output).to include(%("PGUSER"=>"#{config['username']}")) if config['username']
end
end
end
end
......@@ -18,6 +18,17 @@ RSpec.describe Gitlab::Ci::ArtifactFileReader do
expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
end
context 'when FF ci_new_artifact_file_reader is disabled' do
before do
stub_feature_flags(ci_new_artifact_file_reader: false)
end
it 'returns the content at the path' do
is_expected.to be_present
expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
end
end
context 'when path does not exist' do
let(:path) { 'file/does/not/exist.txt' }
let(:expected_error) do
......
......@@ -135,11 +135,31 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is fog' do
let(:data_store) { :fog }
before do
build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
context 'when legacy Fog is enabled' do
before do
stub_feature_flags(ci_trace_new_fog_store: false)
build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
end
it { is_expected.to eq('Sample data in fog') }
it 'returns a LegacyFog store' do
expect(described_class.get_store_class(data_store)).to be_a(Ci::BuildTraceChunks::LegacyFog)
end
end
it { is_expected.to eq('Sample data in fog') }
context 'when new Fog is enabled' do
before do
stub_feature_flags(ci_trace_new_fog_store: true)
build_trace_chunk.send(:unsafe_set_data!, +'Sample data in fog')
end
it { is_expected.to eq('Sample data in fog') }
it 'returns a new Fog store' do
expect(described_class.get_store_class(data_store)).to be_a(Ci::BuildTraceChunks::Fog)
end
end
end
end
......
......@@ -4,8 +4,12 @@ require 'spec_helper'
RSpec.describe Ci::BuildTraceChunks::Fog do
let(:data_store) { described_class.new }
let(:bucket) { 'artifacts' }
let(:connection_params) { Gitlab.config.artifacts.object_store.connection.symbolize_keys }
let(:connection) { ::Fog::Storage.new(connection_params) }
before do
stub_object_storage(connection_params: connection_params, remote_directory: bucket)
stub_artifacts_object_storage
end
......@@ -148,17 +152,17 @@ RSpec.describe Ci::BuildTraceChunks::Fog do
end
it 'deletes multiple data' do
::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body]).to be_present
expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body]).to be_present
end
files = connection.directories.new(key: bucket).files
expect(files.count).to eq(2)
expect(files[0].body).to be_present
expect(files[1].body).to be_present
subject
::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body] }.to raise_error(Excon::Error::NotFound)
expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body] }.to raise_error(Excon::Error::NotFound)
end
files.reload
expect(files.count).to eq(0)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::BuildTraceChunks::LegacyFog do
let(:data_store) { described_class.new }
before do
stub_artifacts_object_storage
end
describe '#available?' do
subject { data_store.available? }
context 'when object storage is enabled' do
it { is_expected.to be_truthy }
end
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it { is_expected.to be_falsy }
end
end
describe '#data' do
subject { data_store.data(model) }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
it 'returns the data' do
is_expected.to eq('sample data in fog')
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'returns nil' do
expect(data_store.data(model)).to be_nil
end
end
end
describe '#set_data' do
let(:new_data) { 'abc123' }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
it 'overwrites data' do
expect(data_store.data(model)).to eq('sample data in fog')
data_store.set_data(model, new_data)
expect(data_store.data(model)).to eq new_data
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'sets new data' do
expect(data_store.data(model)).to be_nil
data_store.set_data(model, new_data)
expect(data_store.data(model)).to eq new_data
end
end
end
describe '#delete_data' do
subject { data_store.delete_data(model) }
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'sample data in fog') }
it 'deletes data' do
expect(data_store.data(model)).to eq('sample data in fog')
subject
expect(data_store.data(model)).to be_nil
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'does nothing' do
expect(data_store.data(model)).to be_nil
subject
expect(data_store.data(model)).to be_nil
end
end
end
describe '#size' do
context 'when data exists' do
let(:model) { create(:ci_build_trace_chunk, :fog_with_data, initial_data: 'üabcd') }
it 'returns data bytesize correctly' do
expect(data_store.size(model)).to eq 6
end
end
context 'when data does not exist' do
let(:model) { create(:ci_build_trace_chunk, :fog_without_data) }
it 'returns zero' do
expect(data_store.size(model)).to be_zero
end
end
end
describe '#keys' do
subject { data_store.keys(relation) }
let(:build) { create(:ci_build) }
let(:relation) { build.trace_chunks }
before do
create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
end
it 'returns keys' do
is_expected.to eq([[build.id, 0], [build.id, 1]])
end
end
describe '#delete_keys' do
subject { data_store.delete_keys(keys) }
let(:build) { create(:ci_build) }
let(:relation) { build.trace_chunks }
let(:keys) { data_store.keys(relation) }
before do
create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 0, build: build)
create(:ci_build_trace_chunk, :fog_with_data, chunk_index: 1, build: build)
end
it 'deletes multiple data' do
::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body]).to be_present
expect(connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body]).to be_present
end
subject
::Fog::Storage.new(JobArtifactUploader.object_store_credentials).tap do |connection|
expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/0.log")[:body] }.to raise_error(Excon::Error::NotFound)
expect { connection.get_object('artifacts', "tmp/builds/#{build.id}/chunks/1.log")[:body] }.to raise_error(Excon::Error::NotFound)
end
end
end
end
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
......@@ -12363,10 +12363,10 @@ vue-loader@^15.9.3:
vue-hot-reload-api "^2.3.0"
vue-style-loader "^4.1.0"
vue-router@^3.4.7:
version "3.4.7"
resolved "https://registry.yarnpkg.com/vue-router/-/vue-router-3.4.7.tgz#bf189bafd16f4e4ef783c4a6250a3090f2c1fa1b"
integrity sha512-CbHXue5BLrDivOk5O4eZ0WT4Yj8XwdXa4kCnsEIOzYUPF/07ZukayA2jGxDCJxLc9SgVQX9QX0OuGOwGlVB4Qg==
vue-router@3.4.5:
version "3.4.5"
resolved "https://registry.yarnpkg.com/vue-router/-/vue-router-3.4.5.tgz#d396ec037b35931bdd1e9b7edd86f9788dc15175"
integrity sha512-ioRY5QyDpXM9TDjOX6hX79gtaMXSVDDzSlbIlyAmbHNteIL81WIVB2e+jbzV23vzxtoV0krdS2XHm+GxFg+Nxg==
vue-runtime-helpers@^1.1.2:
version "1.1.2"
......