Commit ff465d48 authored by George Koltsov's avatar George Koltsov

Add decompressed archive size validation

- Validate Project/Group Import incoming compressed
  archive to make sure the decompressed size is within
  an acceptable range
parent 230adfd7
---
title: Add decompressed archive size validation on Project/Group Import
merge_request: 562
author:
type: security
---
type: reference, howto
---
# Project Import Decompressed Archive Size Limits

> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/31564) in GitLab 13.2.

When using [Project Import](../user/project/settings/import_export.md), the size of the decompressed project archive is limited to 10 GB.
If the decompressed size exceeds this limit, a `Decompressed archive size validation failed` error is returned.
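
To check an archive manually, you can run the validator that enforces this limit from a
[Rails console](../administration/troubleshooting/debug.md#starting-a-rails-console-session)
(a minimal sketch; the archive path below is a placeholder):

```ruby
# Placeholder path to a project export archive
validator = Gitlab::ImportExport::DecompressedArchiveSizeValidator.new(
  archive_path: '/path/to/project_export.tar.gz'
)

validator.valid? # => false when the decompressed size exceeds the limit
validator.error  # => "Decompressed archive size validation failed." when invalid
```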
## Enable/disable size validation

Decompressed size validation is enabled by default.
If you have a project whose decompressed size exceeds this limit,
you can disable the validation by turning off the
`validate_import_decompressed_archive_size` feature flag.

Start a [Rails console](../administration/troubleshooting/debug.md#starting-a-rails-console-session).
```ruby
# Disable
Feature.disable(:validate_import_decompressed_archive_size)
# Enable
Feature.enable(:validate_import_decompressed_archive_size)
```
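
To confirm whether the validation will run, you can also check the flag state from the same
console. This mirrors the check the importer performs (a sketch based on the code in this commit):

```ruby
# true when decompressed archive size validation is active during import
Feature.enabled?(:validate_import_decompressed_archive_size, default_enabled: true)
```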
# frozen_string_literal: true

require 'zlib'

module Gitlab
  module ImportExport
    class DecompressedArchiveSizeValidator
      include Gitlab::Utils::StrongMemoize

      DEFAULT_MAX_BYTES = 10.gigabytes.freeze
      CHUNK_SIZE = 4096.freeze

      attr_reader :error

      def initialize(archive_path:, max_bytes: self.class.max_bytes)
        @archive_path = archive_path
        @max_bytes = max_bytes
        @bytes_read = 0
        @total_reads = 0
        @denominator = 5
        @error = nil
      end

      def valid?
        strong_memoize(:valid) do
          validate
        end
      end

      def self.max_bytes
        DEFAULT_MAX_BYTES
      end

      def archive_file
        @archive_file ||= File.open(@archive_path)
      end

      private

      def validate
        until archive_file.eof?
          compressed_chunk = archive_file.read(CHUNK_SIZE)

          inflate_stream.inflate(compressed_chunk) do |chunk|
            @bytes_read += chunk.size
            @total_reads += 1
          end

          # Start garbage collection every 5 reads in order
          # to prevent memory bloat during archive decompression
          GC.start if gc_start?

          if @bytes_read > @max_bytes
            @error = error_message

            return false
          end
        end

        true
      rescue => e
        @error = error_message

        Gitlab::ErrorTracking.track_exception(e)
        Gitlab::Import::Logger.info(
          message: @error,
          error: e.message
        )

        false
      ensure
        inflate_stream.close
        archive_file.close
      end

      def inflate_stream
        @inflate_stream ||= Zlib::Inflate.new(Zlib::MAX_WBITS + 32)
      end

      def gc_start?
        @total_reads % @denominator == 0
      end

      def error_message
        _('Decompressed archive size validation failed.')
      end
    end
  end
end
@@ -28,6 +28,7 @@ module Gitlab
        copy_archive

        wait_for_archived_file do
          validate_decompressed_archive_size if Feature.enabled?(:validate_import_decompressed_archive_size, default_enabled: true)
          decompress_archive
        end
      rescue => e
@@ -82,6 +83,14 @@ module Gitlab
      def extracted_files
        Dir.glob("#{@shared.export_path}/**/*", File::FNM_DOTMATCH).reject { |f| IGNORED_FILENAMES.include?(File.basename(f)) }
      end

      def validate_decompressed_archive_size
        raise ImporterError.new(size_validator.error) unless size_validator.valid?
      end

      def size_validator
        @size_validator ||= DecompressedArchiveSizeValidator.new(archive_path: @archive_file)
      end
    end
  end
end
@@ -7270,6 +7270,9 @@ msgstr ""
msgid "Decline and sign out"
msgstr ""

msgid "Decompressed archive size validation failed."
msgstr ""

msgid "Default Branch"
msgstr ""
......
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
  let_it_be(:filepath) { File.join(Dir.tmpdir, 'decompressed_archive_size_validator_spec.gz') }

  before(:all) do
    create_compressed_file
  end

  after(:all) do
    FileUtils.rm(filepath)
  end

  subject { described_class.new(archive_path: filepath, max_bytes: max_bytes) }

  describe '#valid?' do
    let(:max_bytes) { 1 }

    context 'when file does not exceed allowed decompressed size' do
      let(:max_bytes) { 20 }

      it 'returns true' do
        expect(subject.valid?).to eq(true)
      end
    end

    context 'when file exceeds allowed decompressed size' do
      it 'returns false' do
        expect(subject.valid?).to eq(false)
      end
    end

    context 'when something goes wrong during decompression' do
      before do
        allow(subject.archive_file).to receive(:eof?).and_raise(StandardError)
      end

      it 'logs and tracks raised exception' do
        expect(Gitlab::ErrorTracking).to receive(:track_exception).with(instance_of(StandardError))
        expect(Gitlab::Import::Logger).to receive(:info).with(hash_including(message: 'Decompressed archive size validation failed.'))

        subject.valid?
      end

      it 'returns false' do
        expect(subject.valid?).to eq(false)
      end
    end
  end

  def create_compressed_file
    Zlib::GzipWriter.open(filepath) do |gz|
      gz.write('Hello World!')
    end
  end
end
@@ -98,6 +98,45 @@ RSpec.describe Gitlab::ImportExport::FileImporter do
    end
  end

  context 'when file exceeds acceptable decompressed size' do
    let(:project) { create(:project) }
    let(:shared) { Gitlab::ImportExport::Shared.new(project) }
    let(:filepath) { File.join(Dir.tmpdir, 'file_importer_spec.gz') }

    subject { described_class.new(importable: project, archive_file: filepath, shared: shared) }

    before do
      Zlib::GzipWriter.open(filepath) do |gz|
        gz.write('Hello World!')
      end
    end

    context 'when validate_import_decompressed_archive_size feature flag is enabled' do
      before do
        stub_feature_flags(validate_import_decompressed_archive_size: true)

        allow(Gitlab::ImportExport::DecompressedArchiveSizeValidator).to receive(:max_bytes).and_return(1)
      end

      it 'returns false' do
        expect(subject.import).to eq(false)
        expect(shared.errors.join).to eq('Decompressed archive size validation failed.')
      end
    end

    context 'when validate_import_decompressed_archive_size feature flag is disabled' do
      before do
        stub_feature_flags(validate_import_decompressed_archive_size: false)
      end

      it 'skips validation' do
        expect(subject).to receive(:validate_decompressed_archive_size).never

        subject.import
      end
    end
  end

  def setup_files
    FileUtils.mkdir_p("#{shared.export_path}/subfolder/")
    FileUtils.touch(valid_file)
......