Commit 35d3c09d authored by Tan Le

Stream audit event CSV file

The new CSV builder allows streaming large CSV files directly to the
client. This helps keep the connection open and avoid timeouts. Users
also get a more responsive experience (i.e. they do not have to wait
for the complete CSV to be built before seeing the download prompt).

This change also reduces memory usage since there is no need to load
the complete temporary CSV file into memory.
parent e4b4a34b
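
As a rough sketch of the streaming idea described above (illustrative only, not the exact code in this commit; the Event struct and column names are stand-ins): the CSV is built as a lazy enumerator and handed directly to Rack as the response body, so rows are written to the client as they are generated instead of being buffered into a temporary file first.

require 'csv'

# Stand-in records; in the real code these come from an audit event relation.
Event = Struct.new(:id, :author_name, :action)
events = [Event.new(1, 'Alice', 'login'), Event.new(2, 'Bob', 'export')]

# Build the CSV lazily: one header line, then one line per record.
rows = Enumerator.new do |yielder|
  yielder << CSV.generate_line(%w[ID Author Action])
  events.each do |event|
    yielder << CSV.generate_line([event.id, event.author_name, event.action])
  end
end.lazy.take(100_001) # header row + 100,000 events

puts rows.to_a.join
# In a Rails controller action, assigning the enumerator as the response body
# streams each yielded chunk to the client as it is produced:
#   self.response_body = rows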
......@@ -266,6 +266,12 @@ class ApplicationController < ActionController::Base
end
end
def stream_headers
headers['Content-Length'] = nil # Length is unknown up front when streaming
headers['X-Accel-Buffering'] = 'no' # Disable buffering on Nginx
headers['Last-Modified'] = '0' # Prevent buffering via Rack::ETag middleware
end
def default_headers
headers['X-Frame-Options'] = 'DENY'
headers['X-XSS-Protection'] = '1; mode=block'
......
......@@ -239,7 +239,7 @@ The first row contains the headers, which are listed in the following table alon
### Limitation
The Audit Log CSV file size is limited to a maximum of `15 MB`.
The Audit Log CSV file is limited to a maximum of `100,000` events.
The remaining records are truncated when this limit is reached.
### Enable or disable Audit Log Export to CSV
......
......@@ -17,7 +17,7 @@ export default {
},
strings: {
buttonText: __('Export as CSV'),
tooltipText: __('Max size 15 MB'),
tooltipText: __('Max 100,000 events'),
},
};
</script>
......
......@@ -10,11 +10,13 @@ class Admin::AuditLogReportsController < Admin::ApplicationController
respond_to do |format|
format.csv do
send_data(
csv_data,
type: 'text/csv; charset=utf-8; header=present',
filename: csv_filename
)
no_cache_headers
stream_headers
headers['Content-Type'] = 'text/csv; charset=utf-8; header=present'
headers['Content-Disposition'] = "attachment; filename=\"#{csv_filename}\""
self.response_body = csv_data
end
end
end
......
......@@ -2,20 +2,18 @@
module AuditEvents
class ExportCsvService
TARGET_FILESIZE = 15.megabytes
def initialize(params = {})
@params = params
end
def csv_data
csv_builder.render(TARGET_FILESIZE)
csv_builder.render
end
private
def csv_builder
@csv_builder ||= CsvBuilder.new(data, header_to_value_hash)
@csv_builder ||= CsvBuilders::Stream.new(data, header_to_value_hash)
end
def data
......
......@@ -54,7 +54,11 @@ RSpec.describe Admin::AuditLogReportsController do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.headers['Content-Type']).to eq('text/csv; charset=utf-8')
expect(response.headers["Content-Length"]).to be_nil
expect(response.headers["Cache-Control"]).to eq('no-cache, no-store')
expect(response.headers['Content-Type']).to eq('text/csv; charset=utf-8; header=present')
expect(response.headers['X-Accel-Buffering']).to eq('no')
expect(response.headers['Last-Modified']).to eq('0')
expect(response.headers['Content-Disposition'])
.to include("filename=\"audit-events-#{Time.current.to_i}.csv\"")
end
......
......@@ -7,7 +7,7 @@ exports[`AuditEventsExportButton component Audit Events CSV export button matche
href="http://example.com/audit_log_reports.csv?created_after=2020-12-12"
icon="export"
size="medium"
title="Max size 15 MB"
title="Max 100,000 events"
variant="default"
>
......
......@@ -29,16 +29,9 @@ RSpec.describe AuditEvents::ExportCsvService do
}
end
subject { described_class.new(params) }
let(:export_csv_service) { described_class.new(params) }
it 'invokes the CSV builder with correct limit' do
csv_builder = instance_spy(CsvBuilder)
allow(CsvBuilder).to receive(:new).and_return(csv_builder)
subject.csv_data
expect(csv_builder).to have_received(:render).with(15.megabytes)
end
subject(:csv) { CSV.parse(export_csv_service.csv_data.to_a.join, headers: true) }
it 'includes the appropriate headers' do
expect(csv.headers).to eq([
......@@ -98,8 +91,4 @@ RSpec.describe AuditEvents::ExportCsvService do
expect(csv[0]['Created At (UTC)']).to eq('2020-02-20T12:00:00Z')
end
end
def csv
CSV.parse(subject.csv_data, headers: true)
end
end
# frozen_string_literal: true
module CsvBuilders
class Stream < CsvBuilder
def render(max_rows = 100_000)
max_rows_including_header = max_rows + 1
Enumerator.new do |csv|
csv << CSV.generate_line(headers)
each do |object|
csv << CSV.generate_line(row(object))
end
end.lazy.take(max_rows_including_header) # rubocop: disable CodeReuse/ActiveRecord
end
end
end
......@@ -16545,6 +16545,9 @@ msgstr ""
msgid "MattermostService|This service allows users to perform common operations on this project by entering slash commands in Mattermost."
msgstr ""
msgid "Max 100,000 events"
msgstr ""
msgid "Max Group Export Download requests per minute per user"
msgstr ""
......@@ -16569,9 +16572,6 @@ msgstr ""
msgid "Max role"
msgstr ""
msgid "Max size 15 MB"
msgstr ""
msgid "MaxBuilds"
msgstr ""
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe CsvBuilders::Stream do
let(:event_1) { double(title: 'Added salt', description: 'A teaspoon') }
let(:event_2) { double(title: 'Added sugar', description: 'Just a pinch') }
let(:fake_relation) { FakeRelation.new([event_1, event_2]) }
subject(:builder) { described_class.new(fake_relation, 'Title' => 'title', 'Description' => 'description') }
describe '#render' do
before do
stub_const('FakeRelation', Array)
FakeRelation.class_eval do
def find_each(&block)
each(&block)
end
end
end
it 'returns a lazy enumerator' do
expect(builder.render).to be_an(Enumerator::Lazy)
end
it 'returns all rows up to default max value' do
expect(builder.render.to_a).to eq([
"Title,Description\n",
"Added salt,A teaspoon\n",
"Added sugar,Just a pinch\n"
])
end
it 'truncates to max rows' do
expect(builder.render(1).to_a).to eq([
"Title,Description\n",
"Added salt,A teaspoon\n"
])
end
end
end