Commit 8da7d8c4 authored by alinamihaila

Check original categories in tests

parent 43575f71
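For orientation, the spec changed below exercises `Gitlab::UsageDataCounters::HLLRedisCounter` roughly as follows. This is a minimal sketch assembled from the calls visible in the diff; the entity IDs are illustrative and the exact results depend on the instance's known-events configuration, so treat it as an assumption-laden example rather than reference documentation.

# Minimal sketch of the counter API exercised by this spec (illustrative values only).
counter = Gitlab::UsageDataCounters::HLLRedisCounter

# Record that two distinct entities triggered a known event on a given date.
counter.track_event('user-uuid-1', 'g_analytics_contribution', Date.current)
counter.track_event('user-uuid-2', 'g_analytics_contribution', Date.current)

# Count unique entities over a date range; the events passed in must share the
# same Redis slot, category and aggregation level, otherwise an error is raised.
counter.unique_events(event_names: 'g_analytics_contribution',
                      start_date: 4.weeks.ago,
                      end_date: Date.current)

# Category helpers; this commit switches the spec to assert against the real
# category list ('analytics', 'compliance', 'ide_edit', 'search') instead of
# the stubbed test categories.
counter.categories
counter.events_for_category(:analytics)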
@@ -8,36 +8,6 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
  let(:entity3) { '34rfjuuy-ce56-sa35-ds34-dfer567dfrf2' }
  let(:entity4) { '8b9a2671-2abf-4bec-a682-22f6a8f7bf31' }

-  let(:weekly_event) { 'g_analytics_contribution' }
-  let(:daily_event) { 'g_analytics_search' }
-  let(:analytics_slot_event) { 'g_analytics_contribution' }
-  let(:compliance_slot_event) { 'g_compliance_dashboard' }
-  let(:category_analytics_event) { 'g_analytics_search' }
-  let(:category_productivity_event) { 'g_analytics_productivity' }
-  let(:no_slot) { 'no_slot' }
-  let(:different_aggregation) { 'different_aggregation' }
-  let(:custom_daily_event) { 'g_analytics_custom' }
-
-  let(:global_category) { 'global' }
-  let(:compliance_category) { 'compliance' }
-  let(:productivity_category) { 'productivity' }
-  let(:analytics_category) { 'analytics' }
-
-  let(:known_events) do
-    [
-      { name: weekly_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "weekly" },
-      { name: daily_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "daily" },
-      { name: category_productivity_event, redis_slot: "analytics", category: productivity_category, aggregation: "weekly" },
-      { name: compliance_slot_event, redis_slot: "compliance", category: compliance_category, aggregation: "weekly" },
-      { name: no_slot, category: global_category, aggregation: "daily" },
-      { name: different_aggregation, category: global_category, aggregation: "monthly" }
-    ].map(&:with_indifferent_access)
-  end
-
-  before do
-    allow(described_class).to receive(:known_events).and_return(known_events)
-  end
-
  around do |example|
    # We need to freeze to a reference time
    # because visits are grouped by the week number in the year
@@ -48,156 +18,186 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
    Timecop.freeze(reference_time) { example.run }
  end

-  describe '.events_for_category' do
-    it 'gets the event names for given category' do
-      expect(described_class.events_for_category(:analytics)).to contain_exactly(weekly_event, daily_event)
-    end
-  end
-
  describe '.categories' do
    it 'gets all unique category names' do
-      expect(described_class.categories).to contain_exactly(global_category, analytics_category, productivity_category, compliance_category)
+      expect(described_class.categories).to contain_exactly('analytics', 'compliance', 'ide_edit', 'search')
    end
  end

+  describe 'known_events' do
+    let(:weekly_event) { 'g_analytics_contribution' }
+    let(:daily_event) { 'g_analytics_search' }
+    let(:analytics_slot_event) { 'g_analytics_contribution' }
+    let(:compliance_slot_event) { 'g_compliance_dashboard' }
+    let(:category_analytics_event) { 'g_analytics_search' }
+    let(:category_productivity_event) { 'g_analytics_productivity' }
+    let(:no_slot) { 'no_slot' }
+    let(:different_aggregation) { 'different_aggregation' }
+    let(:custom_daily_event) { 'g_analytics_custom' }
+
+    let(:global_category) { 'global' }
+    let(:compliance_category) { 'compliance' }
+    let(:productivity_category) { 'productivity' }
+    let(:analytics_category) { 'analytics' }
+
+    let(:known_events) do
+      [
+        { name: weekly_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "weekly" },
+        { name: daily_event, redis_slot: "analytics", category: analytics_category, expiry: 84, aggregation: "daily" },
+        { name: category_productivity_event, redis_slot: "analytics", category: productivity_category, aggregation: "weekly" },
+        { name: compliance_slot_event, redis_slot: "compliance", category: compliance_category, aggregation: "weekly" },
+        { name: no_slot, category: global_category, aggregation: "daily" },
+        { name: different_aggregation, category: global_category, aggregation: "monthly" }
+      ].map(&:with_indifferent_access)
+    end
+
+    before do
+      allow(described_class).to receive(:known_events).and_return(known_events)
+    end
+
+    describe '.events_for_category' do
+      it 'gets the event names for given category' do
+        expect(described_class.events_for_category(:analytics)).to contain_exactly(weekly_event, daily_event)
+      end
+    end
+
    describe '.track_event' do
      it "raise error if metrics don't have same aggregation" do
        expect { described_class.track_event(entity1, different_aggregation, Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownAggregation)
      end

      it 'raise error if metrics of unknown aggregation' do
        expect { described_class.track_event(entity1, 'unknown', Date.current) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
      end

      context 'for weekly events' do
        it 'sets the keys in Redis to expire automatically after the given expiry time' do
          described_class.track_event(entity1, "g_analytics_contribution")

          Gitlab::Redis::SharedState.with do |redis|
            keys = redis.scan_each(match: "g_{analytics}_contribution-*").to_a
            expect(keys).not_to be_empty

            keys.each do |key|
              expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks)
            end
          end
        end

        it 'sets the keys in Redis to expire automatically after 6 weeks by default' do
          described_class.track_event(entity1, "g_compliance_dashboard")

          Gitlab::Redis::SharedState.with do |redis|
            keys = redis.scan_each(match: "g_{compliance}_dashboard-*").to_a
            expect(keys).not_to be_empty

            keys.each do |key|
              expect(redis.ttl(key)).to be_within(5.seconds).of(6.weeks)
            end
          end
        end
      end

      context 'for daily events' do
        it 'sets the keys in Redis to expire after the given expiry time' do
          described_class.track_event(entity1, "g_analytics_search")

          Gitlab::Redis::SharedState.with do |redis|
            keys = redis.scan_each(match: "*-g_{analytics}_search").to_a
            expect(keys).not_to be_empty

            keys.each do |key|
              expect(redis.ttl(key)).to be_within(5.seconds).of(84.days)
            end
          end
        end

        it 'sets the keys in Redis to expire after 29 days by default' do
          described_class.track_event(entity1, "no_slot")

          Gitlab::Redis::SharedState.with do |redis|
            keys = redis.scan_each(match: "*-{no_slot}").to_a
            expect(keys).not_to be_empty

            keys.each do |key|
              expect(redis.ttl(key)).to be_within(5.seconds).of(29.days)
            end
          end
        end
      end
    end

    describe '.unique_events' do
      before do
        # events in current week, should not be counted as week is not complete
        described_class.track_event(entity1, weekly_event, Date.current)
        described_class.track_event(entity2, weekly_event, Date.current)

        # Events last week
        described_class.track_event(entity1, weekly_event, 2.days.ago)
        described_class.track_event(entity1, weekly_event, 2.days.ago)
        described_class.track_event(entity1, no_slot, 2.days.ago)

        # Events 2 weeks ago
        described_class.track_event(entity1, weekly_event, 2.weeks.ago)

        # Events 4 weeks ago
        described_class.track_event(entity3, weekly_event, 4.weeks.ago)
        described_class.track_event(entity4, weekly_event, 29.days.ago)

        # events in current day should be counted in daily aggregation
        described_class.track_event(entity1, daily_event, Date.current)
        described_class.track_event(entity2, daily_event, Date.current)

        # Events last week
        described_class.track_event(entity1, daily_event, 2.days.ago)
        described_class.track_event(entity1, daily_event, 2.days.ago)

        # Events 2 weeks ago
        described_class.track_event(entity1, daily_event, 14.days.ago)

        # Events 4 weeks ago
        described_class.track_event(entity3, daily_event, 28.days.ago)
        described_class.track_event(entity4, daily_event, 29.days.ago)
      end

      it 'raise error if metrics are not in the same slot' do
        expect { described_class.unique_events(event_names: [compliance_slot_event, analytics_slot_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same slot')
      end

      it 'raise error if metrics are not in the same category' do
        expect { described_class.unique_events(event_names: [category_analytics_event, category_productivity_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should be in same category')
      end

      it "raise error if metrics don't have same aggregation" do
        expect { described_class.unique_events(event_names: [daily_event, weekly_event], start_date: 4.weeks.ago, end_date: Date.current) }.to raise_error('Events should have same aggregation level')
      end

      context 'when data for the last complete week' do
        it { expect(described_class.unique_events(event_names: weekly_event, start_date: 1.week.ago, end_date: Date.current)).to eq(1) }
      end

      context 'when data for the last 4 complete weeks' do
        it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: Date.current)).to eq(2) }
      end

      context 'when data for the week 4 weeks ago' do
        it { expect(described_class.unique_events(event_names: weekly_event, start_date: 4.weeks.ago, end_date: 3.weeks.ago)).to eq(1) }
      end

      context 'when using daily aggregation' do
        it { expect(described_class.unique_events(event_names: daily_event, start_date: 7.days.ago, end_date: Date.current)).to eq(2) }
        it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: Date.current)).to eq(3) }
        it { expect(described_class.unique_events(event_names: daily_event, start_date: 28.days.ago, end_date: 21.days.ago)).to eq(1) }
      end

      context 'when no slot is set' do
        it { expect(described_class.unique_events(event_names: no_slot, start_date: 7.days.ago, end_date: Date.current)).to eq(1) }
      end
    end
+  end

  describe 'unique_events_data' do
-    let(:categories) { described_class.categories }
    let(:known_events) do
      [
        { name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
...