Skip to content
Snippets Groups Projects
Commit 7b4e9849 authored by Pavel Shutsin's avatar Pavel Shutsin :two:
Browse files

Merge branch '364361-add-time-columns-to-vsa-aggregated-records-fetcher' into 'master'

Add timestamp columns to VSA aggregated RecordsFetcher

See merge request !89529
parents 591505c7 0b5988a9
No related branches found
No related tags found
1 merge request!89529Add timestamp columns to VSA aggregated RecordsFetcher
Pipeline #564463480 passed
......@@ -33,9 +33,14 @@ module StageEventModel
)
duration_in_seconds = Arel::Nodes::Extract.new(duration, :epoch)
# start_event_timestamp and end_event_timestamp do not really influence the order,
# but are included so that they are part of the returned result, for example when
# using Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher
keyset_order(
:total_time => { order_expression: arel_order(duration_in_seconds, direction), distinct: false, sql_type: 'double precision' },
issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction), distinct: true }
issuable_id_column => { order_expression: arel_order(arel_table[issuable_id_column], direction), distinct: true },
:end_event_timestamp => { order_expression: arel_order(arel_table[:end_event_timestamp], direction), distinct: true },
:start_event_timestamp => { order_expression: arel_order(arel_table[:start_event_timestamp], direction), distinct: true }
)
end
end
......
......@@ -38,7 +38,7 @@ def serialized_records
strong_memoize(:serialized_records) do
# When RecordsFetcher is used with query sourced from
# InOperatorOptimization::QueryBuilder only columns
# used in ORDER BY statement would be selected by Arel.start operation
# used in ORDER BY statement would be selected by Arel.star operation
selections = [stage_event_model.arel_table[Arel.star]]
selections << duration_in_seconds.as('total_time') if params[:sort] != :duration # duration sorting already exposes this data
......@@ -55,7 +55,9 @@ def serialized_records
project_path: project.path,
namespace_path: project.namespace.route.path,
author: issuable.author,
total_time: record.total_time
total_time: record.total_time,
start_event_timestamp: record.start_event_timestamp,
end_event_timestamp: record.end_event_timestamp
})
serializer.represent(attributes)
end
......
......@@ -27,6 +27,26 @@
expect(returned_iids).to eq(expected_issue_ids)
end
it 'passes a hash with all expected attributes to the serializer' do
expected_attributes = [
'created_at',
'id',
'iid',
'title',
:end_event_timestamp,
:start_event_timestamp,
:total_time,
:author,
:namespace_path,
:project_path
]
serializer = instance_double(records_fetcher.send(:serializer).class.name)
allow(records_fetcher).to receive(:serializer).and_return(serializer)
expect(serializer).to receive(:represent).at_least(:once).with(hash_including(*expected_attributes)).and_return({})
records_fetcher.serialized_records
end
end
describe '#serialized_records' do
......
......@@ -8,15 +8,18 @@
end
let(:params) { { from: 1.year.ago, current_user: user } }
let(:records_fetcher) do
Gitlab::Analytics::CycleAnalytics::DataCollector.new(
stage: stage,
params: params
).records_fetcher
end
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:user) { create(:user) }
subject do
Gitlab::Analytics::CycleAnalytics::DataCollector.new(
stage: stage,
params: params
).records_fetcher.serialized_records
records_fetcher.serialized_records
end
describe '#serialized_records' do
......@@ -28,6 +31,26 @@
it 'returns all records' do
expect(subject.size).to eq(2)
end
it 'passes a hash with all expected attributes to the serializer' do
expected_attributes = [
'created_at',
'id',
'iid',
'title',
'end_event_timestamp',
'start_event_timestamp',
'total_time',
:author,
:namespace_path,
:project_path
]
serializer = instance_double(records_fetcher.send(:serializer).class.name)
allow(records_fetcher).to receive(:serializer).and_return(serializer)
expect(serializer).to receive(:represent).twice.with(hash_including(*expected_attributes)).and_return({})
subject
end
end
describe 'for issue based stage' do
......
0% Loading or loading failed.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment