@@ -10,6 +10,7 @@
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Collector;
+import org.opensearch.OpenSearchException;
 import org.opensearch.search.profile.AbstractProfileBreakdown;
 import org.opensearch.search.profile.ContextualProfileBreakdown;
 
@@ -73,14 +74,14 @@ public Map<String, Long> toBreakdownMap() {
             // If there are no leaf contexts, then return the default concurrent query level breakdown, which will include the
             // create_weight time/count
             queryNodeTime = createWeightTime;
-            maxSliceNodeTime = queryNodeTime;
-            minSliceNodeTime = queryNodeTime;
-            avgSliceNodeTime = queryNodeTime;
+            maxSliceNodeTime = 0L;
+            minSliceNodeTime = 0L;
+            avgSliceNodeTime = 0L;
             return buildDefaultQueryBreakdownMap(createWeightTime);
         }
 
         // first create the slice level breakdowns
-        final Map<Collector, Map<String, Long>> sliceLevelBreakdowns = buildSliceLevelBreakdown(createWeightStartTime);
+        final Map<Collector, Map<String, Long>> sliceLevelBreakdowns = buildSliceLevelBreakdown();
         return buildQueryBreakdownMap(sliceLevelBreakdowns, createWeightTime, createWeightStartTime);
     }
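
Worth spelling out why the new values are `0L`: in the no-leaf-contexts branch nothing ran concurrently, so the old code reported the create_weight time as min/max/avg slice time for slices that never existed. A minimal sketch of the corrected semantics, with invented nanosecond values (not taken from the class):

```java
// Invented numbers for illustration: a query with no leaf contexts only
// spends time in createWeight(), so there is no slice-level work at all.
long createWeightTime = 1_200_000L;    // nanos spent building the Weight
long queryNodeTime = createWeightTime; // query-level time is create_weight only
long maxSliceNodeTime = 0L;            // zero slices ran, so slice stats are 0,
long minSliceNodeTime = 0L;            // not queryNodeTime as before the fix
long avgSliceNodeTime = 0L;
```
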
@@ -124,18 +125,19 @@ private Map<String, Long> buildDefaultQueryBreakdownMap(long createWeightTime) {
     /**
      * Computes the slice level breakdownMap. It uses sliceCollectorsToLeaves to figure out all the leaves or segments part of a slice.
      * Then use the breakdown timing stats for each of these leaves to calculate the breakdown stats at slice level.
-     * @param createWeightStartTime start time when createWeight is called
+     *
      * @return map of collector (or slice) to breakdown map
      */
-    Map<Collector, Map<String, Long>> buildSliceLevelBreakdown(long createWeightStartTime) {
+    Map<Collector, Map<String, Long>> buildSliceLevelBreakdown() {
         final Map<Collector, Map<String, Long>> sliceLevelBreakdowns = new HashMap<>();
-        long totalSliceNodeTime = 0;
+        long totalSliceNodeTime = 0L;
         for (Map.Entry<Collector, List<LeafReaderContext>> slice : sliceCollectorsToLeaves.entrySet()) {
             final Collector sliceCollector = slice.getKey();
             // initialize each slice level breakdown
             final Map<String, Long> currentSliceBreakdown = sliceLevelBreakdowns.computeIfAbsent(sliceCollector, k -> new HashMap<>());
             // max slice end time across all timing types
             long sliceMaxEndTime = Long.MIN_VALUE;
+            long sliceMinStartTime = Long.MAX_VALUE;
             for (QueryTimingType timingType : QueryTimingType.values()) {
                 if (timingType.equals(QueryTimingType.CREATE_WEIGHT)) {
                     // do nothing for create weight as that is query level time and not slice level
@@ -155,6 +157,12 @@ Map<Collector, Map<String, Long>> buildSliceLevelBreakdown(long createWeightStar
                         // leaf, but the leaf level breakdown will not be created in the contexts map.
                         // This is because before updating the contexts map, the query hits earlyTerminationException.
                         // To handle such case, we will ignore the leaf that is not present.
+                        //
+                        // Other than early termination, this can also happen in other cases. For example: consider a must
+                        // boolean query with 2 clauses. If, while creating the scorer for the first clause, no docs are found
+                        // for the field in a leaf context, then a null scorer is returned. Since it is a must query, no scorer
+                        // will be created for the second clause's weight for this leaf context either, so the leaf context
+                        // ends up missing from the contexts map for the second clause's weight.
                         continue;
                     }
                     final Map<String, Long> currentSliceLeafBreakdownMap = contexts.get(sliceLeaf).toBreakdownMap();
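
The new comment describes a second way a leaf can be missing from `contexts`. A hedged sketch of that conjunction short-circuit (not the actual Lucene `BooleanWeight` source; it only assumes the standard `Weight.scorer(LeafReaderContext)` contract of returning null when a clause matches no docs in a leaf):

```java
import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

final class ConjunctionSketch {
    // Sketch only: a must (conjunction) query can skip its second clause for a
    // leaf, so the second clause's profiler never records that leaf context.
    static Scorer scorerFor(Weight firstClause, Weight secondClause, LeafReaderContext leaf) throws IOException {
        Scorer first = firstClause.scorer(leaf); // null: no docs for the field in this leaf
        if (first == null) {
            // A required clause with no matches means the conjunction matches nothing
            // here, so secondClause.scorer(leaf) is never called and the contexts map
            // never gets a leaf-level breakdown for the second clause's weight.
            return null;
        }
        return secondClause.scorer(leaf); // simplified: real code combines both scorers
    }
}
```
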
@@ -182,15 +190,36 @@ Map<Collector, Map<String, Long>> buildSliceLevelBreakdown(long createWeightStar
                     );
                 }
                 // compute sliceMaxEndTime as max of sliceEndTime across all timing types
-                sliceMaxEndTime = Math.max(sliceMaxEndTime, currentSliceBreakdown.get(timingTypeSliceEndTimeKey));
+                sliceMaxEndTime = Math.max(sliceMaxEndTime, currentSliceBreakdown.getOrDefault(timingTypeSliceEndTimeKey, Long.MIN_VALUE));
+                sliceMinStartTime = Math.min(
+                    sliceMinStartTime,
+                    currentSliceBreakdown.getOrDefault(timingTypeSliceStartTimeKey, Long.MAX_VALUE)
+                );
                 // compute total time for each timing type at slice level using sliceEndTime and sliceStartTime
                 currentSliceBreakdown.put(
                     timingType.toString(),
-                    currentSliceBreakdown.get(timingTypeSliceEndTimeKey) - currentSliceBreakdown.get(timingTypeSliceStartTimeKey)
+                    currentSliceBreakdown.getOrDefault(timingTypeSliceEndTimeKey, 0L) - currentSliceBreakdown.getOrDefault(
+                        timingTypeSliceStartTimeKey,
+                        0L
+                    )
                 );
             }
-            // currentSliceNodeTime includes the create weight time as well which will be same for all the slices
-            long currentSliceNodeTime = sliceMaxEndTime - createWeightStartTime;
+            // currentSliceNodeTime does not include the create weight time, as that is computed in the non-concurrent part
+            long currentSliceNodeTime;
+            if (sliceMinStartTime == Long.MAX_VALUE && sliceMaxEndTime == Long.MIN_VALUE) {
+                currentSliceNodeTime = 0L;
+            } else if (sliceMinStartTime == Long.MAX_VALUE || sliceMaxEndTime == Long.MIN_VALUE) {
+                throw new OpenSearchException(
+                    "Unexpected value of sliceMinStartTime ["
+                        + sliceMinStartTime
+                        + "] or sliceMaxEndTime ["
+                        + sliceMaxEndTime
+                        + "] while computing the slice level timing profile breakdowns"
+                );
+            } else {
+                currentSliceNodeTime = sliceMaxEndTime - sliceMinStartTime;
+            }
+
             // compute max/min slice times
             maxSliceNodeTime = Math.max(maxSliceNodeTime, currentSliceNodeTime);
             minSliceNodeTime = Math.min(minSliceNodeTime, currentSliceNodeTime);
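
The sentinel pairing is what makes the new three-way branch sound: if no timing type recorded anything for the slice, both `sliceMinStartTime` and `sliceMaxEndTime` keep their initial values and the slice time is legitimately zero, while exactly one surviving sentinel can only mean corrupted bookkeeping. A self-contained sketch of the pattern (the map keys are invented for illustration):

```java
import java.util.HashMap;
import java.util.Map;

class SliceTimeSketch {
    public static void main(String[] args) {
        // Empty map: no timing type wrote a start/end time for this slice.
        Map<String, Long> breakdown = new HashMap<>();
        // "next_doc_slice_start"/"next_doc_slice_end" stand in for the real keys.
        long minStart = breakdown.getOrDefault("next_doc_slice_start", Long.MAX_VALUE);
        long maxEnd = breakdown.getOrDefault("next_doc_slice_end", Long.MIN_VALUE);
        // Both sentinels intact -> the slice did no work, mirroring the first branch.
        long sliceTime = (minStart == Long.MAX_VALUE && maxEnd == Long.MIN_VALUE) ? 0L : maxEnd - minStart;
        System.out.println(sliceTime); // prints 0
    }
}
```
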
@@ -245,8 +274,8 @@ public Map<String, Long> buildQueryBreakdownMap(
 
             // for all other timing types, we will compute min/max/avg/total across slices
             for (Map.Entry<Collector, Map<String, Long>> sliceBreakdown : sliceLevelBreakdowns.entrySet()) {
-                Long sliceBreakdownTypeTime = sliceBreakdown.getValue().get(timingTypeKey);
-                Long sliceBreakdownTypeCount = sliceBreakdown.getValue().get(timingTypeCountKey);
+                long sliceBreakdownTypeTime = sliceBreakdown.getValue().getOrDefault(timingTypeKey, 0L);
+                long sliceBreakdownTypeCount = sliceBreakdown.getValue().getOrDefault(timingTypeCountKey, 0L);
                 // compute max/min/avg TimingType time across slices
                 queryBreakdownMap.compute(
                     maxBreakdownTypeTime,
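
The `get` to `getOrDefault` switch in this hunk is not cosmetic: when a slice map lacks a timing key (the missing-leaf case described earlier), `get` returns null and the later unboxing to a primitive throws a `NullPointerException`. A minimal reproduction of the hazard:

```java
import java.util.HashMap;
import java.util.Map;

class UnboxNpeSketch {
    public static void main(String[] args) {
        Map<String, Long> sliceBreakdown = new HashMap<>(); // key never written for this slice
        long safe = sliceBreakdown.getOrDefault("next_doc", 0L); // 0L, as in the fix
        System.out.println(safe);
        long crash = sliceBreakdown.get("next_doc"); // NullPointerException on unboxing
        System.out.println(crash); // never reached
    }
}
```
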
@@ -276,17 +305,38 @@ public Map<String, Long> buildQueryBreakdownMap(
                 );
 
                 // query start/end time for a TimingType is min/max of start/end time across slices for that TimingType
-                queryTimingTypeEndTime = Math.max(queryTimingTypeEndTime, sliceBreakdown.getValue().get(sliceEndTimeForTimingType));
-                queryTimingTypeStartTime = Math.min(queryTimingTypeStartTime, sliceBreakdown.getValue().get(sliceStartTimeForTimingType));
+                queryTimingTypeEndTime = Math.max(
+                    queryTimingTypeEndTime,
+                    sliceBreakdown.getValue().getOrDefault(sliceEndTimeForTimingType, Long.MIN_VALUE)
+                );
+                queryTimingTypeStartTime = Math.min(
+                    queryTimingTypeStartTime,
+                    sliceBreakdown.getValue().getOrDefault(sliceStartTimeForTimingType, Long.MAX_VALUE)
+                );
                 queryTimingTypeCount += sliceBreakdownTypeCount;
             }
+
+            if (queryTimingTypeStartTime == Long.MAX_VALUE || queryTimingTypeEndTime == Long.MIN_VALUE) {
+                throw new OpenSearchException(
+                    "Unexpected timing type ["
+                        + timingTypeKey
+                        + "] start ["
+                        + queryTimingTypeStartTime
+                        + "] or end time ["
+                        + queryTimingTypeEndTime
+                        + "] computed across slices for profile results"
+                );
+            }
             queryBreakdownMap.put(timingTypeKey, queryTimingTypeEndTime - queryTimingTypeStartTime);
             queryBreakdownMap.put(timingTypeCountKey, queryTimingTypeCount);
-            queryBreakdownMap.compute(avgBreakdownTypeTime, (key, value) -> (value == null) ? 0 : value / sliceLevelBreakdowns.size());
-            queryBreakdownMap.compute(avgBreakdownTypeCount, (key, value) -> (value == null) ? 0 : value / sliceLevelBreakdowns.size());
+            queryBreakdownMap.compute(avgBreakdownTypeTime, (key, value) -> (value == null) ? 0L : value / sliceLevelBreakdowns.size());
+            queryBreakdownMap.compute(avgBreakdownTypeCount, (key, value) -> (value == null) ? 0L : value / sliceLevelBreakdowns.size());
             // compute query end time using max of query end time across all timing types
             queryEndTime = Math.max(queryEndTime, queryTimingTypeEndTime);
         }
+        if (queryEndTime == Long.MIN_VALUE) {
+            throw new OpenSearchException("Unexpected error while computing the query end time across slices in profile result");
+        }
         queryNodeTime = queryEndTime - createWeightStartTime;
         return queryBreakdownMap;
     }
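
One subtlety in the averaging lines: judging by the earlier "compute max/min/avg TimingType time across slices" comment, the avg keys appear to hold sums accumulated across slices by the time this `compute` runs, so the lambda only divides by the slice count (yielding `0L` for a never-written key). A toy demonstration with an invented key name:

```java
import java.util.HashMap;
import java.util.Map;

class AvgComputeSketch {
    public static void main(String[] args) {
        Map<String, Long> queryBreakdownMap = new HashMap<>();
        int sliceCount = 3; // stand-in for sliceLevelBreakdowns.size()
        queryBreakdownMap.put("avg_next_doc", 90L); // sum accumulated over 3 slices
        // Divide the accumulated sum by the slice count to get the average;
        // an absent key maps to 0L, just like the (value == null) branch.
        queryBreakdownMap.compute("avg_next_doc", (key, value) -> (value == null) ? 0L : value / sliceCount);
        System.out.println(queryBreakdownMap.get("avg_next_doc")); // prints 30
    }
}
```
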