@@ -11,6 +11,7 @@
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.LongField;
 import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -35,6 +36,7 @@
 import org.elasticsearch.common.util.MockPageCacheRecycler;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.compute.aggregation.CountAggregatorFunction;
+import org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier;
 import org.elasticsearch.compute.aggregation.blockhash.BlockHash;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
@@ -254,6 +256,112 @@ public String toString() {
         assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
     }
 
+    // TODO: Remove the ordinals grouping operator or enable it in GroupingAggregatorFunctionTestCase
+    public void testValuesWithOrdinalGrouping() throws Exception {
+        DriverContext driverContext = driverContext();
+        BlockFactory blockFactory = driverContext.blockFactory();
+
+        final int numDocs = between(100, 1000);
+        Map<BytesRef, Set<Long>> expectedValues = new HashMap<>();
+        try (BaseDirectoryWrapper dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
+            String VAL_NAME = "val";
+            String KEY_NAME = "key";
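+            // Index random docs: each gets a keyword "key" and, some of the time, up to two
+            // long "val" doc values. expectedValues records the distinct values seen per key.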
+            for (int i = 0; i < numDocs; i++) {
+                Document doc = new Document();
+                BytesRef key = new BytesRef(Integer.toString(between(1, 100)));
+                SortedSetDocValuesField keyField = new SortedSetDocValuesField(KEY_NAME, key);
+                doc.add(keyField);
+                if (randomBoolean()) {
+                    int numValues = between(0, 2);
+                    for (int v = 0; v < numValues; v++) {
+                        long val = between(1, 1000);
+                        var valuesField = new SortedNumericDocValuesField(VAL_NAME, val);
+                        doc.add(valuesField);
+                        expectedValues.computeIfAbsent(key, k -> new HashSet<>()).add(val);
+                    }
+                }
+                writer.addDocument(doc);
+            }
+            writer.commit();
+            try (DirectoryReader reader = writer.getReader()) {
+                List<Operator> operators = new ArrayList<>();
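+                // Pipeline under test: Lucene source -> optional shuffle -> doc-values reader
+                // -> ordinals-based INITIAL aggregation -> hash-based FINAL aggregation.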
+                if (randomBoolean()) {
+                    operators.add(new ShuffleDocsOperator(blockFactory));
+                }
+                operators.add(
+                    new ValuesSourceReaderOperator(
+                        blockFactory,
+                        List.of(
+                            new ValuesSourceReaderOperator.FieldInfo(
+                                VAL_NAME,
+                                ElementType.LONG,
+                                unused -> new BlockDocValuesReader.LongsBlockLoader(VAL_NAME)
+                            )
+                        ),
+                        List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> {
+                            throw new UnsupportedOperationException();
+                        }, 0.2)),
+                        0
+                    )
+                );
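+                // Group by the keyword key via ordinals, running the INITIAL phase of the
+                // VALUES(long) aggregator over channel 1 (the values loaded above).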
+                operators.add(
+                    new OrdinalsGroupingOperator(
+                        shardIdx -> new KeywordFieldMapper.KeywordFieldType(KEY_NAME).blockLoader(mockBlContext()),
+                        List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> SourceLoader.FROM_STORED_SOURCE, 0.2)),
+                        ElementType.BYTES_REF,
+                        0,
+                        KEY_NAME,
+                        List.of(new ValuesLongAggregatorFunctionSupplier().groupingAggregatorFactory(INITIAL, List.of(1))),
+                        randomPageSize(),
+                        driverContext
+                    )
+                );
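+                // FINAL phase: re-group the partial aggregation state by the key block (channel 0).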
+                operators.add(
+                    new HashAggregationOperator(
+                        List.of(new ValuesLongAggregatorFunctionSupplier().groupingAggregatorFactory(FINAL, List.of(1))),
+                        () -> BlockHash.build(
+                            List.of(new BlockHash.GroupSpec(0, ElementType.BYTES_REF)),
+                            driverContext.blockFactory(),
+                            randomPageSize(),
+                            false
+                        ),
+                        driverContext
+                    )
+                );
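+                // Drain the output pages, collecting each (key, value) pair; the assertTrue
+                // below guarantees no pair is emitted twice.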
+                Map<BytesRef, Set<Long>> actualValues = new HashMap<>();
+                Driver driver = TestDriverFactory.create(
+                    driverContext,
+                    luceneOperatorFactory(
+                        reader,
+                        List.of(new LuceneSliceQueue.QueryAndTags(new MatchAllDocsQuery(), List.of())),
+                        LuceneOperator.NO_LIMIT
+                    ).get(driverContext),
+                    operators,
+                    new PageConsumerOperator(page -> {
+                        BytesRefBlock keyBlock = page.getBlock(0);
+                        LongBlock valueBlock = page.getBlock(1);
+                        BytesRef spare = new BytesRef();
+                        for (int p = 0; p < page.getPositionCount(); p++) {
+                            var key = keyBlock.getBytesRef(p, spare);
+                            int valueCount = valueBlock.getValueCount(p);
+                            for (int i = 0; i < valueCount; i++) {
+                                long val = valueBlock.getLong(valueBlock.getFirstValueIndex(p) + i);
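+                                // getBytesRef may fill the shared spare buffer, so deep-copy the key before storing it.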
+                                boolean added = actualValues.computeIfAbsent(BytesRef.deepCopyOf(key), k -> new HashSet<>()).add(val);
+                                assertTrue(actualValues.toString(), added);
+                            }
+                        }
+                        page.releaseBlocks();
+                    })
+                );
+                OperatorTestCase.runDriver(driver);
+                assertDriverContext(driverContext);
+                assertThat(actualValues, equalTo(expectedValues));
+                org.elasticsearch.common.util.MockBigArrays.ensureAllArraysAreReleased();
+            }
+        }
+        assertThat(blockFactory.breaker().getUsed(), equalTo(0L));
+    }
+
     public void testPushRoundToToQuery() throws IOException {
         long firstGroupMax = randomLong();
         long secondGroupMax = randomLong();