Commit

Replace AggregatorTestCase#search with AggregatorTestCase#searchAndReduce (elastic#60683)

* Replace AggregatorTestCase#search with AggregatorTestCase#searchAndReduce

This commit removes the ability to test the top-level result of an aggregator
before it runs the final reduce. All aggregator tests that used AggregatorTestCase#search
are rewritten to use AggregatorTestCase#searchAndReduce, ensuring that we test
the final output (the result sent to the end user) rather than an intermediary result
that could differ.
This change also removes spurious commits triggered on top of a random index writer.
These commits slow down the tests and are redundant with the commits that the
random index writer already performs.
jimczi authored Aug 6, 2020
1 parent 807b0fe commit 5de0ed9
Showing 39 changed files with 480 additions and 868 deletions.
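Every test-side hunk below follows the same pattern: a call to the pre-reduce search helper becomes a call to searchAndReduce, which runs the final reduce before returning, so assertions exercise the object an end user would actually receive. A minimal sketch of the new pattern, assuming an AggregatorTestCase subclass and a max aggregation over a long field (the test class name, field name, and values are illustrative, not part of this commit):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.metrics.InternalMax;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;

public class MaxReduceExampleTests extends AggregatorTestCase {
    public void testReducedMax() throws Exception {
        try (Directory directory = newDirectory();
             RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
            Document document = new Document();
            document.add(new SortedNumericDocValuesField("number", 7));
            indexWriter.addDocument(document);
            try (IndexReader reader = indexWriter.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                MaxAggregationBuilder aggBuilder = new MaxAggregationBuilder("max").field("number");
                MappedFieldType fieldType =
                    new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
                // Before this commit, search(...) returned the top-level result without the
                // final reduce; searchAndReduce(...) reduces first, so the assertion checks
                // the same InternalMax the end user would see.
                InternalMax max = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                assertEquals(7.0, max.getValue(), 0.0);
            }
        }
    }
}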
@@ -53,9 +53,9 @@ public void testNoData() throws Exception {
IndexSearcher searcher = new IndexSearcher(reader);
MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg")
.fields(Collections.singletonList("field"));
- InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
+ InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
assertNull(stats.getStats());
- assertFalse(MatrixAggregationInspectionHelper.hasValue(stats));
+ assertEquals(0L, stats.getDocCount());
}
}
}
@@ -72,9 +72,9 @@ public void testUnmapped() throws Exception {
IndexSearcher searcher = new IndexSearcher(reader);
MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg")
.fields(Collections.singletonList("bogus"));
- InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
+ InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
assertNull(stats.getStats());
- assertFalse(MatrixAggregationInspectionHelper.hasValue(stats));
+ assertEquals(0L, stats.getDocCount());
}
}
}
@@ -85,43 +85,6 @@ public void testTwoFields() throws Exception {
String fieldB = "b";
MappedFieldType ftB = new NumberFieldMapper.NumberFieldType(fieldB, NumberFieldMapper.NumberType.DOUBLE);

- try (Directory directory = newDirectory();
-      RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
-
-     int numDocs = scaledRandomIntBetween(8192, 16384);
-     Double[] fieldAValues = new Double[numDocs];
-     Double[] fieldBValues = new Double[numDocs];
-     for (int docId = 0; docId < numDocs; docId++) {
-         Document document = new Document();
-         fieldAValues[docId] = randomDouble();
-         document.add(new SortedNumericDocValuesField(fieldA, NumericUtils.doubleToSortableLong(fieldAValues[docId])));
-
-         fieldBValues[docId] = randomDouble();
-         document.add(new SortedNumericDocValuesField(fieldB, NumericUtils.doubleToSortableLong(fieldBValues[docId])));
-         indexWriter.addDocument(document);
-     }
-
-     MultiPassStats multiPassStats = new MultiPassStats(fieldA, fieldB);
-     multiPassStats.computeStats(Arrays.asList(fieldAValues), Arrays.asList(fieldBValues));
-     try (IndexReader reader = indexWriter.getReader()) {
-         IndexSearcher searcher = new IndexSearcher(reader);
-         MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg")
-             .fields(Arrays.asList(fieldA, fieldB));
-         InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB);
-         // Since `search` doesn't do any reduction, and the InternalMatrixStats object will have a null `MatrixStatsResults`
-         // object. That is created during the final reduction, which also does a final round of computations
-         // So we have to create a MatrixStatsResults object here manually so that the final `compute()` is called
-         multiPassStats.assertNearlyEqual(new MatrixStatsResults(stats.getStats()));
-     }
- }
- }
-
- public void testTwoFieldsReduce() throws Exception {
-     String fieldA = "a";
-     MappedFieldType ftA = new NumberFieldMapper.NumberFieldType(fieldA, NumberFieldMapper.NumberType.DOUBLE);
-     String fieldB = "b";
-     MappedFieldType ftB = new NumberFieldMapper.NumberFieldType(fieldB, NumberFieldMapper.NumberType.DOUBLE);
-
try (Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {

@@ -145,8 +108,6 @@ public void testTwoFieldsReduce() throws Exception {
MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg")
.fields(Arrays.asList(fieldA, fieldB));
InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB);
- // Unlike testTwoFields, `searchAndReduce` will execute reductions so the `MatrixStatsResults` object
- // will be populated and fully computed. We should use that value directly to test against
multiPassStats.assertNearlyEqual(stats);
assertTrue(MatrixAggregationInspectionHelper.hasValue(stats));
}
@@ -303,7 +303,7 @@ private void testCase(Query query, IndexSearcher indexSearcher, Consumer<Interna
aggregationBuilder.subAggregation(new MinAggregationBuilder("in_parent").field("number"));

MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
- InternalParent result = search(indexSearcher, query, aggregationBuilder, fieldType);
+ InternalParent result = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
verify.accept(result);
}

@@ -314,7 +314,7 @@ private void testCaseTerms(Query query, IndexSearcher indexSearcher, Consumer<In
aggregationBuilder.subAggregation(new TermsAggregationBuilder("value_terms").field("number"));

MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
- InternalParent result = search(indexSearcher, query, aggregationBuilder, fieldType);
+ InternalParent result = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
verify.accept(result);
}

@@ -328,7 +328,7 @@ private void testCaseTermsParentTerms(Query query, IndexSearcher indexSearcher,

MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
MappedFieldType subFieldType = new NumberFieldMapper.NumberFieldType("subNumber", NumberFieldMapper.NumberType.LONG);
- LongTerms result = search(indexSearcher, query, aggregationBuilder, fieldType, subFieldType);
+ LongTerms result = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType, subFieldType);
verify.accept(result);
}

@@ -164,7 +164,8 @@ public void testParentChildAsSubAgg() throws IOException {
expectedOddMin = Math.min(expectedOddMin, e.getValue().v2());
}
}
- StringTerms result = search(indexSearcher, new MatchAllDocsQuery(), request, longField("number"), keywordField("kwd"));
+ StringTerms result =
+     searchAndReduce(indexSearcher, new MatchAllDocsQuery(), request, longField("number"), keywordField("kwd"));

StringTerms.Bucket evenBucket = result.getBucketByKey("even");
InternalChildren evenChildren = evenBucket.getAggregations().get("children");
@@ -254,7 +255,7 @@ private void testCase(Query query, IndexSearcher indexSearcher, Consumer<Interna
aggregationBuilder.subAggregation(new MinAggregationBuilder("in_child").field("number"));

MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("number", NumberFieldMapper.NumberType.LONG);
- InternalChildren result = search(indexSearcher, query, aggregationBuilder, fieldType);
+ InternalChildren result = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType);
verify.accept(result);
}

@@ -400,7 +400,6 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) {
}

mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext);
-
return reducedBuckets;
}

@@ -451,7 +450,8 @@ private void mergeBucketsWithPlan(List<Bucket> buckets, List<BucketRange> plan,
}
toMerge.add(buckets.get(startIdx)); // Don't remove the startIdx bucket because it will be replaced by the merged bucket

- reduceContext.consumeBucketsAndMaybeBreak(- (toMerge.size() - 1));
+ int toRemove = toMerge.stream().mapToInt(b -> countInnerBucket(b)+1).sum();
+ reduceContext.consumeBucketsAndMaybeBreak(-toRemove + 1);
Bucket merged_bucket = reduceBucket(toMerge, reduceContext);

buckets.set(startIdx, merged_bucket);
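A note on the hunk above: the old circuit-breaker accounting released only the merged-away top-level buckets, ignoring any sub-aggregation buckets they carried, while the new code charges countInnerBucket(b) + 1 per merged bucket and adds back the single surviving bucket. A minimal sketch of that arithmetic, with countInnerBucket replaced by a stand-in and illustrative bucket counts (nothing here is taken from the real classes):

import java.util.List;

class BucketAccounting {
    // Stand-in for the real countInnerBucket(bucket) helper: the number of
    // buckets nested inside a bucket's sub-aggregations.
    static int countInnerBucket(int innerBuckets) {
        return innerBuckets;
    }

    public static void main(String[] args) {
        // Suppose three adjacent buckets are merged, carrying 4, 0 and 2 inner buckets.
        List<Integer> toMerge = List.of(4, 0, 2);

        // Old accounting: release only the merged-away top-level buckets (3 - 1 = 2).
        int oldRelease = -(toMerge.size() - 1);

        // New accounting: each merged bucket frees itself plus its inner buckets,
        // and the single merged result is added back.
        int toRemove = toMerge.stream().mapToInt(b -> countInnerBucket(b) + 1).sum();
        int newRelease = -toRemove + 1;

        System.out.println(oldRelease);  // -2: misses the 6 freed inner buckets
        System.out.println(newRelease);  // -8: (4+1) + (0+1) + (2+1) = 9 freed, 1 added back
    }
}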
@@ -1879,7 +1879,7 @@ public void testEarlyTermination() throws Exception {
)
);

- executeTestCase(true, false, new TermQuery(new Term("foo", "bar")),
+ executeTestCase(true, new TermQuery(new Term("foo", "bar")),
dataset,
() ->
new CompositeAggregationBuilder("name",
@@ -1899,7 +1899,7 @@ public void testEarlyTermination() throws Exception {
);

// source field and index sorting config have different order
- executeTestCase(true, false, new TermQuery(new Term("foo", "bar")),
+ executeTestCase(true, new TermQuery(new Term("foo", "bar")),
dataset,
() ->
new CompositeAggregationBuilder("name",
@@ -1936,7 +1936,7 @@ public void testIndexSortWithDuplicate() throws Exception {
);

for (SortOrder order : SortOrder.values()) {
- executeTestCase(true, false, new MatchAllDocsQuery(),
+ executeTestCase(true, new MatchAllDocsQuery(),
dataset,
() ->
new CompositeAggregationBuilder("name",
@@ -1959,7 +1959,7 @@ public void testIndexSortWithDuplicate() throws Exception {
}
);

- executeTestCase(true, false, new MatchAllDocsQuery(),
+ executeTestCase(true, new MatchAllDocsQuery(),
dataset,
() ->
new CompositeAggregationBuilder("name",
@@ -1989,14 +1989,12 @@ private void testSearchCase(List<Query> queries,
Supplier<CompositeAggregationBuilder> create,
Consumer<InternalComposite> verify) throws IOException {
for (Query query : queries) {
- executeTestCase(false, false, query, dataset, create, verify);
- executeTestCase(false, true, query, dataset, create, verify);
- executeTestCase(true, true, query, dataset, create, verify);
+ executeTestCase(false, query, dataset, create, verify);
+ executeTestCase(true, query, dataset, create, verify);
}
}

private void executeTestCase(boolean useIndexSort,
- boolean reduced,
Query query,
List<Map<String, List<Object>>> dataset,
Supplier<CompositeAggregationBuilder> create,
@@ -2019,18 +2017,13 @@ private void executeTestCase(boolean useIndexSort,
indexWriter.addDocument(document);
document.clear();
}
- if (reduced == false && randomBoolean()) {
+ if (rarely()) {
indexWriter.forceMerge(1);
}
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
- final InternalComposite composite;
- if (reduced) {
-     composite = searchAndReduce(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES);
- } else {
-     composite = search(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES);
- }
+ InternalComposite composite = searchAndReduce(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES);
verify.accept(composite);
}
}
@@ -62,7 +62,7 @@ public void testEmpty() throws Exception {
IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
QueryBuilder filter = QueryBuilders.termQuery("field", randomAlphaOfLength(5));
FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter);
- InternalFilter response = search(indexSearcher, new MatchAllDocsQuery(), builder,
+ InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder,
fieldType);
assertEquals(response.getDocCount(), 0);
assertFalse(AggregationInspectionHelper.hasValue(response));
@@ -80,7 +80,7 @@ public void testRandom() throws Exception {
for (int i = 0; i < numDocs; i++) {
if (frequently()) {
// make sure we have more than one segment to test the merge
- indexWriter.getReader().close();
+ indexWriter.commit();
}
int value = randomInt(maxTerm-1);
expectedBucketCount[value] += 1;
@@ -98,20 +98,12 @@ public void testRandom() throws Exception {
QueryBuilder filter = QueryBuilders.termQuery("field", Integer.toString(value));
FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter);

- for (boolean doReduce : new boolean[]{true, false}) {
-     final InternalFilter response;
-     if (doReduce) {
-         response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder,
-             fieldType);
-     } else {
-         response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-     }
-     assertEquals(response.getDocCount(), (long) expectedBucketCount[value]);
-     if (expectedBucketCount[value] > 0) {
-         assertTrue(AggregationInspectionHelper.hasValue(response));
-     } else {
-         assertFalse(AggregationInspectionHelper.hasValue(response));
-     }
+ final InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+ assertEquals(response.getDocCount(), (long) expectedBucketCount[value]);
+ if (expectedBucketCount[value] > 0) {
+     assertTrue(AggregationInspectionHelper.hasValue(response));
+ } else {
+     assertFalse(AggregationInspectionHelper.hasValue(response));
+ }
} finally {
indexReader.close();
@@ -60,7 +60,7 @@ public void testEmpty() throws Exception {
}
FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters);
builder.otherBucketKey("other");
- InternalFilters response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+ InternalFilters response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
assertEquals(response.getBuckets().size(), numFilters);
for (InternalFilters.InternalBucket filter : response.getBuckets()) {
assertEquals(filter.getDocCount(), 0);
@@ -113,22 +113,15 @@ public void testKeyedFilter() throws Exception {
FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", keys);
builder.otherBucket(true);
builder.otherBucketKey("other");
- for (boolean doReduce : new boolean[] {true, false}) {
-     final InternalFilters filters;
-     if (doReduce) {
-         filters = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-     } else {
-         filters = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-     }
-     assertEquals(filters.getBuckets().size(), 7);
-     assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2);
-     assertEquals(filters.getBucketByKey("foo").getDocCount(), 2);
-     assertEquals(filters.getBucketByKey("foo2").getDocCount(), 2);
-     assertEquals(filters.getBucketByKey("bar").getDocCount(), 1);
-     assertEquals(filters.getBucketByKey("same").getDocCount(), 1);
-     assertEquals(filters.getBucketByKey("other").getDocCount(), 2);
-     assertTrue(AggregationInspectionHelper.hasValue(filters));
- }
+ final InternalFilters filters = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+ assertEquals(filters.getBuckets().size(), 7);
+ assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2);
+ assertEquals(filters.getBucketByKey("foo").getDocCount(), 2);
+ assertEquals(filters.getBucketByKey("foo2").getDocCount(), 2);
+ assertEquals(filters.getBucketByKey("bar").getDocCount(), 1);
+ assertEquals(filters.getBucketByKey("same").getDocCount(), 1);
+ assertEquals(filters.getBucketByKey("other").getDocCount(), 2);
+ assertTrue(AggregationInspectionHelper.hasValue(filters));

indexReader.close();
directory.close();
@@ -175,28 +168,21 @@ public void testRandom() throws Exception {
builder.otherBucket(true);
builder.otherBucketKey("other");

- for (boolean doReduce : new boolean[]{true, false}) {
-     final InternalFilters response;
-     if (doReduce) {
-         response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-     } else {
-         response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
-     }
-     List<InternalFilters.InternalBucket> buckets = response.getBuckets();
-     assertEquals(buckets.size(), filters.length + 1);
+ final InternalFilters response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType);
+ List<InternalFilters.InternalBucket> buckets = response.getBuckets();
+ assertEquals(buckets.size(), filters.length + 1);

- for (InternalFilters.InternalBucket bucket : buckets) {
-     if ("other".equals(bucket.getKey())) {
-         assertEquals(bucket.getDocCount(), expectedOtherCount);
-     } else {
-         int index = Integer.parseInt(bucket.getKey());
-         assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]);
-     }
+ for (InternalFilters.InternalBucket bucket : buckets) {
+     if ("other".equals(bucket.getKey())) {
+         assertEquals(bucket.getDocCount(), expectedOtherCount);
+     } else {
+         int index = Integer.parseInt(bucket.getKey());
+         assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]);
+     }

-     // Always true because we include 'other' in the agg
-     assertTrue(AggregationInspectionHelper.hasValue(response));
- }

+ // Always true because we include 'other' in the agg
+ assertTrue(AggregationInspectionHelper.hasValue(response));
} finally {
indexReader.close();
directory.close();