Add hard_bounds support for histogram field-based histograms (#64246) (#64312)

hard_bounds now supports histogram fields; previously, hard bounds on histogram fields were ignored.

Closes #62124
imotov authored Oct 28, 2020
1 parent c9af709 commit af98730
Showing 3 changed files with 57 additions and 9 deletions.
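
As the commit message describes, the `hard_bounds` setting on a histogram aggregation is now honored when the target field is a histogram field. Below is a minimal sketch of how the setting is configured from the Java API, mirroring the builder calls in the tests that follow; the class wrapper, the "latency" field name, and the bound values are illustrative, not part of this change.

import org.elasticsearch.search.aggregations.bucket.histogram.DoubleBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;

public class HardBoundsUsageSketch {
    public static void main(String[] args) {
        // Buckets whose start falls outside [0, 10] are never created; after this
        // commit the same behavior applies when "latency" is a histogram-typed field.
        HistogramAggregationBuilder agg = new HistogramAggregationBuilder("latency_histo")
            .field("latency")                          // hypothetical field name; may be mapped as a histogram field
            .interval(5)
            .hardBounds(new DoubleBounds(0.0, 10.0));
        // The builder would typically be attached to a search request via
        // SearchSourceBuilder#aggregation(agg); omitted here to keep the sketch minimal.
    }
}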
Original file line number Diff line number Diff line change
@@ -428,6 +428,29 @@ public void testExtendedBounds() throws Exception {
        }
    }

    public void testHardBounds() throws Exception {
        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
            for (double value : new double[] { 3.2, -5, -4.5, 4.3 }) {
                Document doc = new Document();
                doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value)));
                w.addDocument(doc);
            }

            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field("field")
                .interval(5)
                .hardBounds(new DoubleBounds(0.0, 10.0));
            MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
            try (IndexReader reader = w.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                assertEquals(1, histogram.getBuckets().size());
                assertEquals(0d, histogram.getBuckets().get(0).getKey());
                assertEquals(2, histogram.getBuckets().get(0).getDocCount());
                assertTrue(AggregationInspectionHelper.hasValue(histogram));
            }
        }
    }

    public void testAsSubAgg() throws IOException {
        AggregationBuilder request = new HistogramAggregationBuilder("outer").field("outer").interval(5).subAggregation(
            new HistogramAggregationBuilder("inner").field("inner").interval(5).subAggregation(
@@ -76,16 +76,18 @@ public void collect(int doc, long owningBucketOrd) throws IOException {
 
             double key = Math.floor((value - offset) / interval);
             assert key >= previousKey;
-            long bucketOrd = bucketOrds.add(owningBucketOrd, Double.doubleToLongBits(key));
-            if (bucketOrd < 0) { // already seen
-                bucketOrd = -1 - bucketOrd;
-                collectExistingBucket(sub, doc, bucketOrd);
-            } else {
-                collectBucket(sub, doc, bucketOrd);
+            if (hardBounds == null || hardBounds.contain(key * interval)) {
+                long bucketOrd = bucketOrds.add(owningBucketOrd, Double.doubleToLongBits(key));
+                if (bucketOrd < 0) { // already seen
+                    bucketOrd = -1 - bucketOrd;
+                    collectExistingBucket(sub, doc, bucketOrd);
+                } else {
+                    collectBucket(sub, doc, bucketOrd);
+                }
+                // We have added the document already. We should increment doc_count by count - 1
+                // so that we have added it count times.
+                incrementBucketDocCount(bucketOrd, count - 1);
             }
-            // We have added the document already. We should increment doc_count by count - 1
-            // so that we have added it count times.
-            incrementBucketDocCount(bucketOrd, count - 1);
             previousKey = key;
         }
     }
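
The hunk above is the heart of the fix: the aggregator derives a bucket key as floor((value - offset) / interval) and only creates or updates the bucket when hardBounds is null or contains the bucket's start (key * interval). Below is a small standalone sketch of that decision, replayed on the values from the numeric-field test above; the helper name is made up, and the >=/<= edge handling is an assumption, since the exact boundary semantics live in DoubleBounds.contain.

public class HardBoundsBucketingSketch {
    // Mirrors the new check: a value's bucket starts at key * interval, and the
    // bucket is collected only if that start lies inside the hard bounds.
    static boolean bucketKept(double value, double offset, double interval, double min, double max) {
        double key = Math.floor((value - offset) / interval);
        double bucketStart = key * interval;
        return bucketStart >= min && bucketStart <= max; // edge inclusivity assumed, see lead-in
    }

    public static void main(String[] args) {
        // Values from the numeric-field test above: interval 5, offset 0, hard bounds [0, 10].
        for (double v : new double[] { 3.2, -5, -4.5, 4.3 }) {
            double bucketStart = Math.floor(v / 5) * 5;
            System.out.printf("%5.1f -> bucket %5.1f %s%n",
                v, bucketStart, bucketKept(v, 0, 5, 0.0, 10.0) ? "(kept)" : "(dropped)");
        }
        // Only 3.2 and 4.3 survive, both in the bucket starting at 0.0 -> one bucket with
        // doc count 2, which is exactly what the numeric-field test asserts.
    }
}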
@@ -22,6 +22,7 @@
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.bucket.histogram.DoubleBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
@@ -164,6 +165,28 @@ public void testExtendedBounds() throws Exception {
        }
    }

    public void testHardBounds() throws Exception {
        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {

            w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -4.5, 4.3 })));
            w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -5, 3.2 })));
            w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { 1.0, 2.2 })));
            w.addDocument(singleton(histogramFieldDocValues(FIELD_NAME, new double[] { -6.0, 12.2 })));

            HistogramAggregationBuilder aggBuilder = new HistogramAggregationBuilder("my_agg").field(FIELD_NAME)
                .interval(5)
                .hardBounds(new DoubleBounds(0.0, 5.0));
            try (IndexReader reader = w.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, defaultFieldType(FIELD_NAME));
                assertEquals(1, histogram.getBuckets().size());
                assertEquals(0d, histogram.getBuckets().get(0).getKey());
                assertEquals(4, histogram.getBuckets().get(0).getDocCount());
                assertTrue(AggregationInspectionHelper.hasValue(histogram));
            }
        }
    }
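
For histogram fields, every value recorded inside the pre-aggregated histogram contributes to doc_count, so the asserted count of 4 comes from 4.3, 3.2, 1.0 and 2.2 landing in the bucket that starts at 0, while -4.5, -5, -6.0 and 12.2 fall into buckets outside the [0, 5] hard bounds. A quick tally of that expectation in plain Java, assuming each listed value carries a count of 1 (which is how the test data reads):

public class HistogramFieldHardBoundsTally {
    public static void main(String[] args) {
        // Per-document value arrays from the histogram-field test above.
        double[][] docs = { { -4.5, 4.3 }, { -5, 3.2 }, { 1.0, 2.2 }, { -6.0, 12.2 } };
        long docCount = 0;
        for (double[] doc : docs) {
            for (double v : doc) {
                double bucketStart = Math.floor(v / 5) * 5;        // interval 5, offset 0
                if (bucketStart >= 0.0 && bucketStart <= 5.0) {    // hard bounds [0, 5]; edge handling assumed
                    docCount++;                                    // each histogram value counts toward doc_count
                }
            }
        }
        System.out.println("doc_count in the surviving bucket: " + docCount); // prints 4, matching the assertion
    }
}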

    /**
     * Test that sub-aggregations are not supported
     */
