Percentile/Ranks should return null instead of NaN when empty #30460

Merged: 10 commits, Jun 18, 2018
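The change below makes the percentiles and percentile-ranks aggregations serialize an explicit JSON null for every value (and for the optional _as_string rendering) whenever the underlying state holds no documents, rather than surfacing the NaN placeholder that the internal accessors still return. As a minimal sketch of the resulting response shape for an empty bucket, this is the keyed output with a non-RAW doc value format and the illustrative percents 1.0/2.0/3.0 used by the new tests:

{
  "values" : {
    "1.0" : null,
    "1.0_as_string" : null,
    "2.0" : null,
    "2.0_as_string" : null,
    "3.0" : null,
    "3.0_as_string" : null
  }
}

With DocValueFormat.RAW the _as_string entries are omitted, and with keyed set to false the same nulls appear in the key/value array form, exactly as asserted by the tests added in this PR.
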
@@ -123,9 +123,9 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
for(int i = 0; i < keys.length; ++i) {
String key = String.valueOf(keys[i]);
double value = value(keys[i]);
builder.field(key, value);
builder.field(key, state.getTotalCount() == 0 ? null : value);
if (format != DocValueFormat.RAW) {
builder.field(key + "_as_string", format.format(value));
builder.field(key + "_as_string", state.getTotalCount() == 0 ? null : format.format(value));
}
}
builder.endObject();
@@ -135,9 +135,10 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
double value = value(keys[i]);
builder.startObject();
builder.field(CommonFields.KEY.getPreferredName(), keys[i]);
builder.field(CommonFields.VALUE.getPreferredName(), value);
builder.field(CommonFields.VALUE.getPreferredName(), state.getTotalCount() == 0 ? null : value);
if (format != DocValueFormat.RAW) {
builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(),
state.getTotalCount() == 0 ? null : format.format(value));
}
builder.endObject();
}

@@ -106,9 +106,9 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
for(int i = 0; i < keys.length; ++i) {
String key = String.valueOf(keys[i]);
double value = value(keys[i]);
builder.field(key, value);
builder.field(key, state.size() == 0 ? null : value);
if (format != DocValueFormat.RAW) {
builder.field(key + "_as_string", format.format(value));
builder.field(key + "_as_string", state.size() == 0 ? null : format.format(value));
}
}
builder.endObject();
@@ -118,9 +118,9 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th
double value = value(keys[i]);
builder.startObject();
builder.field(CommonFields.KEY.getPreferredName(), keys[i]);
builder.field(CommonFields.VALUE.getPreferredName(), value);
builder.field(CommonFields.VALUE.getPreferredName(), state.size() == 0 ? null : value);
if (format != DocValueFormat.RAW) {
builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value));
builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), state.size() == 0 ? null : format.format(value));
}
builder.endObject();
}
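
Note that the two serializers changed above differ only in how emptiness is detected: the first checks state.getTotalCount() == 0 (the HDR DoubleHistogram), while the second checks state.size() == 0 (the TDigestState). A condensed, hypothetical sketch of the shared pattern, with isEmpty standing in for whichever check applies:

// isEmpty abstracts the per-implementation check: getTotalCount() == 0 for HDR,
// size() == 0 for t-digest; the name is introduced here only for illustration.
boolean isEmpty = state.size() == 0;
// emit an explicit null instead of the NaN that value(...) yields for an empty state
builder.field(key, isEmpty ? null : value);
if (format != DocValueFormat.RAW) {
    // the formatted "_as_string" companion field is likewise nulled out
    builder.field(key + "_as_string", isEmpty ? null : format.format(value));
}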

@@ -20,17 +20,27 @@
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;

import org.HdrHistogram.DoubleHistogram;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;


public class InternalHDRPercentilesRanksTests extends InternalPercentilesRanksTestCase<InternalHDRPercentileRanks> {

@Override
@@ -103,4 +113,85 @@ protected InternalHDRPercentileRanks mutateInstance(InternalHDRPercentileRanks i
}
return new InternalHDRPercentileRanks(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
}

public void testEmptyRanksXContent() throws IOException {
Review comment (Member): I'm just looking at how similar the xContent output of InternalHDRPercentilesRanksTests and InternalTDigestPercentilesRanksTests is; maybe these two tests could be pushed up one level to InternalPercentilesRanksTestCase by calling the sub-tests' createTestInstance() method with the appropriate values? I haven't really checked whether the outputs are exactly the same, so maybe I'm missing something, but it would be great to reduce the number of rather identical test cases. Maybe pushing all four cases up to AbstractPercentilesTestCase would work as well? Not sure though.

Reply (Contributor, author): ++ This combined nicely into two tests at the InternalPercentile(Ranks)TestCase level. It couldn't be moved fully up to the abstract class, as the API between percentiles and ranks is slightly different.

double[] percents = new double[]{1,2,3};
boolean keyed = randomBoolean();
DocValueFormat docValueFormat = randomNumericDocValueFormat();

final DoubleHistogram state = new DoubleHistogram(3);
InternalHDRPercentileRanks agg = new InternalHDRPercentileRanks("test", percents, state, keyed, docValueFormat,
Collections.emptyList(), Collections.emptyMap());

for (Percentile percentile : agg) {
Double value = percentile.getValue();
assertThat(agg.percent(value), equalTo(Double.NaN));
assertThat(agg.percentAsString(value), equalTo("NaN"));
}

XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
String expected;
if (keyed && docValueFormat.equals(DocValueFormat.RAW)) {
expected = "{\n" +
" \"values\" : {\n" +
" \"1.0\" : null,\n" +
" \"2.0\" : null,\n" +
" \"3.0\" : null\n" +
" }\n" +
"}";
} else if (keyed) {
expected = "{\n" +
" \"values\" : {\n" +
" \"1.0\" : null,\n" +
" \"1.0_as_string\" : null,\n" +
" \"2.0\" : null,\n" +
" \"2.0_as_string\" : null,\n" +
" \"3.0\" : null,\n" +
" \"3.0_as_string\" : null\n" +
" }\n" +
"}";
} else if (keyed == false && docValueFormat.equals(DocValueFormat.RAW)) {
expected = "{\n" +
" \"values\" : [\n" +
" {\n" +
" \"key\" : 1.0,\n" +
" \"value\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 2.0,\n" +
" \"value\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 3.0,\n" +
" \"value\" : null\n" +
" }\n" +
" ]\n" +
"}";
} else {
expected = "{\n" +
" \"values\" : [\n" +
" {\n" +
" \"key\" : 1.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 2.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 3.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" }\n" +
" ]\n" +
"}";
}

assertThat(Strings.toString(builder), equalTo(expected));
}
}
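
Picking up the review exchange above about hoisting these nearly identical tests: the following is a rough, illustrative sketch of how a shared empty-xContent test could look inside InternalPercentilesRanksTestCase. The createEmptyTestInstance(...) hook and the expectedEmptyXContent(...) helper are assumptions made purely for illustration, not names from this PR; the merged change may be structured differently.

// Hypothetical shared test in InternalPercentilesRanksTestCase<T>; it assumes the HDR and
// TDigest subclasses can supply an instance backed by an empty state via an abstract hook.
public void testEmptyRanksXContent() throws IOException {
    double[] percents = new double[]{1, 2, 3};
    boolean keyed = randomBoolean();
    DocValueFormat format = randomNumericDocValueFormat();

    // createEmptyTestInstance(...) is an assumed abstract method implemented by the
    // HDR and TDigest test subclasses (e.g. wrapping an empty DoubleHistogram/TDigestState)
    T agg = createEmptyTestInstance("test", percents, keyed, format);

    XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
    builder.startObject();
    agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS);
    builder.endObject();

    // the expected JSON is identical for both implementations, so the four keyed/format
    // variants spelled out in the tests above would only need to be written once here
    assertThat(Strings.toString(builder), equalTo(expectedEmptyXContent(keyed, format)));
}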

@@ -20,21 +20,28 @@
package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;

import org.HdrHistogram.DoubleHistogram;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.equalTo;

public class InternalHDRPercentilesTests extends InternalPercentilesTestCase<InternalHDRPercentiles> {

@@ -130,4 +137,84 @@ protected InternalHDRPercentiles mutateInstance(InternalHDRPercentiles instance)
}
return new InternalHDRPercentiles(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
}

public void testEmptyPercentilesXContent() throws IOException {
double[] percents = new double[]{1,2,3};
boolean keyed = randomBoolean();
DocValueFormat docValueFormat = randomNumericDocValueFormat();

final DoubleHistogram state = new DoubleHistogram(3);
InternalHDRPercentiles agg = new InternalHDRPercentiles("test", percents, state, keyed, docValueFormat,
Collections.emptyList(), Collections.emptyMap());

for (Percentile percentile : agg) {
Double value = percentile.getValue();
assertThat(agg.percentile(value), equalTo(Double.NaN));
assertThat(agg.percentileAsString(value), equalTo("NaN"));
}

XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
String expected;
if (keyed && docValueFormat.equals(DocValueFormat.RAW)) {
expected = "{\n" +
" \"values\" : {\n" +
" \"1.0\" : null,\n" +
" \"2.0\" : null,\n" +
" \"3.0\" : null\n" +
" }\n" +
"}";
} else if (keyed) {
expected = "{\n" +
" \"values\" : {\n" +
" \"1.0\" : null,\n" +
" \"1.0_as_string\" : null,\n" +
" \"2.0\" : null,\n" +
" \"2.0_as_string\" : null,\n" +
" \"3.0\" : null,\n" +
" \"3.0_as_string\" : null\n" +
" }\n" +
"}";
} else if (keyed == false && docValueFormat.equals(DocValueFormat.RAW)) {
expected = "{\n" +
" \"values\" : [\n" +
" {\n" +
" \"key\" : 1.0,\n" +
" \"value\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 2.0,\n" +
" \"value\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 3.0,\n" +
" \"value\" : null\n" +
" }\n" +
" ]\n" +
"}";
} else {
expected = "{\n" +
" \"values\" : [\n" +
" {\n" +
" \"key\" : 1.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 2.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 3.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" }\n" +
" ]\n" +
"}";
}
assertThat(Strings.toString(builder), equalTo(expected));
}
}

@@ -19,17 +19,26 @@

package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.equalTo;

public class InternalTDigestPercentilesRanksTests extends InternalPercentilesRanksTestCase<InternalTDigestPercentileRanks> {

@Override
@@ -118,4 +127,85 @@ protected InternalTDigestPercentileRanks mutateInstance(InternalTDigestPercentil
}
return new InternalTDigestPercentileRanks(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
}

public void testEmptyRanksXContent() throws IOException {
double[] percents = new double[]{1,2,3};
boolean keyed = randomBoolean();
DocValueFormat docValueFormat = randomNumericDocValueFormat();

final TDigestState state = new TDigestState(100);
InternalTDigestPercentileRanks agg = new InternalTDigestPercentileRanks("test", percents, state, keyed, docValueFormat,
Collections.emptyList(), Collections.emptyMap());

for (Percentile percentile : agg) {
Double value = percentile.getValue();
assertThat(agg.percent(value), equalTo(Double.NaN));
assertThat(agg.percentAsString(value), equalTo("NaN"));
}


XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
builder.startObject();
agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
String expected;
if (keyed && docValueFormat.equals(DocValueFormat.RAW)) {
expected = "{\n" +
" \"values\" : {\n" +
" \"1.0\" : null,\n" +
" \"2.0\" : null,\n" +
" \"3.0\" : null\n" +
" }\n" +
"}";
} else if (keyed) {
expected = "{\n" +
" \"values\" : {\n" +
" \"1.0\" : null,\n" +
" \"1.0_as_string\" : null,\n" +
" \"2.0\" : null,\n" +
" \"2.0_as_string\" : null,\n" +
" \"3.0\" : null,\n" +
" \"3.0_as_string\" : null\n" +
" }\n" +
"}";
} else if (keyed == false && docValueFormat.equals(DocValueFormat.RAW)) {
expected = "{\n" +
" \"values\" : [\n" +
" {\n" +
" \"key\" : 1.0,\n" +
" \"value\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 2.0,\n" +
" \"value\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 3.0,\n" +
" \"value\" : null\n" +
" }\n" +
" ]\n" +
"}";
} else {
expected = "{\n" +
" \"values\" : [\n" +
" {\n" +
" \"key\" : 1.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 2.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" },\n" +
" {\n" +
" \"key\" : 3.0,\n" +
" \"value\" : null,\n" +
" \"value_as_string\" : null\n" +
" }\n" +
" ]\n" +
"}";
}
assertThat(Strings.toString(builder), equalTo(expected));
}
}