Fix key_as_string for date histogram and epoch_millis/epoch_second format

When running a `date_histogram` aggregation with `"format": "epoch_millis"` or
`"format": "epoch_second"` and a time zone other than UTC, the `key_as_string`
output in the response does not reflect the UTC timestamp that is used as the
bucket key. This happens because applying the `time_zone` in
DocValueFormat.DateTime to an epoch-based formatter adds the time zone offset to
the value being formatted. Instead, EpochTimePrinter should subtract the added
display offset to get back the UTC instant.

Closes #19038
cbuescher committed Jun 29, 2016
1 parent 57f413e commit 0d81dee
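
As a rough illustration of the symptom and the fix (hypothetical values, not part of this commit): the epoch printer used to receive the zone-shifted instant and print it as-is, so `key_as_string` gained the zone offset; subtracting the display offset restores the UTC key.

public class KeyAsStringSymptom {
    public static void main(String[] args) {
        // Hypothetical values: a daily bucket keyed at 2012-02-14T23:00:00Z,
        // rendered with "format": "epoch_millis" and "time_zone": "+01:00".
        long utcBucketKey = 1329260400000L;  // the UTC key of the bucket
        long displayOffset = 3_600_000L;     // +01:00 expressed in milliseconds

        // The formatter hands the printer the zone-shifted instant.
        long shifted = utcBucketKey + displayOffset;

        // Before the fix: the shifted value was printed directly.
        System.out.println(shifted);                  // 1329264000000 -> off by one hour
        // After the fix: the display offset is subtracted again.
        System.out.println(shifted - displayOffset);  // 1329260400000 -> matches the UTC key
    }
}
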
Showing 3 changed files with 55 additions and 5 deletions.
18 changes: 13 additions & 5 deletions core/src/main/java/org/elasticsearch/common/joda/Joda.java
@@ -43,7 +43,6 @@
import java.io.IOException;
import java.io.Writer;
import java.util.Locale;
-import java.util.regex.Pattern;

/**
*
@@ -375,21 +374,30 @@ public int estimatePrintedLength() {
return hasMilliSecondPrecision ? 19 : 16;
}


+/**
+ * We adjust the instant by displayOffset to adjust for the offset that might have been added in
+ * {@link DateTimeFormatter#printTo(Appendable, long, Chronology)} when using a time zone.
+ */
@Override
public void printTo(StringBuffer buf, long instant, Chronology chrono, int displayOffset, DateTimeZone displayZone, Locale locale) {
if (hasMilliSecondPrecision) {
-buf.append(instant);
+buf.append(instant - displayOffset);
} else {
-buf.append(instant / 1000);
+buf.append((instant - displayOffset) / 1000);
}
}

+/**
+ * We adjust the instant by displayOffset to adjust for the offset that might have been added in
+ * {@link DateTimeFormatter#printTo(Appendable, long, Chronology)} when using a time zone.
+ */
@Override
public void printTo(Writer out, long instant, Chronology chrono, int displayOffset, DateTimeZone displayZone, Locale locale) throws IOException {
if (hasMilliSecondPrecision) {
-out.write(String.valueOf(instant));
+out.write(String.valueOf(instant - displayOffset));
} else {
-out.append(String.valueOf(instant / 1000));
+out.append(String.valueOf((instant - displayOffset) / 1000));
}
}

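The displayOffset subtracted above is the zone offset that Joda's DateTimeFormatter already added to the instant before delegating to the printer. A minimal sketch of that contract, assuming plain Joda-Time (the helper and its names are illustrative, not code from this commit):

import org.joda.time.DateTimeZone;

public class EpochPrinterSketch {
    // Mirrors the printer-side adjustment: the caller passes an instant that was
    // already shifted by the zone offset, together with that offset as displayOffset.
    static String printEpochMillis(long shiftedInstant, int displayOffset) {
        return String.valueOf(shiftedInstant - displayOffset); // back to the UTC epoch value
    }

    public static void main(String[] args) {
        long utcMillis = 1329260400000L;                    // 2012-02-14T23:00:00Z
        DateTimeZone zone = DateTimeZone.forID("+01:00");
        int displayOffset = zone.getOffset(utcMillis);      // 3600000 ms
        long shifted = utcMillis + displayOffset;           // what the formatter passes down
        System.out.println(printEpochMillis(shifted, displayOffset)); // 1329260400000
    }
}
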
@@ -115,6 +115,7 @@ public double parseDouble(String value, boolean roundUp, Callable<Long> now) {
return Double.parseDouble(value);
}

+@Override
public BytesRef parseBytesRef(String value) {
return new BytesRef(value);
}
@@ -47,6 +47,7 @@
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
@@ -237,6 +238,46 @@ public void testSingleValuedFieldWithTimeZone() throws Exception {
assertThat(bucket.getDocCount(), equalTo(1L));
}

+public void testSingleValued_timeZone_epoch() throws Exception {
+String format = randomBoolean() ? "epoch_millis" : "epoch_second";
+int millisDivider = format.equals("epoch_millis") ? 1 : 1000;
+if (randomBoolean()) {
+format = format + "||date_optional_time";
+}
+DateTimeZone tz = DateTimeZone.forID("+01:00");
+SearchResponse response = client().prepareSearch("idx")
+.addAggregation(dateHistogram("histo").field("date")
+.dateHistogramInterval(DateHistogramInterval.DAY).minDocCount(1)
+.timeZone(tz).format(format))
+.execute()
+.actionGet();
+assertSearchResponse(response);
+
+Histogram histo = response.getAggregations().get("histo");
+assertThat(histo, notNullValue());
+assertThat(histo.getName(), equalTo("histo"));
+List<? extends Bucket> buckets = histo.getBuckets();
+assertThat(buckets.size(), equalTo(6));
+
+List<DateTime> expectedKeys = new ArrayList<>();
+expectedKeys.add(new DateTime(2012, 1, 1, 23, 0, DateTimeZone.UTC));
+expectedKeys.add(new DateTime(2012, 2, 1, 23, 0, DateTimeZone.UTC));
+expectedKeys.add(new DateTime(2012, 2, 14, 23, 0, DateTimeZone.UTC));
+expectedKeys.add(new DateTime(2012, 3, 1, 23, 0, DateTimeZone.UTC));
+expectedKeys.add(new DateTime(2012, 3, 14, 23, 0, DateTimeZone.UTC));
+expectedKeys.add(new DateTime(2012, 3, 22, 23, 0, DateTimeZone.UTC));
+
+
+Iterator<DateTime> keyIterator = expectedKeys.iterator();
+for (Histogram.Bucket bucket : buckets) {
+assertThat(bucket, notNullValue());
+DateTime expectedKey = keyIterator.next();
+assertThat(bucket.getKeyAsString(), equalTo(Long.toString(expectedKey.getMillis() / millisDivider)));
+assertThat(((DateTime) bucket.getKey()), equalTo(expectedKey));
+assertThat(bucket.getDocCount(), equalTo(1L));
+}
+}
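
The expected keys in the test above sit at 23:00 UTC because, with `time_zone` "+01:00", daily buckets start at local midnight, which is 23:00 UTC of the previous day, and `key_as_string` should print that UTC instant in epoch form. A small standalone illustration using Joda-Time (values chosen to match one of the expected keys; not part of the commit):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class BucketKeyIllustration {
    public static void main(String[] args) {
        DateTimeZone tz = DateTimeZone.forID("+01:00");
        // Local midnight of 2012-02-15 in the +01:00 zone...
        DateTime localDayStart = new DateTime(2012, 2, 15, 0, 0, tz);
        // ...is 23:00 UTC of the previous day, which is the bucket key.
        DateTime utcKey = localDayStart.withZone(DateTimeZone.UTC);
        System.out.println(utcKey);                       // 2012-02-14T23:00:00.000Z
        System.out.println(utcKey.getMillis());           // key_as_string for epoch_millis
        System.out.println(utcKey.getMillis() / 1000);    // key_as_string for epoch_second
    }
}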

public void testSingleValuedFieldOrderedByKeyAsc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
