[ML] Add new geo_results.(actual_point|typical_point) fields for lat_long results #47050

@@ -18,7 +18,10 @@
*/
package org.elasticsearch.client.ml.job.results;

import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -256,6 +259,28 @@ void setInfluencers(List<Influence> influencers) {
this.influencers = Collections.unmodifiableList(influencers);
}

@Nullable
public GeoPoint getTypicalGeoPoint() {
if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || typical == null) {
return null;
}
if (typical.size() == 2) {
return new GeoPoint(typical.get(0), typical.get(1));
}
return null;
}

@Nullable
public GeoPoint getActualGeoPoint() {
if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || actual == null) {
return null;
}
if (actual.size() == 2) {
return new GeoPoint(actual.get(0), actual.get(1));
}
return null;
}

@Override
public int hashCode() {
return Objects.hash(probability, actual, typical, byFieldName, byFieldValue, correlatedByFieldValue, fieldName, function,
@@ -19,8 +19,11 @@
package org.elasticsearch.client.ml.job.results;

import org.elasticsearch.client.common.TimeUtil;
import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContentObject;
@@ -388,6 +391,28 @@ void setInfluencers(List<Influence> influencers) {
this.influences = Collections.unmodifiableList(influencers);
}

@Nullable
public GeoPoint getTypicalGeoPoint() {
if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || typical == null) {
return null;
}
if (typical.size() == 2) {
return new GeoPoint(typical.get(0), typical.get(1));
}
return null;
}

@Nullable
public GeoPoint getActualGeoPoint() {
if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || actual == null) {
return null;
}
if (actual.size() == 2) {
return new GeoPoint(actual.get(0), actual.get(1));
}
return null;
}

@Override
public int hashCode() {
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore,
@@ -18,12 +18,20 @@
*/
package org.elasticsearch.client.ml.job.results;

import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;

public class AnomalyCauseTests extends AbstractXContentTestCase<AnomalyCause> {

@Override
@@ -103,4 +111,40 @@ protected AnomalyCause doParseInstance(XContentParser parser) {
protected boolean supportsUnknownFields() {
return true;
}

public void testActualAsGeoPoint() {
AnomalyCause anomalyCause = new AnomalyCause();

assertThat(anomalyCause.getActualGeoPoint(), is(nullValue()));

anomalyCause.setFunction(DetectorFunction.LAT_LONG.getFullName());
assertThat(anomalyCause.getActualGeoPoint(), is(nullValue()));

anomalyCause.setActual(Collections.singletonList(80.0));
assertThat(anomalyCause.getActualGeoPoint(), is(nullValue()));

anomalyCause.setActual(Arrays.asList(90.0, 80.0));
assertThat(anomalyCause.getActualGeoPoint(), equalTo(new GeoPoint(90.0, 80.0)));

anomalyCause.setActual(Arrays.asList(10.0, 100.0, 90.0));
assertThat(anomalyCause.getActualGeoPoint(), is(nullValue()));
}

public void testTypicalAsGeoPoint() {
AnomalyCause anomalyCause = new AnomalyCause();

assertThat(anomalyCause.getTypicalGeoPoint(), is(nullValue()));

anomalyCause.setFunction(DetectorFunction.LAT_LONG.getFullName());
assertThat(anomalyCause.getTypicalGeoPoint(), is(nullValue()));

anomalyCause.setTypical(Collections.singletonList(80.0));
assertThat(anomalyCause.getTypicalGeoPoint(), is(nullValue()));

anomalyCause.setTypical(Arrays.asList(90.0, 80.0));
assertThat(anomalyCause.getTypicalGeoPoint(), equalTo(new GeoPoint(90.0, 80.0)));

anomalyCause.setTypical(Arrays.asList(10.0, 100.0, 90.0));
assertThat(anomalyCause.getTypicalGeoPoint(), is(nullValue()));
}
}
@@ -18,14 +18,21 @@
*/
package org.elasticsearch.client.ml.job.results;

import org.elasticsearch.client.ml.job.config.DetectorFunction;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;

public class AnomalyRecordTests extends AbstractXContentTestCase<AnomalyRecord> {

@Override
@@ -93,4 +100,40 @@ protected AnomalyRecord doParseInstance(XContentParser parser) {
protected boolean supportsUnknownFields() {
return true;
}

public void testActualAsGeoPoint() {
AnomalyRecord anomalyRecord = new AnomalyRecord(randomAlphaOfLength(10), new Date(), randomNonNegativeLong());

assertThat(anomalyRecord.getActualGeoPoint(), is(nullValue()));

anomalyRecord.setFunction(DetectorFunction.LAT_LONG.getFullName());
assertThat(anomalyRecord.getActualGeoPoint(), is(nullValue()));

anomalyRecord.setActual(Collections.singletonList(80.0));
assertThat(anomalyRecord.getActualGeoPoint(), is(nullValue()));

anomalyRecord.setActual(Arrays.asList(90.0, 80.0));
assertThat(anomalyRecord.getActualGeoPoint(), equalTo(new GeoPoint(90.0, 80.0)));

anomalyRecord.setActual(Arrays.asList(10.0, 100.0, 90.0));
assertThat(anomalyRecord.getActualGeoPoint(), is(nullValue()));
}

public void testTypicalAsGeoPoint() {
AnomalyRecord anomalyRecord = new AnomalyRecord(randomAlphaOfLength(10), new Date(), randomNonNegativeLong());

assertThat(anomalyRecord.getTypicalGeoPoint(), is(nullValue()));

anomalyRecord.setFunction(DetectorFunction.LAT_LONG.getFullName());
assertThat(anomalyRecord.getTypicalGeoPoint(), is(nullValue()));

anomalyRecord.setTypical(Collections.singletonList(80.0));
assertThat(anomalyRecord.getTypicalGeoPoint(), is(nullValue()));

anomalyRecord.setTypical(Arrays.asList(90.0, 80.0));
assertThat(anomalyRecord.getTypicalGeoPoint(), equalTo(new GeoPoint(90.0, 80.0)));

anomalyRecord.setTypical(Arrays.asList(10.0, 100.0, 90.0));
assertThat(anomalyRecord.getTypicalGeoPoint(), is(nullValue()));
}
}
14 changes: 12 additions & 2 deletions docs/reference/ml/anomaly-detection/apis/resultsresource.asciidoc
@@ -303,8 +303,8 @@ A record object has the following properties:
part of the core analytical modeling, these low-level anomaly records are
aggregated for their parent over field record. The causes resource contains
similar elements to the record resource, namely `actual`, `typical`,
`*_field_name` and `*_field_value`. Probability and scores are not applicable
to causes.
`geo_results.actual`, `geo_results.typical`, `*_field_name` and `*_field_value`.
Probability and scores are not applicable to causes.

`detector_index`::
(number) A unique identifier for the detector.
@@ -383,6 +383,16 @@ A record object has the following properties:
`typical`::
(array) The typical value for the bucket, according to analytical modeling.

`geo_results.actual`::
(string) The actual value for the bucket formatted as a `geo_point`.
If the detector function is `lat_long`, this is a comma delimited string
of the latitude and longitude.

`geo_results.typical`::
(string) The typical value for the bucket formatted as a `geo_point`.
If the detector function is `lat_long`, this is a comma delimited string
of the latitude and longitude.
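
For orientation, here is a minimal sketch of reading these fields through the high-level REST client getters added in this change. The GeoResultsExample class and its printGeoResults method are hypothetical helpers; the sketch assumes the anomaly records were already fetched (for example via the ML get records API) and relies on the getters returning null unless the detector function is lat_long.

import org.elasticsearch.client.ml.job.results.AnomalyRecord;
import org.elasticsearch.common.geo.GeoPoint;

import java.util.List;

public class GeoResultsExample {

    // Prints the actual and typical locations for lat_long anomaly records.
    // getActualGeoPoint()/getTypicalGeoPoint() return null for any other function,
    // or when the underlying actual/typical array does not hold exactly two values.
    static void printGeoResults(List<AnomalyRecord> records) {
        for (AnomalyRecord record : records) {
            GeoPoint actual = record.getActualGeoPoint();
            GeoPoint typical = record.getTypicalGeoPoint();
            if (actual != null && typical != null) {
                System.out.println("actual=" + actual.lat() + "," + actual.lon()
                        + " typical=" + typical.lat() + "," + typical.lon());
            }
        }
    }
}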

NOTE: Additional record properties are added, depending on the fields being
analyzed. For example, if it's analyzing `hostname` as a _by field_, then a field
`hostname` is added to the result document. This information enables you to
@@ -850,7 +850,7 @@ private XContentBuilder value(ToXContent value) throws IOException {
return value(value, ToXContent.EMPTY_PARAMS);
}

private XContentBuilder value(ToXContent value, ToXContent.Params params) throws IOException {
public XContentBuilder value(ToXContent value, ToXContent.Params params) throws IOException {
Reviewer comment (Contributor):
I don't really see why making this public would be a problem given what it does and the other public methods nearby. However, you should probably run this by somebody in the ES core infra team before merging.

There is a simple way to avoid making this public - see my next comment.

if (value == null) {
return nullValue();
}
@@ -131,6 +131,7 @@ public class ElasticsearchMappings {
public static final String BOOLEAN = "boolean";
public static final String DATE = "date";
public static final String DOUBLE = "double";
public static final String GEO_POINT = "geo_point";
public static final String INTEGER = "integer";
public static final String KEYWORD = "keyword";
public static final String LONG = "long";
@@ -885,6 +886,16 @@ private static void addAnomalyRecordFieldsToMapping(XContentBuilder builder) thr
.field(TYPE, KEYWORD)
.field(COPY_TO, ALL_FIELD_VALUES)
.endObject()
.startObject(AnomalyCause.GEO_RESULTS.getPreferredName())
.startObject(PROPERTIES)
.startObject(AnomalyCause.ACTUAL.getPreferredName())
.field(TYPE, GEO_POINT)
.endObject()
.startObject(AnomalyCause.TYPICAL.getPreferredName())
.field(TYPE, GEO_POINT)
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.startObject(AnomalyRecord.INFLUENCERS.getPreferredName())
@@ -899,6 +910,16 @@ private static void addAnomalyRecordFieldsToMapping(XContentBuilder builder) thr
.field(COPY_TO, ALL_FIELD_VALUES)
.endObject()
.endObject()
.endObject()
.startObject(AnomalyRecord.GEO_RESULTS.getPreferredName())
.startObject(PROPERTIES)
.startObject(AnomalyRecord.ACTUAL.getPreferredName())
.field(TYPE, GEO_POINT)
.endObject()
.startObject(AnomalyRecord.TYPICAL.getPreferredName())
.field(TYPE, GEO_POINT)
.endObject()
.endObject()
.endObject();
}

@@ -12,6 +12,7 @@
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.job.config.DetectorFunction;

import java.io.IOException;
import java.util.List;
@@ -39,6 +40,7 @@ public class AnomalyCause implements ToXContentObject, Writeable {
public static final ParseField TYPICAL = new ParseField("typical");
public static final ParseField ACTUAL = new ParseField("actual");
public static final ParseField INFLUENCERS = new ParseField("influencers");
public static final ParseField GEO_RESULTS = new ParseField("geo_results");

/**
* Metric Results
@@ -189,10 +191,26 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
if (influencers != null) {
builder.field(INFLUENCERS.getPreferredName(), influencers);
}
if (DetectorFunction.LAT_LONG.getFullName().equals(function)) {
builder.startObject(GEO_RESULTS.getPreferredName());
writeDoublesAsGeoPoint(builder, ACTUAL.getPreferredName(), actual);
writeDoublesAsGeoPoint(builder, TYPICAL.getPreferredName(), typical);
builder.endObject();
}
builder.endObject();
return builder;
}

private void writeDoublesAsGeoPoint(XContentBuilder builder, String fieldName, List<Double> doubles) throws IOException {
if (doubles != null) {
//TODO should we fail if `doubles` is not an array of length 2?
Reviewer comment (Contributor):
I think an internal assertion as you've got it now is best. Throwing an exception here would mean it was impossible to get anything from the results endpoint.

However, see my main comment because that would render this a moot point as the whole method could be deleted.

assert doubles.size() == 2 : "for lat_long function [" + fieldName + "] result has invalid format " + doubles;
if (doubles.size() == 2) {
builder.field(fieldName, doubles.get(0) + "," + doubles.get(1));
}
}
}


public double getProbability() {
return probability;
@@ -15,6 +15,7 @@
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.job.config.Detector;
import org.elasticsearch.xpack.core.ml.job.config.DetectorFunction;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.common.time.TimeUtils;
@@ -55,6 +56,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
public static final ParseField ACTUAL = new ParseField("actual");
public static final ParseField INFLUENCERS = new ParseField("influencers");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField GEO_RESULTS = new ParseField("geo_results");

// Used for QueryPage
public static final ParseField RESULTS_FIELD = new ParseField("records");
@@ -290,11 +292,21 @@ XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws I
builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue);
}
if (causes != null) {
builder.field(CAUSES.getPreferredName(), causes);
builder.startArray(CAUSES.getPreferredName());
for (AnomalyCause cause : causes) {
builder.value(cause, params);
Reviewer comment (Contributor):
This line could be changed to cause.toXContent(builder, params); to avoid the need to change method visibility in the X-Content builder.
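
A minimal sketch of how the causes loop would read with that suggestion applied (illustrative only, not part of this diff):

if (causes != null) {
    builder.startArray(CAUSES.getPreferredName());
    for (AnomalyCause cause : causes) {
        // Serialization is delegated to AnomalyCause directly, so the
        // XContentBuilder#value(ToXContent, Params) overload can stay private.
        cause.toXContent(builder, params);
    }
    builder.endArray();
}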

}
builder.endArray();
}
if (influences != null) {
builder.field(INFLUENCERS.getPreferredName(), influences);
}
if (DetectorFunction.LAT_LONG.getFullName().equals(function)) {
builder.startObject(GEO_RESULTS.getPreferredName());
writeDoublesAsGeoPoint(builder, ACTUAL.getPreferredName(), actual);
writeDoublesAsGeoPoint(builder, TYPICAL.getPreferredName(), typical);
builder.endObject();
}

Map<String, LinkedHashSet<String>> inputFields = inputFieldMap();
for (String fieldName : inputFields.keySet()) {
@@ -303,6 +315,17 @@ XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws I
return builder;
}


private void writeDoublesAsGeoPoint(XContentBuilder builder, String fieldName, List<Double> doubles) throws IOException {
if (doubles != null) {
//TODO should we fail if `doubles` is not an array of length 2?
assert doubles.size() == 2 : "for lat_long function [" + fieldName + "] result has invalid format " + doubles;
if (doubles.size() == 2) {
builder.field(fieldName, doubles.get(0) + "," + doubles.get(1));
}
}
}

private Map<String, LinkedHashSet<String>> inputFieldMap() {
// LinkedHashSet preserves insertion order when iterating entries
Map<String, LinkedHashSet<String>> result = new HashMap<>();