[ML] Label anomalies with multi_bucket_impact #34233

Merged (4 commits) on Oct 4, 2018
@@ -48,6 +48,7 @@ public class AnomalyRecord implements ToXContentObject {
* Result fields (all detector types)
*/
public static final ParseField PROBABILITY = new ParseField("probability");
public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact");
public static final ParseField DETECTOR_INDEX = new ParseField("detector_index");
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
@@ -94,6 +95,7 @@ public class AnomalyRecord implements ToXContentObject {
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
PARSER.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT);
PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX);
@@ -117,6 +119,7 @@ public class AnomalyRecord implements ToXContentObject {
private final String jobId;
private int detectorIndex;
private double probability;
private Double multiBucketImpact;
private String byFieldName;
private String byFieldValue;
private String correlatedByFieldValue;
@@ -155,6 +158,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.field(Job.ID.getPreferredName(), jobId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
builder.field(PROBABILITY.getPreferredName(), probability);
if (multiBucketImpact != null) {
builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact);
}
builder.field(RECORD_SCORE.getPreferredName(), recordScore);
builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
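Both the client class here and the x-pack core class later in the diff wrap the new field in a null check, so `multi_bucket_impact` is omitted from the JSON rendering entirely when the backend did not supply it, rather than being emitted as null. A minimal sketch of that behaviour, assuming the 6.x `org.elasticsearch.common.xcontent` API on the classpath; the probability value is made up:

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class OptionalFieldRenderingSketch {
    public static void main(String[] args) throws Exception {
        Double multiBucketImpact = null;  // e.g. a result written before this field existed

        XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
        builder.field("probability", 1.0E-7);
        if (multiBucketImpact != null) {  // same guard as in the PR
            builder.field("multi_bucket_impact", multiBucketImpact);
        }
        builder.endObject();

        // Prints {"probability":1.0E-7}; the unset field is simply absent.
        System.out.println(Strings.toString(builder));
    }
}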
@@ -254,6 +260,14 @@ void setProbability(double value) {
probability = value;
}

public double getMultiBucketImpact() {
return multiBucketImpact;
}

void setMultiBucketImpact(double value) {
multiBucketImpact = value;
}

public String getByFieldName() {
return byFieldName;
}
@@ -376,7 +390,7 @@ void setInfluencers(List<Influence> influencers) {

@Override
public int hashCode() {
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, recordScore,
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore,
initialRecordScore, typical, actual, function, functionDescription, fieldName,
byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName,
partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim,
@@ -399,6 +413,7 @@ public boolean equals(Object other) {
&& this.detectorIndex == that.detectorIndex
&& this.bucketSpan == that.bucketSpan
&& this.probability == that.probability
&& Objects.equals(this.multiBucketImpact, that.multiBucketImpact)
&& this.recordScore == that.recordScore
&& this.initialRecordScore == that.initialRecordScore
&& Objects.deepEquals(this.typical, that.typical)
@@ -38,6 +38,9 @@ public static AnomalyRecord createTestInstance(String jobId) {
anomalyRecord.setActual(Collections.singletonList(randomDouble()));
anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
anomalyRecord.setProbability(randomDouble());
if (randomBoolean()) {
anomalyRecord.setMultiBucketImpact(randomDouble());
}
anomalyRecord.setRecordScore(randomDouble());
anomalyRecord.setInitialRecordScore(randomDouble());
anomalyRecord.setInterim(randomBoolean());
5 changes: 5 additions & 0 deletions docs/reference/ml/apis/resultsresource.asciidoc
@@ -364,6 +364,11 @@ A record object has the following properties:
//In scientific notation, a value of 3.24E-300 is highly unlikely and therefore
//highly anomalous.

`multi_bucket_impact`::
(number) An indication of how strongly the anomaly is multi-bucket or single-bucket.
The value is on a scale of -5 to +5, where -5 means the anomaly is purely
single-bucket and +5 means it is purely multi-bucket.

`record_score`::
(number) A normalized score between 0-100, which is based on the probability
of the anomalousness of this record. Unlike `initial_record_score`, this
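The scale just documented runs from -5 (purely single-bucket) to +5 (purely multi-bucket), so a consumer can triage records by where the value falls. A minimal sketch against the client getter added in this PR (package assumed to be the high-level REST client's `org.elasticsearch.client.ml.job.results`); the +/-2 cut-offs are illustrative choices, not part of the API, and because the field is optional and the getter unboxes a nullable Double, it should only be called on results that carry the field:

import org.elasticsearch.client.ml.job.results.AnomalyRecord;

public class MultiBucketImpactTriage {

    // The +/-2 thresholds are arbitrary illustrative cut-offs, not API-defined.
    static String describe(AnomalyRecord record) {
        double impact = record.getMultiBucketImpact();  // would NPE if the field was never set
        if (impact <= -2.0) {
            return "predominantly single-bucket anomaly";
        } else if (impact >= 2.0) {
            return "predominantly multi-bucket anomaly";
        } else {
            return "mixed single- and multi-bucket anomaly";
        }
    }
}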
@@ -372,6 +372,9 @@ private static void addAnomalyRecordFieldsToMapping(XContentBuilder builder) thr
.startObject(AnomalyRecord.PROBABILITY.getPreferredName())
.field(TYPE, DOUBLE)
.endObject()
.startObject(AnomalyRecord.MULTI_BUCKET_IMPACT.getPreferredName())
.field(TYPE, DOUBLE)
.endObject()
.startObject(AnomalyRecord.FUNCTION.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
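For reference, the `TYPE` and `DOUBLE` constants used above hold the strings "type" and "double", so this fragment renders inside the results index mapping as "multi_bucket_impact": {"type": "double"}.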
@@ -19,6 +19,7 @@
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils;
import org.elasticsearch.Version;

import java.io.IOException;
import java.util.ArrayList;
@@ -44,6 +45,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
* Result fields (all detector types)
*/
public static final ParseField PROBABILITY = new ParseField("probability");
public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact");
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value");
@@ -100,6 +102,7 @@ private static ConstructingObjectParser<AnomalyRecord, Void> createParser(boolea
parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
parser.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
parser.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
parser.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT);
parser.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
parser.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
parser.declareInt(AnomalyRecord::setDetectorIndex, Detector.DETECTOR_INDEX);
@@ -127,6 +130,7 @@ private static ConstructingObjectParser<AnomalyRecord, Void> createParser(boolea
private final String jobId;
private int detectorIndex;
private double probability;
private Double multiBucketImpact;
private String byFieldName;
private String byFieldValue;
private String correlatedByFieldValue;
@@ -164,6 +168,9 @@ public AnomalyRecord(StreamInput in) throws IOException {
jobId = in.readString();
detectorIndex = in.readInt();
probability = in.readDouble();
if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
multiBucketImpact = in.readOptionalDouble();
}
byFieldName = in.readOptionalString();
byFieldValue = in.readOptionalString();
correlatedByFieldValue = in.readOptionalString();
@@ -198,6 +205,9 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeString(jobId);
out.writeInt(detectorIndex);
out.writeDouble(probability);
if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
out.writeOptionalDouble(multiBucketImpact);
}
out.writeOptionalString(byFieldName);
out.writeOptionalString(byFieldValue);
out.writeOptionalString(correlatedByFieldValue);
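The `Version.V_6_5_0` gates in the stream constructor and in `writeTo` keep the transport protocol compatible with mixed-version clusters: the optional double is written, and expected on read, only when the other side is new enough to understand it. `writeOptionalDouble` itself encodes a presence flag followed by the value. A self-contained sketch of that encoding using plain `java.io` streams; the method names mirror the Elasticsearch ones, but this is an illustration of the pattern, not the real implementation:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class OptionalDoubleWireSketch {

    // Mirrors the shape of StreamOutput#writeOptionalDouble: presence flag, then value.
    static void writeOptionalDouble(DataOutputStream out, Double value) throws IOException {
        out.writeBoolean(value != null);
        if (value != null) {
            out.writeDouble(value);
        }
    }

    // Mirrors the shape of StreamInput#readOptionalDouble.
    static Double readOptionalDouble(DataInputStream in) throws IOException {
        return in.readBoolean() ? in.readDouble() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            writeOptionalDouble(out, 3.7);   // field present
            writeOptionalDouble(out, null);  // field absent
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            System.out.println(readOptionalDouble(in));  // 3.7
            System.out.println(readOptionalDouble(in));  // null
        }
    }
}

The version check matters as much as the flag: an older receiver would not expect the extra bytes at all, so when talking to a pre-6.5 node the field is silently dropped rather than breaking the stream.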
@@ -247,6 +257,9 @@ XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws I
builder.field(Job.ID.getPreferredName(), jobId);
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
builder.field(PROBABILITY.getPreferredName(), probability);
if (multiBucketImpact != null) {
builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact);
}
builder.field(RECORD_SCORE.getPreferredName(), recordScore);
builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
@@ -389,6 +402,14 @@ public void setProbability(double value) {
probability = value;
}

public double getMultiBucketImpact() {
return multiBucketImpact;
}

public void setMultiBucketImpact(double value) {
multiBucketImpact = value;
}

public String getByFieldName() {
return byFieldName;
}
@@ -519,7 +540,7 @@ public void setInfluencers(List<Influence> influencers) {

@Override
public int hashCode() {
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, recordScore,
return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore,
initialRecordScore, typical, actual, function, functionDescription, fieldName,
byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName,
partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim,
@@ -543,6 +564,7 @@ public boolean equals(Object other) {
&& this.detectorIndex == that.detectorIndex
&& this.bucketSpan == that.bucketSpan
&& this.probability == that.probability
&& Objects.equals(this.multiBucketImpact, that.multiBucketImpact)
&& this.recordScore == that.recordScore
&& this.initialRecordScore == that.initialRecordScore
&& Objects.deepEquals(this.typical, that.typical)
@@ -57,6 +57,7 @@ public final class ReservedFieldNames {
AnomalyCause.FIELD_NAME.getPreferredName(),

AnomalyRecord.PROBABILITY.getPreferredName(),
AnomalyRecord.MULTI_BUCKET_IMPACT.getPreferredName(),
AnomalyRecord.BY_FIELD_NAME.getPreferredName(),
AnomalyRecord.BY_FIELD_VALUE.getPreferredName(),
AnomalyRecord.CORRELATED_BY_FIELD_VALUE.getPreferredName(),
@@ -37,6 +37,9 @@ public AnomalyRecord createTestInstance(String jobId) {
anomalyRecord.setActual(Collections.singletonList(randomDouble()));
anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
anomalyRecord.setProbability(randomDouble());
if (randomBoolean()) {
anomalyRecord.setMultiBucketImpact(randomDouble());
}
anomalyRecord.setRecordScore(randomDouble());
anomalyRecord.setInitialRecordScore(randomDouble());
anomalyRecord.setInterim(randomBoolean());