
Commit a553168

Adding additional tests for agg parsing in datafeedconfig (#36261)
* Adding additional tests for agg parsing in datafeedconfig
* Fixing bug, adding yml test
1 parent 5f3893f commit a553168

3 files changed: +168 additions, -5 deletions

x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformer.java

Lines changed: 2 additions & 1 deletion
@@ -46,7 +46,8 @@ public class XContentObjectTransformer<T extends ToXContentObject> {
     public static XContentObjectTransformer<AggregatorFactories.Builder> aggregatorTransformer() {
         return new XContentObjectTransformer<>(searchRegistry, (p) -> {
             // Serializing a map creates an object, need to skip the start object for the aggregation parser
-            assert(XContentParser.Token.START_OBJECT.equals(p.nextToken()));
+            XContentParser.Token token = p.nextToken();
+            assert(XContentParser.Token.START_OBJECT.equals(token));
             return AggregatorFactories.parseAggregators(p);
         });
     }
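
The one-line change above is the bug fix mentioned in the commit message: p.nextToken() used to be called inside the assert, and Java skips the whole assert expression when assertions are disabled (the JVM default in production), so the parser was never advanced past the opening START_OBJECT before parseAggregators ran. Pulling the call out of the assertion makes the advance unconditional. A minimal standalone sketch of the hazard, using a hypothetical demo class rather than the real parser:

// Illustration only (not the Elasticsearch class): a side effect hidden inside an
// assert silently disappears when assertions are disabled, so the state change never happens.
public class AssertSideEffectDemo {
    private int position = 0;

    // Advances to the next token and returns it (stand-in for XContentParser.nextToken()).
    private int nextToken() {
        return ++position;
    }

    public void buggy() {
        // With assertions disabled this entire statement is skipped: position is never advanced.
        assert nextToken() == 1;
    }

    public void fixed() {
        // The side effect now runs unconditionally; only the comparison is skipped when assertions are off.
        int token = nextToken();
        assert token == 1;
    }

    public static void main(String[] args) {
        AssertSideEffectDemo a = new AssertSideEffectDemo();
        a.buggy();
        // Prints 0 with the default JVM settings (assertions off), 1 when run with -ea.
        System.out.println("after buggy(): position = " + a.position);

        AssertSideEffectDemo b = new AssertSideEffectDemo();
        b.fixed();
        // Prints 1 regardless of whether assertions are enabled.
        System.out.println("after fixed(): position = " + b.position);
    }
}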

x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java

Lines changed: 109 additions & 3 deletions
@@ -8,28 +8,38 @@
 import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
 
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParseException;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
 import org.elasticsearch.test.AbstractSerializingTestCase;
@@ -83,7 +93,7 @@ public static DatafeedConfig createRandomizedDatafeedConfig(String jobId, long b
         if (randomBoolean() && addScriptFields == false) {
             // can only test with a single agg as the xcontent order gets randomized by test base class and then
             // the actual xcontent isn't the same and test fail.
-            // Testing with a single agg is ok as we don't have special list writeable / xconent logic
+            // Testing with a single agg is ok as we don't have special list writeable / xcontent logic
             AggregatorFactories.Builder aggs = new AggregatorFactories.Builder();
             aggHistogramInterval = randomNonNegativeLong();
             aggHistogramInterval = aggHistogramInterval> bucketSpanMillis ? bucketSpanMillis : aggHistogramInterval;
@@ -567,6 +577,98 @@ public void testDefaultFrequency_GivenAggregationsWithHistogramInterval_1_Hour()
         assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(12)));
     }
 
+    public void testSerializationOfComplexAggs() throws IOException {
+        MaxAggregationBuilder maxTime = AggregationBuilders.max("timestamp").field("timestamp");
+        AvgAggregationBuilder avgAggregationBuilder = AggregationBuilders.avg("bytes_in_avg").field("system.network.in.bytes");
+        DerivativePipelineAggregationBuilder derivativePipelineAggregationBuilder =
+            PipelineAggregatorBuilders.derivative("bytes_in_derivative", "bytes_in_avg");
+        BucketScriptPipelineAggregationBuilder bucketScriptPipelineAggregationBuilder =
+            PipelineAggregatorBuilders.bucketScript("non_negative_bytes",
+                Collections.singletonMap("bytes", "bytes_in_derivative"),
+                new Script("params.bytes > 0 ? params.bytes : null"));
+        DateHistogramAggregationBuilder dateHistogram =
+            AggregationBuilders.dateHistogram("histogram_buckets")
+                .field("timestamp").interval(300000).timeZone(DateTimeZone.UTC)
+                .subAggregation(maxTime)
+                .subAggregation(avgAggregationBuilder)
+                .subAggregation(derivativePipelineAggregationBuilder)
+                .subAggregation(bucketScriptPipelineAggregationBuilder);
+        DatafeedConfig.Builder datafeedConfigBuilder = createDatafeedBuilderWithDateHistogram(dateHistogram);
+        QueryBuilder terms =
+            new BoolQueryBuilder().filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
+        datafeedConfigBuilder.setParsedQuery(terms);
+        DatafeedConfig datafeedConfig = datafeedConfigBuilder.build();
+        AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder().addAggregator(dateHistogram);
+
+
+        XContentType xContentType = XContentType.JSON;
+        BytesReference bytes = XContentHelper.toXContent(datafeedConfig, xContentType, false);
+        XContentParser parser = XContentHelper.createParser(xContentRegistry(),
+            DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+            bytes,
+            xContentType);
+
+        DatafeedConfig parsedDatafeedConfig = doParseInstance(parser);
+        assertEquals(datafeedConfig, parsedDatafeedConfig);
+
+        // Assert that the parsed versions of our aggs and queries work as well
+        assertEquals(aggBuilder, parsedDatafeedConfig.getParsedAggregations());
+        assertEquals(terms, parsedDatafeedConfig.getParsedQuery());
+
+        try(BytesStreamOutput output = new BytesStreamOutput()) {
+            datafeedConfig.writeTo(output);
+            try(StreamInput streamInput = output.bytes().streamInput()) {
+                DatafeedConfig streamedDatafeedConfig = new DatafeedConfig(streamInput);
+                assertEquals(datafeedConfig, streamedDatafeedConfig);
+
+                // Assert that the parsed versions of our aggs and queries work as well
+                assertEquals(aggBuilder, streamedDatafeedConfig.getParsedAggregations());
+                assertEquals(terms, streamedDatafeedConfig.getParsedQuery());
+            }
+        }
+    }
+
+    public void testSerializationOfComplexAggsBetweenVersions() throws IOException {
+        MaxAggregationBuilder maxTime = AggregationBuilders.max("timestamp").field("timestamp");
+        AvgAggregationBuilder avgAggregationBuilder = AggregationBuilders.avg("bytes_in_avg").field("system.network.in.bytes");
+        DerivativePipelineAggregationBuilder derivativePipelineAggregationBuilder =
+            PipelineAggregatorBuilders.derivative("bytes_in_derivative", "bytes_in_avg");
+        BucketScriptPipelineAggregationBuilder bucketScriptPipelineAggregationBuilder =
+            PipelineAggregatorBuilders.bucketScript("non_negative_bytes",
+                Collections.singletonMap("bytes", "bytes_in_derivative"),
+                new Script("params.bytes > 0 ? params.bytes : null"));
+        DateHistogramAggregationBuilder dateHistogram =
+            AggregationBuilders.dateHistogram("histogram_buckets")
+                .field("timestamp").interval(300000).timeZone(DateTimeZone.UTC)
+                .subAggregation(maxTime)
+                .subAggregation(avgAggregationBuilder)
+                .subAggregation(derivativePipelineAggregationBuilder)
+                .subAggregation(bucketScriptPipelineAggregationBuilder);
+        DatafeedConfig.Builder datafeedConfigBuilder = createDatafeedBuilderWithDateHistogram(dateHistogram);
+        QueryBuilder terms =
+            new BoolQueryBuilder().filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
+        datafeedConfigBuilder.setParsedQuery(terms);
+        DatafeedConfig datafeedConfig = datafeedConfigBuilder.build();
+
+        SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
+        NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables());
+
+        try (BytesStreamOutput output = new BytesStreamOutput()) {
+            output.setVersion(Version.V_6_0_0);
+            datafeedConfig.writeTo(output);
+            try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) {
+                in.setVersion(Version.V_6_0_0);
+                DatafeedConfig streamedDatafeedConfig = new DatafeedConfig(in);
+                assertEquals(datafeedConfig, streamedDatafeedConfig);
+
+                // Assert that the parsed versions of our aggs and queries work as well
+                assertEquals(new AggregatorFactories.Builder().addAggregator(dateHistogram),
+                    streamedDatafeedConfig.getParsedAggregations());
+                assertEquals(terms, streamedDatafeedConfig.getParsedQuery());
+            }
+        }
+    }
+
     public static String randomValidDatafeedId() {
         CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
         return generator.ofCodePointsLength(random(), 10, 10);
@@ -590,14 +692,18 @@ private static DatafeedConfig createDatafeedWithDateHistogram(Long interval) {
         return createDatafeedWithDateHistogram(dateHistogram);
     }
 
-    private static DatafeedConfig createDatafeedWithDateHistogram(DateHistogramAggregationBuilder dateHistogram) {
+    private static DatafeedConfig.Builder createDatafeedBuilderWithDateHistogram(DateHistogramAggregationBuilder dateHistogram) {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
         builder.setIndices(Collections.singletonList("myIndex"));
         builder.setTypes(Collections.singletonList("myType"));
         AggregatorFactories.Builder aggs = new AggregatorFactories.Builder().addAggregator(dateHistogram);
         DatafeedConfig.validateAggregations(aggs);
         builder.setParsedAggregations(aggs);
-        return builder.build();
+        return builder;
+    }
+
+    private static DatafeedConfig createDatafeedWithDateHistogram(DateHistogramAggregationBuilder dateHistogram) {
+        return createDatafeedBuilderWithDateHistogram(dateHistogram).build();
     }
 
     @Override
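
Both new tests above follow the same round-trip shape: build a DatafeedConfig containing pipeline aggregations, push it through a serialized representation (XContent bytes in the first test, the Writeable wire format pinned to Version.V_6_0_0 in the second), rebuild it, and assert equality on the whole object as well as on the views returned by getParsedAggregations() and getParsedQuery(). A JDK-only sketch of that testing shape, with a hypothetical ConfigLike class standing in for the real config and Java serialization standing in for the Elasticsearch stream format:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Objects;

// Hypothetical stand-in for a config object such as DatafeedConfig: immutable, with a meaningful equals().
final class ConfigLike implements Serializable {
    final String jobId;
    final long histogramIntervalMillis;

    ConfigLike(String jobId, long histogramIntervalMillis) {
        this.jobId = jobId;
        this.histogramIntervalMillis = histogramIntervalMillis;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof ConfigLike)) {
            return false;
        }
        ConfigLike other = (ConfigLike) o;
        return histogramIntervalMillis == other.histogramIntervalMillis && Objects.equals(jobId, other.jobId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(jobId, histogramIntervalMillis);
    }
}

public class RoundTripSketch {
    public static void main(String[] args) throws Exception {
        ConfigLike original = new ConfigLike("datafeeds-crud-1", 300_000L);

        // Serialize the config to bytes (analogous to datafeedConfig.writeTo(output) in the test above).
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(original);
        }

        // Rebuild it from those bytes (analogous to new DatafeedConfig(streamInput)).
        ConfigLike roundTripped;
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            roundTripped = (ConfigLike) in.readObject();
        }

        // The point of the round trip: nothing may be lost in a way that breaks equality.
        if (!original.equals(roundTripped)) {
            throw new AssertionError("round trip lost information");
        }
        System.out.println("round trip preserved the config");
    }
}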

x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml

Lines changed: 57 additions & 1 deletion
@@ -11,7 +11,8 @@ setup:
             "job_id":"datafeeds-crud-1",
             "analysis_config" : {
               "bucket_span": "1h",
-              "detectors" :[{"function":"count"}]
+              "detectors" :[{"function":"count"}],
+              "summary_count_field_name": "doc_count"
             },
             "data_description" : {
               "format":"xcontent",
@@ -321,6 +322,61 @@ setup:
   - match: { chunking_config.mode: "manual" }
   - match: { chunking_config.time_span: "1h" }
 
+---
+"Test put datafeed with aggregations":
+  - do:
+      xpack.ml.put_datafeed:
+        datafeed_id: test-datafeed-aggs-1
+        body: >
+          {
+            "job_id":"datafeeds-crud-1",
+            "indices":["index-foo"],
+            "types":["type-bar"],
+            "aggs": {
+              "histogram_buckets":{
+                "date_histogram": {
+                  "field": "@timestamp",
+                  "interval": "5m",
+                  "time_zone": "UTC",
+                  "min_doc_count": 0
+                },
+                "aggs": {
+                  "@timestamp": {
+                    "max": {
+                      "field": "@timestamp"
+                    }
+                  },
+                  "bytes_in_avg": {
+                    "avg": {
+                      "field": "system.network.in.bytes"
+                    }
+                  },
+                  "bytes_in_derivative": {
+                    "derivative": {
+                      "buckets_path": "bytes_in_avg"
+                    }
+                  },
+                  "non_negative_bytes": {
+                    "bucket_script": {
+                      "buckets_path": {
+                        "bytes": "bytes_in_derivative"
+                      },
+                      "script": "params.bytes > 0 ? params.bytes : null"
+                    }
+                  }
+                }
+              }
+            }
+          }
+  - do:
+      xpack.ml.get_datafeeds:
+        datafeed_id: test-datafeed-aggs-1
+  - match: { datafeeds.0.datafeed_id: "test-datafeed-aggs-1" }
+  - match: { datafeeds.0.aggregations.histogram_buckets.date_histogram.field: "@timestamp" }
+  - match: { datafeeds.0.aggregations.histogram_buckets.aggregations.@timestamp.max.field: "@timestamp" }
+  - match: { datafeeds.0.aggregations.histogram_buckets.aggregations.bytes_in_avg.avg.field: "system.network.in.bytes" }
+  - match: { datafeeds.0.aggregations.histogram_buckets.aggregations.non_negative_bytes.bucket_script.buckets_path.bytes: "bytes_in_derivative" }
+
 ---
 "Test delete datafeed":
   - do:
