Support routing_rules.yml and ingest pipeline test for reroute processor #1372

Merged · 12 commits · Aug 7, 2023

2 changes: 1 addition & 1 deletion internal/benchrunner/runners/pipeline/benchmark.go
@@ -303,7 +303,7 @@ func (r *runner) runSingleBenchmark(entryPipeline string, docs []json.RawMessage
return ingestResult{}, errors.New("no docs supplied for benchmark")
}

- if _, err := ingest.SimulatePipeline(r.options.API, entryPipeline, docs); err != nil {
+ if _, err := ingest.SimulatePipeline(r.options.API, entryPipeline, docs, "test-generic-default"); err != nil {
return ingestResult{}, fmt.Errorf("simulate failed: %w", err)
}
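Note: the benchmark runner has no concrete data stream to route into, so it passes a fixed placeholder index name (`test-generic-default`, shaped like Elasticsearch's `<type>-<dataset>-<namespace>` data stream names) to satisfy the new `SimulatePipeline` signature.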

125 changes: 124 additions & 1 deletion internal/elasticsearch/ingest/datastream.go
@@ -16,11 +16,41 @@ import (
"strings"
"time"

"gopkg.in/yaml.v3"

"github.com/elastic/elastic-package/internal/elasticsearch"
"github.com/elastic/elastic-package/internal/packages"
)

- var ingestPipelineTag = regexp.MustCompile(`{{\s*IngestPipeline.+}}`)
+ var (
+ ingestPipelineTag = regexp.MustCompile(`{{\s*IngestPipeline.+}}`)
+ defaultPipelineJSON = "default.json"
+ defaultPipelineYML = "default.yml"
+ )

type Rule struct {
TargetDataset interface{} `yaml:"target_dataset"`
If string `yaml:"if"`
Namespace interface{} `yaml:"namespace"`
}

type RoutingRule struct {
SourceDataset string `yaml:"source_dataset"`
Rules []Rule `yaml:"rules"`
}

type ESIngestPipeline struct {
Description string `yaml:"description"`
Processors []map[string]interface{} `yaml:"processors"`
AdditionalFields map[string]interface{} `yaml:",inline"`
}

type RerouteProcessor struct {
Tag string `yaml:"tag"`
If string `yaml:"if"`
Dataset []string `yaml:"dataset"`
Namespace []string `yaml:"namespace"`
}
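TargetDataset and Namespace are `interface{}` because routing_rules.yml accepts either a single string or a list in both positions. A minimal standalone sketch (hypothetical program, not part of this PR) of what yaml.v3 hands these structs for each shape:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Rule and RoutingRule mirror the structs defined above.
type Rule struct {
	TargetDataset interface{} `yaml:"target_dataset"`
	If            string      `yaml:"if"`
	Namespace     interface{} `yaml:"namespace"`
}

type RoutingRule struct {
	SourceDataset string `yaml:"source_dataset"`
	Rules         []Rule `yaml:"rules"`
}

func main() {
	doc := []byte(`
- source_dataset: aws.cloudwatch
  rules:
    - target_dataset: aws.cloudtrail
      if: ctx['aws.cloudwatch.log_stream'].contains('CloudTrail')
      namespace:
        - "{{labels.data_stream.namespace}}"
        - default
`)
	var rules []RoutingRule
	if err := yaml.Unmarshal(doc, &rules); err != nil {
		panic(err)
	}
	r := rules[0].Rules[0]
	// The scalar decodes as string, the sequence as []interface{};
	// convertValue below normalizes both into []string.
	fmt.Printf("%T %T\n", r.TargetDataset, r.Namespace) // string []interface {}
}
```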

func InstallDataStreamPipelines(api *elasticsearch.API, dataStreamPath string) (string, []Pipeline, error) {
dataStreamManifest, err := packages.ReadDataStreamManifest(filepath.Join(dataStreamPath, packages.DataStreamManifestFile))
@@ -70,6 +100,32 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er
pipelineTag := s[1]
return []byte(getPipelineNameWithNonce(pipelineTag, nonce))
})

// Unmarshal the YAML data into an ESIngestPipeline struct
var esPipeline ESIngestPipeline
err = yaml.Unmarshal(c, &esPipeline)
if err != nil {
return nil, fmt.Errorf("failed to unmarshal ingest pipeline YAML data (path: %s): %w", path, err)
}

// Read routing_rules.yml (if present) and convert its rules into reroute processors
rerouteProcessors, err := loadRoutingRuleFile(dataStreamPath)
if err != nil {
return nil, fmt.Errorf("loading routing_rules.yml failed: %w", err)
}

// Reroute processors from routing_rules.yml are appended only to the default pipeline
filename := filepath.Base(path)
if filename == defaultPipelineJSON || filename == defaultPipelineYML {
esPipeline.Processors = append(esPipeline.Processors, rerouteProcessors...)
}

c, err = yaml.Marshal(esPipeline)
if err != nil {
return nil, fmt.Errorf("failed to marshal modified ingest pipeline YAML data (path: %s): %w", path, err)
}

name := filename
pipelines = append(pipelines, Pipeline{
Path: path,
@@ -81,6 +137,73 @@ func loadIngestPipelineFiles(dataStreamPath string, nonce int64) ([]Pipeline, er
return pipelines, nil
}
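As the filename gate above shows, generated reroute processors land only in the data stream's default pipeline. A self-contained sketch (invented processor values; simplified mirrors of the ESIngestPipeline and RerouteProcessor types from this file) of what the re-marshalled default.yml body looks like:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Simplified mirrors of the types above (inline catch-all omitted).
type ESIngestPipeline struct {
	Description string                   `yaml:"description"`
	Processors  []map[string]interface{} `yaml:"processors"`
}

type RerouteProcessor struct {
	Tag       string   `yaml:"tag"`
	If        string   `yaml:"if"`
	Dataset   []string `yaml:"dataset"`
	Namespace []string `yaml:"namespace"`
}

func main() {
	p := ESIngestPipeline{
		Description: "Pipeline for AWS CloudWatch logs",
		Processors: []map[string]interface{}{
			{"rename": map[string]string{"field": "message", "target_field": "event.original"}},
			// Appended from routing_rules.yml, after the package's own processors.
			{"reroute": RerouteProcessor{
				Tag:       "aws.cloudwatch",
				If:        "ctx['aws.cloudwatch.log_stream'].contains('CloudTrail')",
				Dataset:   []string{"aws.cloudtrail"},
				Namespace: []string{"default"},
			}},
		},
	}
	out, err := yaml.Marshal(p)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // description, then processors with reroute last
}
```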

func loadRoutingRuleFile(dataStreamPath string) ([]map[string]interface{}, error) {
routingRulePath := filepath.Join(dataStreamPath, "routing_rules.yml")
c, err := os.ReadFile(routingRulePath)
if err != nil {
// routing_rules.yml is optional; a missing file means no routing rules
if os.IsNotExist(err) {
return nil, nil
}
return nil, fmt.Errorf("reading routing_rules.yml failed (path: %s): %w", routingRulePath, err)
}

// Unmarshal the YAML content into routing rules
var routingRules []RoutingRule
err = yaml.Unmarshal(c, &routingRules)
if err != nil {
return nil, fmt.Errorf("unmarshalling routing_rules.yml content failed: %w", err)
}

// Convert each rule into a reroute processor definition
var rerouteProcessors []map[string]interface{}
for _, r := range routingRules {
for _, rule := range r.Rules {
td, err := convertValue(rule.TargetDataset, "target_dataset")
if err != nil {
return nil, fmt.Errorf("convertValue failed: %w", err)
}

ns, err := convertValue(rule.Namespace, "namespace")
if err != nil {
return nil, fmt.Errorf("convertValue failed: %w", err)
}

processor := make(map[string]interface{})
processor["reroute"] = RerouteProcessor{
Tag: r.SourceDataset,
If: rule.If,
Dataset: td,
Namespace: ns,
}
rerouteProcessors = append(rerouteProcessors, processor)
}
}
return rerouteProcessors, nil
}

func convertValue(value interface{}, label string) ([]string, error) {
switch value := value.(type) {
case string:
return []string{value}, nil
case []string:
return value, nil
case []interface{}:
result := make([]string, 0, len(value))
for _, v := range value {
if vStr, ok := v.(string); ok {
result = append(result, vStr)
} else {
return nil, fmt.Errorf("%s in routing_rules.yml has to be a string or an array of strings: %v", label, value)
}
}
return result, nil
default:
return nil, fmt.Errorf("%s in routing_rules.yml has to be a string or an array of strings: %v", label, value)
}
}
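convertValue normalizes the two accepted shapes into []string and rejects everything else. A test-style sketch of that contract (hypothetical test function; testify is already a dependency of this package's tests):

```go
func TestConvertValueShapes(t *testing.T) {
	// A scalar becomes a one-element slice.
	td, err := convertValue("aws.cloudtrail", "target_dataset")
	assert.NoError(t, err)
	assert.Equal(t, []string{"aws.cloudtrail"}, td)

	// A YAML sequence arrives as []interface{} and is converted element-wise.
	ns, err := convertValue([]interface{}{"default", "prod"}, "namespace")
	assert.NoError(t, err)
	assert.Equal(t, []string{"default", "prod"}, ns)

	// Anything else (here, an int) is an error.
	_, err = convertValue(1, "namespace")
	assert.Error(t, err)
}
```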

func installPipelinesInElasticsearch(api *elasticsearch.API, pipelines []Pipeline) error {
for _, p := range pipelines {
if err := installPipeline(api, p); err != nil {
99 changes: 99 additions & 0 deletions internal/elasticsearch/ingest/datastream_test.go
@@ -0,0 +1,99 @@
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License;
// you may not use this file except in compliance with the Elastic License.

package ingest

import (
"testing"

"github.com/stretchr/testify/assert"
)

func TestLoadRoutingRuleFileGoodSingleRule(t *testing.T) {
mockDataStreamPath := "../testdata/routing_rules/good/single_rule"
rerouteProcessors, err := loadRoutingRuleFile(mockDataStreamPath)
assert.NoError(t, err)
assert.Equal(t, 3, len(rerouteProcessors))

expectedProcessors := map[string]struct {
expectedIf string
expectedDataset []string
expectedNamespace []string
}{
"multiple_namespace": {
"ctx['aws.cloudwatch.log_stream'].contains('CloudTrail')",
[]string{"aws.cloudtrail"},
[]string{"{{labels.data_stream.namespace}}", "default"},
},
"multiple_target_dataset": {
"ctx['aws.cloudwatch.log_stream'].contains('Firewall')",
[]string{"aws.firewall_logs", "aws.test_logs"},
[]string{"default"},
},
"single_namespace_target_dataset": {
"ctx['aws.cloudwatch.log_stream'].contains('Route53')",
[]string{"aws.route53_public_logs"},
[]string{"{{labels.data_stream.namespace}}"},
},
}

for _, rerouteProcessor := range rerouteProcessors {
p := rerouteProcessor["reroute"].(RerouteProcessor)
assert.Equal(t, expectedProcessors[p.Tag].expectedIf, p.If)
assert.Equal(t, expectedProcessors[p.Tag].expectedDataset, p.Dataset)
assert.Equal(t, expectedProcessors[p.Tag].expectedNamespace, p.Namespace)
}
}

func TestLoadRoutingRuleFileGoodMultipleRules(t *testing.T) {
mockDataStreamPath := "../testdata/routing_rules/good/multiple_rules"
rerouteProcessors, err := loadRoutingRuleFile(mockDataStreamPath)
assert.NoError(t, err)
assert.Equal(t, 2, len(rerouteProcessors))

expectedProcessors := map[string]struct {
expectedSourceDataset string
expectedDataset []string
expectedNamespace []string
}{
"ctx['aws.cloudwatch.log_stream'].contains('Test1')": {
"multiple_rules",
[]string{"aws.test1_logs"},
[]string{"default"},
},
"ctx['aws.cloudwatch.log_stream'].contains('Test2')": {
"multiple_rules",
[]string{"aws.test2_logs"},
[]string{"{{labels.data_stream.namespace}}"},
},
}

for _, rerouteProcessor := range rerouteProcessors {
p := rerouteProcessor["reroute"].(RerouteProcessor)
assert.Equal(t, expectedProcessors[p.If].expectedSourceDataset, p.Tag)
assert.Equal(t, expectedProcessors[p.If].expectedDataset, p.Dataset)
assert.Equal(t, expectedProcessors[p.If].expectedNamespace, p.Namespace)
}
}

func TestLoadRoutingRuleFileGoodEmpty(t *testing.T) {
mockDataStreamPath := "../testdata/routing_rules/good/empty"
rerouteProcessors, err := loadRoutingRuleFile(mockDataStreamPath)
assert.Equal(t, 0, len(rerouteProcessors))
assert.NoError(t, err)
}

func TestLoadRoutingRuleFileBadMultipleSourceDataset(t *testing.T) {
mockDataStreamPath := "../testdata/routing_rules/bad/multiple_source_dataset"
rerouteProcessors, err := loadRoutingRuleFile(mockDataStreamPath)
assert.Equal(t, 0, len(rerouteProcessors))
assert.Error(t, err)
}

func TestLoadRoutingRuleFileBadNotString(t *testing.T) {
mockDataStreamPath := "../testdata/routing_rules/bad/not_string"
rerouteProcessors, err := loadRoutingRuleFile(mockDataStreamPath)
assert.Equal(t, 0, len(rerouteProcessors))
assert.Error(t, err)
}
4 changes: 3 additions & 1 deletion internal/elasticsearch/ingest/pipeline.go
@@ -26,6 +26,7 @@ type simulatePipelineResponse struct {
}

type pipelineDocument struct {
+ Index string `json:"_index"`
Source json.RawMessage `json:"_source"`
}

@@ -70,10 +71,11 @@ func (p *Pipeline) MarshalJSON() (asJSON []byte, err error) {
return asJSON, nil
}

- func SimulatePipeline(api *elasticsearch.API, pipelineName string, events []json.RawMessage) ([]json.RawMessage, error) {
+ func SimulatePipeline(api *elasticsearch.API, pipelineName string, events []json.RawMessage, simulateDataStream string) ([]json.RawMessage, error) {
var request simulatePipelineRequest
for _, event := range events {
request.Docs = append(request.Docs, pipelineDocument{
+ Index: simulateDataStream,
Source: event,
})
}
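Setting `_index` on each simulated document is what lets a reroute processor actually fire during simulation: the processor reads the document's current target index and rewrites it. A hypothetical caller (pipeline and data stream names invented) and the request body it produces:

```go
package example

import (
	"encoding/json"

	"github.com/elastic/elastic-package/internal/elasticsearch"
	"github.com/elastic/elastic-package/internal/elasticsearch/ingest"
)

func simulateWithDataStream(api *elasticsearch.API) ([]json.RawMessage, error) {
	docs := []json.RawMessage{json.RawMessage(`{"message":"hello"}`)}
	// Body sent to POST /_ingest/pipeline/<pipelineName>/_simulate:
	// {"docs":[{"_index":"logs-aws.cloudwatch-default","_source":{"message":"hello"}}]}
	return ingest.SimulatePipeline(api, "logs-aws.cloudwatch-default-pipeline", docs, "logs-aws.cloudwatch-default")
}
```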
7 changes: 7 additions & 0 deletions internal/elasticsearch/testdata/routing_rules/bad/multiple_source_dataset/routing_rules.yml
@@ -0,0 +1,7 @@
- source_dataset:
- test1
- test2
rules:
- target_dataset: aws.route53_public_logs
if: ctx['aws.cloudwatch.log_stream'].contains('Route53')
namespace: "{{labels.data_stream.namespace}}"
5 changes: 5 additions & 0 deletions internal/elasticsearch/testdata/routing_rules/bad/not_string/routing_rules.yml
@@ -0,0 +1,5 @@
- source_dataset: multiple_rules
rules:
- target_dataset: aws.test1_logs
if: ctx['aws.cloudwatch.log_stream'].contains('Test1')
namespace: 1
8 changes: 8 additions & 0 deletions internal/elasticsearch/testdata/routing_rules/good/multiple_rules/routing_rules.yml
@@ -0,0 +1,8 @@
- source_dataset: multiple_rules
rules:
- target_dataset: aws.test1_logs
if: ctx['aws.cloudwatch.log_stream'].contains('Test1')
namespace: default
- target_dataset: aws.test2_logs
if: ctx['aws.cloudwatch.log_stream'].contains('Test2')
namespace: "{{labels.data_stream.namespace}}"
19 changes: 19 additions & 0 deletions internal/elasticsearch/testdata/routing_rules/good/single_rule/routing_rules.yml
@@ -0,0 +1,19 @@
- source_dataset: multiple_namespace
rules:
- target_dataset: aws.cloudtrail
if: ctx['aws.cloudwatch.log_stream'].contains('CloudTrail')
namespace:
- "{{labels.data_stream.namespace}}"
- default
- source_dataset: multiple_target_dataset
rules:
- target_dataset:
- aws.firewall_logs
- aws.test_logs
if: ctx['aws.cloudwatch.log_stream'].contains('Firewall')
namespace: default
- source_dataset: single_namespace_target_dataset
rules:
- target_dataset: aws.route53_public_logs
if: ctx['aws.cloudwatch.log_stream'].contains('Route53')
namespace: "{{labels.data_stream.namespace}}"
31 changes: 22 additions & 9 deletions internal/fields/validate.go
@@ -42,8 +42,8 @@ type Validator struct {
// SpecVersion contains the version of the spec used by the package.
specVersion semver.Version

- // expectedDataset contains the value expected for dataset fields.
- expectedDataset string
+ // expectedDatasets contains the values expected for dataset fields.
+ expectedDatasets []string

defaultNumericConversion bool
numericKeywordFields map[string]struct{}
@@ -111,10 +111,10 @@ func WithEnabledAllowedIPCheck() ValidatorOption {
}
}

- // WithExpectedDataset configures the validator to check if the dataset fields have the expected values.
- func WithExpectedDataset(dataset string) ValidatorOption {
+ // WithExpectedDatasets configures the validator to check if the dataset field value matches one of the expected values.
+ func WithExpectedDatasets(datasets []string) ValidatorOption {
return func(v *Validator) error {
- v.expectedDataset = dataset
+ v.expectedDatasets = datasets
return nil
}
}
@@ -313,24 +313,37 @@ var datasetFieldNames = []string{

func (v *Validator) validateDocumentValues(body common.MapStr) multierror.Error {
var errs multierror.Error
- if !v.specVersion.LessThan(semver2_0_0) && v.expectedDataset != "" {
+ if !v.specVersion.LessThan(semver2_0_0) && v.expectedDatasets != nil {
for _, datasetField := range datasetFieldNames {
value, err := body.GetValue(datasetField)
if err == common.ErrKeyNotFound {
continue
}

str, ok := valueToString(value, v.disabledNormalization)
- if !ok || str != v.expectedDataset {
- err := fmt.Errorf("field %q should have value %q, it has \"%v\"",
- datasetField, v.expectedDataset, value)
+ exists := stringInArray(str, v.expectedDatasets)
+ if !ok || !exists {
+ err := fmt.Errorf("field %q should have value in %q, it has \"%v\"",
+ datasetField, v.expectedDatasets, value)
errs = append(errs, err)
}
}
}
return errs
}

// stringInArray reports whether target is present in arr.
func stringInArray(target string, arr []string) bool {
for _, item := range arr {
if item == target {
return true
}
}
return false
}
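Because a routed document may legitimately land in any of several datasets, callers now pass the full set of permissible values. A hypothetical wiring sketch (dataset names invented; CreateValidatorForDirectory is assumed to be this package's existing constructor):

```go
package example

import "github.com/elastic/elastic-package/internal/fields"

func buildValidator(dataStreamPath string) (*fields.Validator, error) {
	return fields.CreateValidatorForDirectory(dataStreamPath,
		fields.WithExpectedDatasets([]string{
			"aws.cloudwatch_logs", // the data stream's own dataset
			"aws.cloudtrail",      // a reroute target from routing_rules.yml
		}),
	)
}
```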

func valueToString(value any, disabledNormalization bool) (string, bool) {
if disabledNormalization {
// when synthetics mode is enabled, each field present in the document is an array