From 37297e7cedc98c6ffa13e2c1e372032ef2a1ed50 Mon Sep 17 00:00:00 2001 From: kkondaka <41027584+kkondaka@users.noreply.github.com> Date: Tue, 27 Jun 2023 08:42:27 -0700 Subject: [PATCH 1/9] Add support for writing tags along with events to Sink (#2850) * Updated to pass SinkContext to Sink constructors as suggested in the previous comments Signed-off-by: Krishna Kondaka * Fixed check style errors and renamed RoutedPluginSetting to SinkContextPluginSetting Signed-off-by: Krishna Kondaka * Fixed s3-sink integration test Signed-off-by: Krishna Kondaka * Added javadoc for SinkContext Signed-off-by: Krishna Kondaka --------- Signed-off-by: Krishna Kondaka Co-authored-by: Krishna Kondaka --- .../model/configuration/SinkModel.java | 30 ++++++++-- .../model/plugin/PluginFactory.java | 13 ++++ .../dataprepper/model/sink/SinkContext.java | 39 ++++++++++++ .../PipelinesDataFlowModelTest.java | 6 +- .../model/configuration/SinkModelTest.java | 13 ++-- .../model/sink/SinkContextTest.java | 33 ++++++++++ .../model/configuration/sink_plugin.yaml | 1 + .../PipelineConfigurationValidator.java | 6 +- .../dataprepper/parser/PipelineParser.java | 15 ++--- .../parser/model/PipelineConfiguration.java | 15 ++--- .../parser/model/RoutedPluginSetting.java | 24 -------- .../model/SinkContextPluginSetting.java | 24 ++++++++ .../ComponentPluginArgumentsContext.java | 10 ++++ .../plugin/DefaultPluginFactory.java | 18 +++++- .../model/PipelineConfigurationTests.java | 37 ++++++++++-- ...java => SinkContextPluginSettingTest.java} | 20 +++---- .../ComponentPluginArgumentsContextTest.java | 16 ++++- .../dataprepper/plugins/sink/FileSink.java | 10 +++- .../dataprepper/plugins/sink/StdOutSink.java | 16 +++-- .../plugins/sink/FileSinkTests.java | 44 +++++++++++--- .../plugins/sink/StdOutSinkTests.java | 4 +- .../sink/opensearch/OpenSearchSinkIT.java | 60 ++++++++++++++++--- .../sink/opensearch/OpenSearchSink.java | 7 ++- .../opensearch/index/DocumentBuilder.java | 4 +- 
.../opensearch/index/DocumentBuilderTest.java | 19 +++++- .../plugins/sink/S3SinkServiceIT.java | 4 +- .../dataprepper/plugins/sink/S3Sink.java | 9 ++- .../plugins/sink/S3SinkService.java | 8 ++- .../dataprepper/plugins/sink/codec/Codec.java | 5 +- .../plugins/sink/codec/JsonCodec.java | 6 +- .../plugins/sink/S3SinkServiceTest.java | 29 +++++---- .../dataprepper/plugins/sink/S3SinkTest.java | 7 ++- .../plugins/sink/codec/JsonCodecTest.java | 30 +++++++++- 33 files changed, 451 insertions(+), 131 deletions(-) create mode 100644 data-prepper-api/src/main/java/org/opensearch/dataprepper/model/sink/SinkContext.java create mode 100644 data-prepper-api/src/test/java/org/opensearch/dataprepper/model/sink/SinkContextTest.java delete mode 100644 data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/RoutedPluginSetting.java create mode 100644 data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/SinkContextPluginSetting.java rename data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/{RoutedPluginSettingTest.java => SinkContextPluginSettingTest.java} (64%) diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/configuration/SinkModel.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/configuration/SinkModel.java index fe01b4730c..d772ff21c6 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/configuration/SinkModel.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/configuration/SinkModel.java @@ -28,8 +28,8 @@ @JsonDeserialize(using = SinkModel.SinkModelDeserializer.class) public class SinkModel extends PluginModel { - SinkModel(final String pluginName, final List routes, final Map pluginSettings) { - this(pluginName, new SinkInternalJsonModel(routes, pluginSettings)); + SinkModel(final String pluginName, final List routes, final String tagsTargetKey, final Map pluginSettings) { + this(pluginName, new SinkInternalJsonModel(routes, 
tagsTargetKey, pluginSettings)); } private SinkModel(final String pluginName, final SinkInternalJsonModel sinkInnerModel) { @@ -46,18 +46,30 @@ public Collection getRoutes() { return this.getInternalJsonModel().routes; } + /** + * Gets the tags target key associated with this Sink. + * + * @return The tags target key + * @since 2.4 + */ + public String getTagsTargetKey() { + return this.getInternalJsonModel().tagsTargetKey; + } + public static class SinkModelBuilder { private final PluginModel pluginModel; private final List routes; + private final String tagsTargetKey; private SinkModelBuilder(final PluginModel pluginModel) { this.pluginModel = pluginModel; this.routes = Collections.emptyList(); + this.tagsTargetKey = null; } public SinkModel build() { - return new SinkModel(pluginModel.getPluginName(), routes, pluginModel.getPluginSettings()); + return new SinkModel(pluginModel.getPluginName(), routes, tagsTargetKey, pluginModel.getPluginSettings()); } } @@ -70,21 +82,27 @@ private static class SinkInternalJsonModel extends InternalJsonModel { @JsonProperty("routes") private final List routes; + @JsonInclude(JsonInclude.Include.NON_EMPTY) + @JsonProperty("tags_target_key") + private final String tagsTargetKey; + @JsonCreator - private SinkInternalJsonModel(@JsonProperty("routes") final List routes) { + private SinkInternalJsonModel(@JsonProperty("routes") final List routes, @JsonProperty("tags_target_key") final String tagsTargetKey) { super(); this.routes = routes != null ? routes : new ArrayList<>(); + this.tagsTargetKey = tagsTargetKey; } - private SinkInternalJsonModel(final List routes, final Map pluginSettings) { + private SinkInternalJsonModel(final List routes, final String tagsTargetKey, final Map pluginSettings) { super(pluginSettings); this.routes = routes != null ? 
routes : new ArrayList<>(); + this.tagsTargetKey = tagsTargetKey; } } static class SinkModelDeserializer extends AbstractPluginModelDeserializer { SinkModelDeserializer() { - super(SinkModel.class, SinkInternalJsonModel.class, SinkModel::new, () -> new SinkInternalJsonModel(null)); + super(SinkModel.class, SinkInternalJsonModel.class, SinkModel::new, () -> new SinkInternalJsonModel(null, null)); } } } diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/plugin/PluginFactory.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/plugin/PluginFactory.java index 87f0abfc30..aa6c435920 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/plugin/PluginFactory.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/plugin/PluginFactory.java @@ -5,6 +5,7 @@ package org.opensearch.dataprepper.model.plugin; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.model.configuration.PluginSetting; import java.util.List; @@ -27,6 +28,18 @@ public interface PluginFactory { */ T loadPlugin(final Class baseClass, final PluginSetting pluginSetting); + /** + * Loads a new instance of a plugin with SinkContext. + * + * @param baseClass The class type that the plugin is supporting. + * @param pluginSetting The {@link PluginSetting} to configure this plugin + * @param sinkContext The {@link SinkContext} to configure this plugin + * @param The type + * @return A new instance of your plugin, configured + * @since 1.2 + */ + T loadPlugin(final Class baseClass, final PluginSetting pluginSetting, final SinkContext sinkContext); + /** * Loads a specified number of plugin instances. The total number of instances is provided * by the numberOfInstancesFunction. 
diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/sink/SinkContext.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/sink/SinkContext.java new file mode 100644 index 0000000000..9650411bd8 --- /dev/null +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/sink/SinkContext.java @@ -0,0 +1,39 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.model.sink; + +import java.util.Collection; + +/** + * Data Prepper Sink Context class. This the class for keeping global + * sink configuration as context so that individual sinks may use them. + */ +public class SinkContext { + private final String tagsTargetKey; + private final Collection routes; + + public SinkContext(final String tagsTargetKey, final Collection routes) { + this.tagsTargetKey = tagsTargetKey; + this.routes = routes; + } + + /** + * returns the target key name for tags if configured for a given sink + * @return tags target key + */ + public String getTagsTargetKey() { + return tagsTargetKey; + } + + /** + * returns routes if configured for a given sink + * @return routes + */ + public Collection getRoutes() { + return routes; + } +} + diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/configuration/PipelinesDataFlowModelTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/configuration/PipelinesDataFlowModelTest.java index 23dd1aa301..162b7c8de3 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/configuration/PipelinesDataFlowModelTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/configuration/PipelinesDataFlowModelTest.java @@ -50,7 +50,7 @@ void testSerializing_PipelinesDataFlowModel_empty_Plugins_with_nonEmpty_delay_an final PluginModel source = new PluginModel("testSource", (Map) null); final List processors = Collections.singletonList(new 
PluginModel("testProcessor", (Map) null)); - final List sinks = Collections.singletonList(new SinkModel("testSink", Collections.emptyList(), null)); + final List sinks = Collections.singletonList(new SinkModel("testSink", Collections.emptyList(), null, null)); final PipelineModel pipelineModel = new PipelineModel(source, null, processors, null, sinks, 8, 50); final PipelinesDataFlowModel pipelinesDataFlowModel = new PipelinesDataFlowModel(Collections.singletonMap(pipelineName, pipelineModel)); @@ -72,7 +72,7 @@ void testSerializing_PipelinesDataFlowModel_with_Version() throws JsonProcessing final DataPrepperVersion version = DataPrepperVersion.parse("2.0"); final PluginModel source = new PluginModel("testSource", (Map) null); final List processors = Collections.singletonList(new PluginModel("testProcessor", (Map) null)); - final List sinks = Collections.singletonList(new SinkModel("testSink", Collections.emptyList(), null)); + final List sinks = Collections.singletonList(new SinkModel("testSink", Collections.emptyList(), null, null)); final PipelineModel pipelineModel = new PipelineModel(source, null, processors, null, sinks, 8, 50); final PipelinesDataFlowModel pipelinesDataFlowModel = new PipelinesDataFlowModel(version, Collections.singletonMap(pipelineName, pipelineModel)); @@ -93,7 +93,7 @@ void testSerializing_PipelinesDataFlowModel_empty_Plugins_with_nonEmpty_delay_an final PluginModel source = new PluginModel("testSource", (Map) null); final List preppers = Collections.singletonList(new PluginModel("testPrepper", (Map) null)); - final List sinks = Collections.singletonList(new SinkModel("testSink", Collections.singletonList("my-route"), null)); + final List sinks = Collections.singletonList(new SinkModel("testSink", Collections.singletonList("my-route"), null, null)); final PipelineModel pipelineModel = new PipelineModel(source, null, preppers, Collections.singletonList(new ConditionalRoute("my-route", "/a==b")), sinks, 8, 50); final PipelinesDataFlowModel 
pipelinesDataFlowModel = new PipelinesDataFlowModel(Collections.singletonMap(pipelineName, pipelineModel)); diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/configuration/SinkModelTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/configuration/SinkModelTest.java index 24f6ac6f76..bfe5ad3e73 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/configuration/SinkModelTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/configuration/SinkModelTest.java @@ -25,6 +25,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasKey; @@ -74,13 +75,15 @@ void serialize_into_known_SinkModel() throws IOException { final Map pluginSettings = new LinkedHashMap<>(); pluginSettings.put("key1", "value1"); pluginSettings.put("key2", "value2"); - final SinkModel sinkModel = new SinkModel("customSinkPlugin", Arrays.asList("routeA", "routeB"), pluginSettings); + final String tagsTargetKey = "tags"; + final SinkModel sinkModel = new SinkModel("customSinkPlugin", Arrays.asList("routeA", "routeB"), tagsTargetKey, pluginSettings); final String actualJson = objectMapper.writeValueAsString(sinkModel); final String expectedJson = createStringFromInputStream(this.getClass().getResourceAsStream("sink_plugin.yaml")); assertThat("---\n" + actualJson, equalTo(expectedJson)); + assertThat(sinkModel.getTagsTargetKey(), equalTo(tagsTargetKey)); } @Test @@ -93,7 +96,8 @@ void deserialize_with_any_pluginModel() throws IOException { assertAll( () -> assertThat(sinkModel.getPluginName(), equalTo("customPlugin")), () -> assertThat(sinkModel.getPluginSettings(), notNullValue()), - () -> assertThat(sinkModel.getRoutes(), 
notNullValue()) + () -> assertThat(sinkModel.getRoutes(), notNullValue()), + () -> assertThat(sinkModel.getTagsTargetKey(), nullValue()) ); assertAll( () -> assertThat(sinkModel.getPluginSettings().size(), equalTo(3)), @@ -123,7 +127,7 @@ void serialize_with_just_pluginModel() throws IOException { pluginSettings.put("key1", "value1"); pluginSettings.put("key2", "value2"); pluginSettings.put("key3", "value3"); - final SinkModel sinkModel = new SinkModel("customPlugin", null, pluginSettings); + final SinkModel sinkModel = new SinkModel("customPlugin", null, null, pluginSettings); final String actualJson = objectMapper.writeValueAsString(sinkModel); @@ -156,10 +160,11 @@ void build_with_only_PluginModel_should_return_expected_SinkModel() { assertThat(actualSinkModel.getPluginSettings(), equalTo(pluginSettings)); assertThat(actualSinkModel.getRoutes(), notNullValue()); assertThat(actualSinkModel.getRoutes(), empty()); + assertThat(actualSinkModel.getTagsTargetKey(), nullValue()); } } private static String createStringFromInputStream(final InputStream inputStream) throws IOException { return new String(inputStream.readAllBytes(), StandardCharsets.UTF_8); } -} \ No newline at end of file +} diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/sink/SinkContextTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/sink/SinkContextTest.java new file mode 100644 index 0000000000..404c3bbbf5 --- /dev/null +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/sink/SinkContextTest.java @@ -0,0 +1,33 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.model.sink; + +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import org.apache.commons.lang3.RandomStringUtils; + + + +public class SinkContextTest { 
+ private SinkContext sinkContext; + + @Test + public void testSinkContextBasic() { + final String testTagsTargetKey = RandomStringUtils.randomAlphabetic(6); + final List testRoutes = Collections.emptyList(); + sinkContext = new SinkContext(testTagsTargetKey, testRoutes); + assertThat(sinkContext.getTagsTargetKey(), equalTo(testTagsTargetKey)); + assertThat(sinkContext.getRoutes(), equalTo(testRoutes)); + + } + +} + diff --git a/data-prepper-api/src/test/resources/org/opensearch/dataprepper/model/configuration/sink_plugin.yaml b/data-prepper-api/src/test/resources/org/opensearch/dataprepper/model/configuration/sink_plugin.yaml index af072de0fa..cccdee7224 100644 --- a/data-prepper-api/src/test/resources/org/opensearch/dataprepper/model/configuration/sink_plugin.yaml +++ b/data-prepper-api/src/test/resources/org/opensearch/dataprepper/model/configuration/sink_plugin.yaml @@ -3,5 +3,6 @@ customSinkPlugin: routes: - "routeA" - "routeB" + tags_target_key: "tags" key1: "value1" key2: "value2" diff --git a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/PipelineConfigurationValidator.java b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/PipelineConfigurationValidator.java index d008101797..bea3c68706 100644 --- a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/PipelineConfigurationValidator.java +++ b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/PipelineConfigurationValidator.java @@ -8,7 +8,7 @@ import org.apache.commons.collections.CollectionUtils; import org.opensearch.dataprepper.model.configuration.PluginSetting; import org.opensearch.dataprepper.parser.model.PipelineConfiguration; -import org.opensearch.dataprepper.parser.model.RoutedPluginSetting; +import org.opensearch.dataprepper.parser.model.SinkContextPluginSetting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -82,7 +82,7 @@ private static void visitAndValidate( final PipelineConfiguration pipelineConfiguration = 
pipelineConfigurationMap.get(pipeline); touchedPipelineSet.add(pipeline); //if validation is successful, then there is definitely sink - final List connectedPipelinesSettings = pipelineConfiguration.getSinkPluginSettings(); + final List connectedPipelinesSettings = pipelineConfiguration.getSinkPluginSettings(); //Recursively check connected pipelines for (PluginSetting pluginSetting : connectedPipelinesSettings) { //Further process only if the sink is of pipeline type @@ -159,7 +159,7 @@ private static void validateForOrphans( throw new RuntimeException("Invalid configuration, cannot proceed with ambiguous configuration"); } final PipelineConfiguration pipelineConfiguration = pipelineConfigurationMap.get(currentPipelineName); - final List pluginSettings = pipelineConfiguration.getSinkPluginSettings(); + final List pluginSettings = pipelineConfiguration.getSinkPluginSettings(); for (PluginSetting pluginSetting : pluginSettings) { if (PIPELINE_TYPE.equals(pluginSetting.getName()) && pluginSetting.getAttributeFromSettings(PIPELINE_ATTRIBUTE_NAME) != null) { diff --git a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/PipelineParser.java b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/PipelineParser.java index f47e844ea1..6a0a67d0f0 100644 --- a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/PipelineParser.java +++ b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/PipelineParser.java @@ -19,9 +19,10 @@ import org.opensearch.dataprepper.model.processor.Processor; import org.opensearch.dataprepper.model.sink.Sink; import org.opensearch.dataprepper.model.source.Source; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.parser.model.DataPrepperConfiguration; import org.opensearch.dataprepper.parser.model.PipelineConfiguration; -import org.opensearch.dataprepper.parser.model.RoutedPluginSetting; +import 
org.opensearch.dataprepper.parser.model.SinkContextPluginSetting; import org.opensearch.dataprepper.peerforwarder.PeerForwarderConfiguration; import org.opensearch.dataprepper.peerforwarder.PeerForwarderProvider; import org.opensearch.dataprepper.peerforwarder.PeerForwardingProcessorDecorator; @@ -292,13 +293,13 @@ private Optional getSourceIfPipelineType( return Optional.empty(); } - private DataFlowComponent buildRoutedSinkOrConnector(final RoutedPluginSetting pluginSetting) { - final Sink sink = buildSinkOrConnector(pluginSetting); + private DataFlowComponent buildRoutedSinkOrConnector(final SinkContextPluginSetting pluginSetting) { + final Sink sink = buildSinkOrConnector(pluginSetting, pluginSetting.getSinkContext()); - return new DataFlowComponent<>(sink, pluginSetting.getRoutes()); + return new DataFlowComponent<>(sink, pluginSetting.getSinkContext().getRoutes()); } - private Sink buildSinkOrConnector(final PluginSetting pluginSetting) { + private Sink buildSinkOrConnector(final PluginSetting pluginSetting, final SinkContext sinkContext) { LOG.info("Building [{}] as sink component", pluginSetting.getName()); final Optional pipelineNameOptional = getPipelineNameIfPipelineType(pluginSetting); if (pipelineNameOptional.isPresent()) { //update to ifPresentOrElse when using JDK9 @@ -307,7 +308,7 @@ private Sink buildSinkOrConnector(final PluginSetting pluginSetting) { sourceConnectorMap.put(pipelineName, pipelineConnector); //TODO retrieve from parent Pipeline using name return pipelineConnector; } else { - return pluginFactory.loadPlugin(Sink.class, pluginSetting); + return pluginFactory.loadPlugin(Sink.class, pluginSetting, sinkContext); } } @@ -337,7 +338,7 @@ private void removeConnectedPipelines( sourcePipeline, pipelineConfigurationMap, pipelineMap)); //remove sink connected pipelines - final List sinkPluginSettings = failedPipelineConfiguration.getSinkPluginSettings(); + final List sinkPluginSettings = failedPipelineConfiguration.getSinkPluginSettings(); 
sinkPluginSettings.forEach(sinkPluginSetting -> { getPipelineNameIfPipelineType(sinkPluginSetting).ifPresent(sinkPipeline -> processRemoveIfRequired( sinkPipeline, pipelineConfigurationMap, pipelineMap)); diff --git a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/PipelineConfiguration.java b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/PipelineConfiguration.java index bde956ceeb..b35b05bdb5 100644 --- a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/PipelineConfiguration.java +++ b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/PipelineConfiguration.java @@ -10,6 +10,7 @@ import org.opensearch.dataprepper.model.configuration.PluginModel; import org.opensearch.dataprepper.model.configuration.PluginSetting; import org.opensearch.dataprepper.model.configuration.SinkModel; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.plugins.buffer.blockingbuffer.BlockingBuffer; import java.util.Collections; @@ -30,7 +31,7 @@ public class PipelineConfiguration { private final PluginSetting sourcePluginSetting; private final PluginSetting bufferPluginSetting; private final List processorPluginSettings; - private final List sinkPluginSettings; + private final List sinkPluginSettings; private final Integer workers; private final Integer readBatchDelay; @@ -58,7 +59,7 @@ public List getProcessorPluginSettings() { return processorPluginSettings; } - public List getSinkPluginSettings() { + public List getSinkPluginSettings() { return sinkPluginSettings; } @@ -104,12 +105,12 @@ private PluginSetting getBufferFromPluginModelOrDefault( return getPluginSettingFromPluginModel(pluginModel); } - private List getSinksFromPluginModel( + private List getSinksFromPluginModel( final List sinkConfigurations) { if (sinkConfigurations == null || sinkConfigurations.isEmpty()) { throw new IllegalArgumentException("Invalid configuration, at least one sink is 
required"); } - return sinkConfigurations.stream().map(PipelineConfiguration::getRoutedPluginSettingFromSinkModel) + return sinkConfigurations.stream().map(PipelineConfiguration::getSinkContextPluginSettingFromSinkModel) .collect(Collectors.toList()); } @@ -130,11 +131,11 @@ private static PluginSetting getPluginSettingFromPluginModel(final PluginModel p return new PluginSetting(pluginModel.getPluginName(), settingsMap); } - private static RoutedPluginSetting getRoutedPluginSettingFromSinkModel(final SinkModel sinkModel) { + private static SinkContextPluginSetting getSinkContextPluginSettingFromSinkModel(final SinkModel sinkModel) { final Map settingsMap = Optional .ofNullable(sinkModel.getPluginSettings()) .orElseGet(HashMap::new); - return new RoutedPluginSetting(sinkModel.getPluginName(), settingsMap, sinkModel.getRoutes()); + return new SinkContextPluginSetting(sinkModel.getPluginName(), settingsMap, new SinkContext(sinkModel.getTagsTargetKey(), sinkModel.getRoutes())); } private Integer getWorkersFromPipelineModel(final PipelineModel pipelineModel) { @@ -159,4 +160,4 @@ private void validateConfiguration(final Integer configuration, final String com component, configuration)); } } -} \ No newline at end of file +} diff --git a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/RoutedPluginSetting.java b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/RoutedPluginSetting.java deleted file mode 100644 index 85240bdc4e..0000000000 --- a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/RoutedPluginSetting.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright OpenSearch Contributors - * SPDX-License-Identifier: Apache-2.0 - */ - -package org.opensearch.dataprepper.parser.model; - -import org.opensearch.dataprepper.model.configuration.PluginSetting; - -import java.util.Collection; -import java.util.Map; - -public class RoutedPluginSetting extends PluginSetting { - private final Collection routes; - 
- public RoutedPluginSetting(final String name, final Map settings, final Collection routes) { - super(name, settings); - this.routes = routes; - } - - public Collection getRoutes() { - return routes; - } -} diff --git a/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/SinkContextPluginSetting.java b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/SinkContextPluginSetting.java new file mode 100644 index 0000000000..9cb72bf3e8 --- /dev/null +++ b/data-prepper-core/src/main/java/org/opensearch/dataprepper/parser/model/SinkContextPluginSetting.java @@ -0,0 +1,24 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.parser.model; + +import org.opensearch.dataprepper.model.configuration.PluginSetting; +import org.opensearch.dataprepper.model.sink.SinkContext; + +import java.util.Map; + +public class SinkContextPluginSetting extends PluginSetting { + private final SinkContext sinkContext; + + public SinkContextPluginSetting(final String name, final Map settings, final SinkContext sinkContext) { + super(name, settings); + this.sinkContext = sinkContext; + } + + public SinkContext getSinkContext() { + return sinkContext; + } +} diff --git a/data-prepper-core/src/main/java/org/opensearch/dataprepper/plugin/ComponentPluginArgumentsContext.java b/data-prepper-core/src/main/java/org/opensearch/dataprepper/plugin/ComponentPluginArgumentsContext.java index cb809f75b7..807de63367 100644 --- a/data-prepper-core/src/main/java/org/opensearch/dataprepper/plugin/ComponentPluginArgumentsContext.java +++ b/data-prepper-core/src/main/java/org/opensearch/dataprepper/plugin/ComponentPluginArgumentsContext.java @@ -5,6 +5,7 @@ package org.opensearch.dataprepper.plugin; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.metrics.PluginMetrics; import org.opensearch.dataprepper.model.configuration.PipelineDescription; import 
org.opensearch.dataprepper.model.configuration.PluginSetting; @@ -64,6 +65,9 @@ private ComponentPluginArgumentsContext(final Builder builder) { if (builder.acknowledgementSetManager != null) { typedArgumentsSuppliers.put(AcknowledgementSetManager.class, () -> builder.acknowledgementSetManager); } + if (builder.sinkContext != null) { + typedArgumentsSuppliers.put(SinkContext.class, () -> builder.sinkContext); + } } @Override @@ -114,6 +118,7 @@ static class Builder { private BeanFactory beanFactory; private EventFactory eventFactory; private AcknowledgementSetManager acknowledgementSetManager; + private SinkContext sinkContext; Builder withPluginConfiguration(final Object pluginConfiguration) { this.pluginConfiguration = pluginConfiguration; @@ -140,6 +145,11 @@ Builder withPluginFactory(final PluginFactory pluginFactory) { return this; } + Builder withSinkContext(final SinkContext sinkContext) { + this.sinkContext = sinkContext; + return this; + } + Builder withPipelineDescription(final PipelineDescription pipelineDescription) { this.pipelineDescription = pipelineDescription; return this; diff --git a/data-prepper-core/src/main/java/org/opensearch/dataprepper/plugin/DefaultPluginFactory.java b/data-prepper-core/src/main/java/org/opensearch/dataprepper/plugin/DefaultPluginFactory.java index e21acc8ec9..7560b98e10 100644 --- a/data-prepper-core/src/main/java/org/opensearch/dataprepper/plugin/DefaultPluginFactory.java +++ b/data-prepper-core/src/main/java/org/opensearch/dataprepper/plugin/DefaultPluginFactory.java @@ -5,6 +5,7 @@ package org.opensearch.dataprepper.plugin; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; import org.opensearch.dataprepper.model.configuration.PluginSetting; import org.opensearch.dataprepper.model.plugin.NoPluginFoundException; @@ -70,7 +71,17 @@ public T loadPlugin(final Class baseClass, final PluginSetting pluginSett final String pluginName = 
pluginSetting.getName(); final Class pluginClass = getPluginClass(baseClass, pluginName); - final ComponentPluginArgumentsContext constructionContext = getConstructionContext(pluginSetting, pluginClass); + final ComponentPluginArgumentsContext constructionContext = getConstructionContext(pluginSetting, pluginClass, null); + + return pluginCreator.newPluginInstance(pluginClass, constructionContext, pluginName); + } + + @Override + public T loadPlugin(final Class baseClass, final PluginSetting pluginSetting, final SinkContext sinkContext) { + final String pluginName = pluginSetting.getName(); + final Class pluginClass = getPluginClass(baseClass, pluginName); + + final ComponentPluginArgumentsContext constructionContext = getConstructionContext(pluginSetting, pluginClass, sinkContext); return pluginCreator.newPluginInstance(pluginClass, constructionContext, pluginName); } @@ -88,7 +99,7 @@ public List loadPlugins( if(numberOfInstances == null || numberOfInstances < 0) throw new IllegalArgumentException("The numberOfInstances must be provided as a non-negative integer."); - final ComponentPluginArgumentsContext constructionContext = getConstructionContext(pluginSetting, pluginClass); + final ComponentPluginArgumentsContext constructionContext = getConstructionContext(pluginSetting, pluginClass, null); final List plugins = new ArrayList<>(numberOfInstances); for (int i = 0; i < numberOfInstances; i++) { @@ -97,7 +108,7 @@ public List loadPlugins( return plugins; } - private ComponentPluginArgumentsContext getConstructionContext(final PluginSetting pluginSetting, final Class pluginClass) { + private ComponentPluginArgumentsContext getConstructionContext(final PluginSetting pluginSetting, final Class pluginClass, final SinkContext sinkContext) { final DataPrepperPlugin pluginAnnotation = pluginClass.getAnnotation(DataPrepperPlugin.class); final Class pluginConfigurationType = pluginAnnotation.pluginConfigurationType(); @@ -111,6 +122,7 @@ private 
ComponentPluginArgumentsContext getConstructionContext(final PluginS .withBeanFactory(pluginBeanFactoryProvider.get()) .withEventFactory(eventFactory) .withAcknowledgementSetManager(acknowledgementSetManager) + .withSinkContext(sinkContext) .build(); } diff --git a/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/PipelineConfigurationTests.java b/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/PipelineConfigurationTests.java index 1ad21de14a..5d611e445d 100644 --- a/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/PipelineConfigurationTests.java +++ b/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/PipelineConfigurationTests.java @@ -58,7 +58,7 @@ void testPipelineConfigurationCreation() { final PluginSetting actualSourcePluginSetting = pipelineConfiguration.getSourcePluginSetting(); final PluginSetting actualBufferPluginSetting = pipelineConfiguration.getBufferPluginSetting(); final List actualProcesserPluginSettings = pipelineConfiguration.getProcessorPluginSettings(); - final List actualSinkPluginSettings = pipelineConfiguration.getSinkPluginSettings(); + final List actualSinkPluginSettings = pipelineConfiguration.getSinkPluginSettings(); comparePluginSettings(actualSourcePluginSetting, TestDataProvider.VALID_PLUGIN_SETTING_1); assertThat(pipelineConfiguration.getBufferPluginSetting(), notNullValue()); @@ -99,7 +99,7 @@ void testOnlySourceAndSink() { final PluginSetting actualSourcePluginSetting = pipelineConfiguration.getSourcePluginSetting(); final PluginSetting actualBufferPluginSetting = pipelineConfiguration.getBufferPluginSetting(); final List actualProcessorPluginSettings = pipelineConfiguration.getProcessorPluginSettings(); - final List actualSinkPluginSettings = pipelineConfiguration.getSinkPluginSettings(); + final List actualSinkPluginSettings = pipelineConfiguration.getSinkPluginSettings(); comparePluginSettings(actualSourcePluginSetting, 
TestDataProvider.VALID_PLUGIN_SETTING_1); assertThat(pipelineConfiguration.getBufferPluginSetting(), notNullValue()); @@ -221,13 +221,40 @@ void testSinksWithRoutes() { final PipelineConfiguration pipelineConfiguration = new PipelineConfiguration(pipelineModel); - final List actualSinkPluginSettings = pipelineConfiguration.getSinkPluginSettings(); + final List actualSinkPluginSettings = pipelineConfiguration.getSinkPluginSettings(); assertThat(actualSinkPluginSettings.size(), equalTo(2)); comparePluginSettings(actualSinkPluginSettings.get(0), TestDataProvider.VALID_PLUGIN_SETTING_1); comparePluginSettings(actualSinkPluginSettings.get(1), TestDataProvider.VALID_PLUGIN_SETTING_2); - assertThat(actualSinkPluginSettings.get(0).getRoutes(), equalTo(orderedSinkRoutes.get(0))); - assertThat(actualSinkPluginSettings.get(1).getRoutes(), equalTo(orderedSinkRoutes.get(1))); + assertThat(actualSinkPluginSettings.get(0).getSinkContext().getRoutes(), equalTo(orderedSinkRoutes.get(0))); + assertThat(actualSinkPluginSettings.get(1).getSinkContext().getRoutes(), equalTo(orderedSinkRoutes.get(1))); + } + + @Test + void testSinksWithTagsTargetKey() { + final List orderedSinkTagTagets = new ArrayList<>(); + for (final SinkModel sink : sinks) { + final String tagsTargetKey = UUID.randomUUID().toString(); + when(sink.getTagsTargetKey()).thenReturn(tagsTargetKey); + orderedSinkTagTagets.add(tagsTargetKey); + } + + final PipelineModel pipelineModel = mock(PipelineModel.class); + when(pipelineModel.getSource()).thenReturn(source); + when(pipelineModel.getSinks()).thenReturn(sinks); + when(pipelineModel.getProcessors()).thenReturn(null); + when(pipelineModel.getWorkers()).thenReturn(null); + when(pipelineModel.getReadBatchDelay()).thenReturn(null); + + final PipelineConfiguration pipelineConfiguration = new PipelineConfiguration(pipelineModel); + + final List actualSinkPluginSettings = pipelineConfiguration.getSinkPluginSettings(); + + assertThat(actualSinkPluginSettings.size(), 
equalTo(2)); + comparePluginSettings(actualSinkPluginSettings.get(0), TestDataProvider.VALID_PLUGIN_SETTING_1); + comparePluginSettings(actualSinkPluginSettings.get(1), TestDataProvider.VALID_PLUGIN_SETTING_2); + assertThat(actualSinkPluginSettings.get(0).getSinkContext().getTagsTargetKey(), equalTo(orderedSinkTagTagets.get(0))); + assertThat(actualSinkPluginSettings.get(1).getSinkContext().getTagsTargetKey(), equalTo(orderedSinkTagTagets.get(1))); } private void comparePluginSettings(final PluginSetting actual, final PluginSetting expected) { diff --git a/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/RoutedPluginSettingTest.java b/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/SinkContextPluginSettingTest.java similarity index 64% rename from data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/RoutedPluginSettingTest.java rename to data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/SinkContextPluginSettingTest.java index b961f234f8..9264abe56c 100644 --- a/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/RoutedPluginSettingTest.java +++ b/data-prepper-core/src/test/java/org/opensearch/dataprepper/parser/model/SinkContextPluginSettingTest.java @@ -5,32 +5,32 @@ package org.opensearch.dataprepper.parser.model; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import java.util.Collection; import java.util.Map; -import java.util.Set; import java.util.UUID; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; +import static org.mockito.Mockito.mock; -class RoutedPluginSettingTest { +class SinkContextPluginSettingTest { private String name; private Map settings; - private Collection routes; + private SinkContext sinkContext; @BeforeEach void setUp() { name = UUID.randomUUID().toString(); settings = 
Map.of(UUID.randomUUID().toString(), UUID.randomUUID().toString()); - routes = Set.of(UUID.randomUUID().toString(), UUID.randomUUID().toString()); + sinkContext = mock(SinkContext.class); } - private RoutedPluginSetting createObjectUnderTest() { - return new RoutedPluginSetting(name, settings, routes); + private SinkContextPluginSetting createObjectUnderTest() { + return new SinkContextPluginSetting(name, settings, sinkContext); } @Test @@ -44,7 +44,7 @@ void getSettings_returns_settings_from_constructor() { } @Test - void getRoutes_returns_routes_from_constructor() { - assertThat(createObjectUnderTest().getRoutes(), equalTo(routes)); + void getRoutes_returns_sink_context_from_constructor() { + assertThat(createObjectUnderTest().getSinkContext(), equalTo(sinkContext)); } -} \ No newline at end of file +} diff --git a/data-prepper-core/src/test/java/org/opensearch/dataprepper/plugin/ComponentPluginArgumentsContextTest.java b/data-prepper-core/src/test/java/org/opensearch/dataprepper/plugin/ComponentPluginArgumentsContextTest.java index 51506d4c23..ec3dc17644 100644 --- a/data-prepper-core/src/test/java/org/opensearch/dataprepper/plugin/ComponentPluginArgumentsContextTest.java +++ b/data-prepper-core/src/test/java/org/opensearch/dataprepper/plugin/ComponentPluginArgumentsContextTest.java @@ -10,6 +10,7 @@ import org.opensearch.dataprepper.model.configuration.PluginSetting; import org.opensearch.dataprepper.model.plugin.InvalidPluginDefinitionException; import org.opensearch.dataprepper.model.plugin.PluginFactory; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; @@ -98,6 +99,19 @@ void createArguments_with_single_class_using_bean_factory() { equalTo(new Object[] {mock})); } + @Test + void createArguments_with_single_class_using_sink_context() { + final SinkContext sinkContext = mock(SinkContext.class); + + final ComponentPluginArgumentsContext 
objectUnderTest = new ComponentPluginArgumentsContext.Builder() + .withPluginSetting(pluginSetting) + .withSinkContext(sinkContext) + .build(); + + assertThat(objectUnderTest.createArguments(new Class[] { SinkContext.class }), + equalTo(new Object[] { sinkContext})); + } + @Test void createArguments_given_bean_not_available_with_single_class_using_bean_factory() { doThrow(mock(BeansException.class)).when(beanFactory).getBean((Class) any()); @@ -192,4 +206,4 @@ void createArguments_with_PluginMetrics() { assertThat(arguments, equalTo(new Object[] { pluginSetting, pluginMetrics })); } -} \ No newline at end of file +} diff --git a/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/sink/FileSink.java b/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/sink/FileSink.java index 5cd407c2ff..fc3df248fa 100644 --- a/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/sink/FileSink.java +++ b/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/sink/FileSink.java @@ -12,6 +12,7 @@ import org.opensearch.dataprepper.model.event.EventHandle; import org.opensearch.dataprepper.model.record.Record; import org.opensearch.dataprepper.model.sink.Sink; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,6 +22,7 @@ import java.nio.file.Files; import java.nio.file.Paths; import java.util.Collection; +import java.util.Objects; import java.util.concurrent.locks.ReentrantLock; import static java.lang.String.format; @@ -37,6 +39,7 @@ public class FileSink implements Sink> { private final ReentrantLock lock; private boolean isStopRequested; private boolean initialized; + private final String tagsTargetKey; /** * Mandatory constructor for Data Prepper Component - This constructor is used by Data Prepper @@ -47,11 +50,12 @@ public class FileSink implements Sink> { * @param fileSinkConfig The file sink configuration */ 
@DataPrepperPluginConstructor - public FileSink(final FileSinkConfig fileSinkConfig) { + public FileSink(final FileSinkConfig fileSinkConfig, final SinkContext sinkContext) { this.outputFilePath = fileSinkConfig.getPath(); isStopRequested = false; initialized = false; lock = new ReentrantLock(true); + tagsTargetKey = Objects.nonNull(sinkContext) ? sinkContext.getTagsTargetKey() : null; } @Override @@ -64,7 +68,6 @@ public void output(final Collection> records) { for (final Record record : records) { try { checkTypeAndWriteObject(record.getData(), writer); - } catch (final IOException ex) { throw new RuntimeException(format("Encountered exception writing to file %s", outputFilePath), ex); } @@ -84,7 +87,8 @@ public void output(final Collection> records) { // TODO: This function should be removed with the completion of: https://github.com/opensearch-project/data-prepper/issues/546 private void checkTypeAndWriteObject(final Object object, final BufferedWriter writer) throws IOException { if (object instanceof Event) { - writer.write(((Event) object).toJsonString()); + String output = ((Event)object).jsonBuilder().includeTags(tagsTargetKey).toJsonString(); + writer.write(output); writer.newLine(); EventHandle eventHandle = ((Event)object).getEventHandle(); if (eventHandle != null) { diff --git a/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/sink/StdOutSink.java b/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/sink/StdOutSink.java index 5e50601062..e43c25f658 100644 --- a/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/sink/StdOutSink.java +++ b/data-prepper-plugins/common/src/main/java/org/opensearch/dataprepper/plugins/sink/StdOutSink.java @@ -11,11 +11,14 @@ import org.opensearch.dataprepper.model.event.EventHandle; import org.opensearch.dataprepper.model.record.Record; import org.opensearch.dataprepper.model.sink.Sink; +import org.opensearch.dataprepper.model.sink.SinkContext; 
import java.util.Collection; +import java.util.Objects; @DataPrepperPlugin(name = "stdout", pluginType = Sink.class) public class StdOutSink implements Sink> { + private final String tagsTargetKey; /** * Mandatory constructor for Data Prepper Component - This constructor is used by Data Prepper @@ -25,13 +28,17 @@ public class StdOutSink implements Sink> { * * @param pluginSetting instance with metadata information from pipeline pluginSetting file. */ - public StdOutSink(final PluginSetting pluginSetting) { - this(); + public StdOutSink(final PluginSetting pluginSetting, final SinkContext sinkContext) { + this(Objects.nonNull(sinkContext) ? sinkContext.getTagsTargetKey() : null); } - public StdOutSink() { + public StdOutSink(final String tagsTargetKey) { + this.tagsTargetKey = tagsTargetKey; } + public StdOutSink() { + this.tagsTargetKey = null; + } @Override public void output(final Collection> records) { for (final Record record : records) { @@ -43,7 +50,8 @@ public void output(final Collection> records) { // TODO: This function should be removed with the completion of: https://github.com/opensearch-project/data-prepper/issues/546 private void checkTypeAndPrintObject(final Object object) { if (object instanceof Event) { - System.out.println(((Event) object).toJsonString()); + String output = ((Event)object).jsonBuilder().includeTags(tagsTargetKey).toJsonString(); + System.out.println(output); EventHandle eventHandle = ((Event)object).getEventHandle(); if (eventHandle != null) { eventHandle.release(true); diff --git a/data-prepper-plugins/common/src/test/java/org/opensearch/dataprepper/plugins/sink/FileSinkTests.java b/data-prepper-plugins/common/src/test/java/org/opensearch/dataprepper/plugins/sink/FileSinkTests.java index b9b2457032..3ed9b99213 100644 --- a/data-prepper-plugins/common/src/test/java/org/opensearch/dataprepper/plugins/sink/FileSinkTests.java +++ b/data-prepper-plugins/common/src/test/java/org/opensearch/dataprepper/plugins/sink/FileSinkTests.java 
@@ -13,6 +13,7 @@ import org.junit.jupiter.api.Test; import org.opensearch.dataprepper.model.event.JacksonEvent; import org.opensearch.dataprepper.model.record.Record; +import org.opensearch.dataprepper.model.sink.SinkContext; import java.io.BufferedReader; import java.io.File; @@ -37,33 +38,38 @@ class FileSinkTests { private final String TEST_DATA_1 = "data_prepper"; private final String TEST_DATA_2 = "file_sink"; private final String TEST_KEY = "test_key"; + private final String tagStr1 = "tag1"; + private final String tagStr2 = "tag2"; private final Record TEST_STRING_RECORD_1 = new Record<>(TEST_DATA_1); private final Record TEST_STRING_RECORD_2 = new Record<>(TEST_DATA_2); // TODO: remove with the completion of: https://github.com/opensearch-project/data-prepper/issues/546 private final List> TEST_STRING_RECORDS = Arrays.asList(TEST_STRING_RECORD_1, TEST_STRING_RECORD_2); private List> TEST_RECORDS; private FileSinkConfig fileSinkConfig; + private SinkContext sinkContext; @BeforeEach void setUp() throws IOException { fileSinkConfig = mock(FileSinkConfig.class); + sinkContext = mock(SinkContext.class); TEST_OUTPUT_FILE = Files.createTempFile("", "output.txt").toFile(); TEST_RECORDS = new ArrayList<>(); - TEST_RECORDS.add(new Record<>(JacksonEvent - .builder() + JacksonEvent event = JacksonEvent.builder() .withEventType("event") .withData(Map.of(TEST_KEY, TEST_DATA_1)) - .build())); - TEST_RECORDS.add(new Record<>(JacksonEvent - .builder() + .build(); + event.getMetadata().addTags(List.of(tagStr1, tagStr2)); + TEST_RECORDS.add(new Record<>(event)); + event = JacksonEvent.builder() .withEventType("event") .withData(Map.of(TEST_KEY, TEST_DATA_2)) - .build())); + .build(); + TEST_RECORDS.add(new Record<>(event)); } private FileSink createObjectUnderTest() { - return new FileSink(fileSinkConfig); + return new FileSink(fileSinkConfig, sinkContext); } @AfterEach @@ -74,6 +80,7 @@ void tearDown() { @Test void testInvalidFilePath() { 
when(fileSinkConfig.getPath()).thenReturn(""); + when(sinkContext.getTagsTargetKey()).thenReturn(null); final FileSink objectUnderTest = createObjectUnderTest(); assertThrows(RuntimeException.class, objectUnderTest::initialize); } @@ -88,6 +95,7 @@ void setUp() { // TODO: remove with the completion of: https://github.com/opensearch-project/data-prepper/issues/546 @Test void testValidFilePathStringRecord() throws IOException { + when(sinkContext.getTagsTargetKey()).thenReturn(null); final FileSink fileSink = createObjectUnderTest(); fileSink.initialize(); @@ -100,9 +108,27 @@ void testValidFilePathStringRecord() throws IOException { Assertions.assertTrue(outputData.contains(TEST_DATA_2)); } + @Test + void testValidFilePathStringRecord_EventsWithTags() throws IOException { + when(sinkContext.getTagsTargetKey()).thenReturn("tags"); + final FileSink fileSink = createObjectUnderTest(); + fileSink.initialize(); + + Assertions.assertTrue(fileSink.isReady()); + fileSink.output(TEST_RECORDS); + fileSink.shutdown(); + + final String outputData = readDocFromFile(TEST_OUTPUT_FILE); + Assertions.assertTrue(outputData.contains(TEST_DATA_1)); + Assertions.assertTrue(outputData.contains(tagStr1)); + Assertions.assertTrue(outputData.contains(tagStr2)); + Assertions.assertTrue(outputData.contains(TEST_DATA_2)); + } + // TODO: remove with the completion of: https://github.com/opensearch-project/data-prepper/issues/546 @Test void testValidFilePathCustomTypeRecord() throws IOException { + when(sinkContext.getTagsTargetKey()).thenReturn(null); final FileSink fileSink = createObjectUnderTest(); fileSink.initialize(); Assertions.assertTrue(fileSink.isReady()); @@ -115,6 +141,7 @@ void testValidFilePathCustomTypeRecord() throws IOException { } @Test void testValidFilePath() throws IOException { + when(sinkContext.getTagsTargetKey()).thenReturn(null); final FileSink fileSink = createObjectUnderTest(); fileSink.initialize(); Assertions.assertTrue(fileSink.isReady()); @@ -128,6 +155,7 @@ void 
testValidFilePath() throws IOException { @Test void testMultipleCallsToOutput() throws IOException { + when(sinkContext.getTagsTargetKey()).thenReturn(null); final FileSink fileSink = createObjectUnderTest(); fileSink.initialize(); Assertions.assertTrue(fileSink.isReady()); @@ -142,6 +170,7 @@ void testMultipleCallsToOutput() throws IOException { @Test void testCallingOutputAfterShutdownDoesNotWrite() throws IOException { + when(sinkContext.getTagsTargetKey()).thenReturn(null); final FileSink fileSink = createObjectUnderTest(); fileSink.initialize(); Assertions.assertTrue(fileSink.isReady()); @@ -157,6 +186,7 @@ void testCallingOutputAfterShutdownDoesNotWrite() throws IOException { @Test void testWithDefaultFile() { + when(sinkContext.getTagsTargetKey()).thenReturn(null); when(fileSinkConfig.getPath()).thenReturn(null); final FileSink objectUnderTest = createObjectUnderTest(); assertThrows(RuntimeException.class, objectUnderTest::initialize); diff --git a/data-prepper-plugins/common/src/test/java/org/opensearch/dataprepper/plugins/sink/StdOutSinkTests.java b/data-prepper-plugins/common/src/test/java/org/opensearch/dataprepper/plugins/sink/StdOutSinkTests.java index db8bd20b83..a02ed5e30c 100644 --- a/data-prepper-plugins/common/src/test/java/org/opensearch/dataprepper/plugins/sink/StdOutSinkTests.java +++ b/data-prepper-plugins/common/src/test/java/org/opensearch/dataprepper/plugins/sink/StdOutSinkTests.java @@ -51,7 +51,7 @@ public void setup() { @Test public void testSinkWithEvents() { - final StdOutSink stdOutSink = new StdOutSink(new PluginSetting(PLUGIN_NAME, new HashMap<>())); + final StdOutSink stdOutSink = new StdOutSink(new PluginSetting(PLUGIN_NAME, new HashMap<>()), null); stdOutSink.output(testRecords); stdOutSink.shutdown(); } @@ -59,7 +59,7 @@ public void testSinkWithEvents() { // TODO: remove with the completion of: https://github.com/opensearch-project/data-prepper/issues/546 @Test public void testSinkWithCustomType() { - final StdOutSink stdOutSink 
= new StdOutSink(new PluginSetting(PLUGIN_NAME, new HashMap<>())); + final StdOutSink stdOutSink = new StdOutSink(new PluginSetting(PLUGIN_NAME, new HashMap<>()), null); stdOutSink.output(Collections.singletonList(new Record(new TestObject()))); } diff --git a/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSinkIT.java b/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSinkIT.java index 3905a5e4c0..fd8bf7d82f 100644 --- a/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSinkIT.java +++ b/data-prepper-plugins/opensearch/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSinkIT.java @@ -41,11 +41,14 @@ import org.opensearch.dataprepper.model.event.JacksonEvent; import org.opensearch.dataprepper.model.plugin.PluginFactory; import org.opensearch.dataprepper.model.record.Record; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.plugins.sink.opensearch.bulk.BulkAction; import org.opensearch.dataprepper.plugins.sink.opensearch.index.AbstractIndexManager; import org.opensearch.dataprepper.plugins.sink.opensearch.index.IndexConfiguration; import org.opensearch.dataprepper.plugins.sink.opensearch.index.IndexConstants; import org.opensearch.dataprepper.plugins.sink.opensearch.index.IndexType; +import org.apache.commons.lang3.RandomStringUtils; +import static org.mockito.Mockito.when; import javax.ws.rs.HttpMethod; import java.io.BufferedReader; @@ -108,20 +111,33 @@ public class OpenSearchSinkIT { private RestClient client; private EventHandle eventHandle; + private SinkContext sinkContext; + private String testTagsTargetKey; @Mock private PluginFactory pluginFactory; - @Mock - private AwsCredentialsSupplier awsCredentialsSupplier; + @Mock + private AwsCredentialsSupplier awsCredentialsSupplier; 
- public OpenSearchSink createObjectUnderTest(PluginSetting pluginSetting, boolean doInitialize) { - OpenSearchSink sink = new OpenSearchSink(pluginSetting, pluginFactory, awsCredentialsSupplier); - if (doInitialize) { - sink.doInitialize(); + public OpenSearchSink createObjectUnderTest(PluginSetting pluginSetting, boolean doInitialize) { + OpenSearchSink sink = new OpenSearchSink(pluginSetting, pluginFactory, null, awsCredentialsSupplier); + if (doInitialize) { + sink.doInitialize(); + } + return sink; + } + + public OpenSearchSink createObjectUnderTestWithSinkContext(PluginSetting pluginSetting, boolean doInitialize) { + sinkContext = mock(SinkContext.class); + testTagsTargetKey = RandomStringUtils.randomAlphabetic(5); + when(sinkContext.getTagsTargetKey()).thenReturn(testTagsTargetKey); + OpenSearchSink sink = new OpenSearchSink(pluginSetting, pluginFactory, sinkContext, awsCredentialsSupplier); + if (doInitialize) { + sink.doInitialize(); + } + return sink; } - return sink; - } @BeforeEach public void setup() { @@ -587,6 +603,34 @@ public void testBulkActionCreate() throws IOException, InterruptedException { Assert.assertEquals(1.0, bulkRequestLatencies.get(0).getValue(), 0); } + @Test + public void testEventOutputWithTags() throws IOException, InterruptedException { + final Event testEvent = JacksonEvent.builder() + .withData("{\"log\": \"foobar\"}") + .withEventType("event") + .build(); + ((JacksonEvent)testEvent).setEventHandle(eventHandle); + List tagsList = List.of("tag1", "tag2"); + testEvent.getMetadata().addTags(tagsList); + + final List> testRecords = Collections.singletonList(new Record<>(testEvent)); + + final PluginSetting pluginSetting = generatePluginSetting(IndexType.TRACE_ANALYTICS_RAW.getValue(), null, null); + final OpenSearchSink sink = createObjectUnderTestWithSinkContext(pluginSetting, true); + sink.output(testRecords); + + final String expIndexAlias = IndexConstants.TYPE_TO_DEFAULT_ALIAS.get(IndexType.TRACE_ANALYTICS_RAW); + final List> 
retSources = getSearchResponseDocSources(expIndexAlias); + final Map expectedContent = new HashMap<>(); + expectedContent.put("log", "foobar"); + expectedContent.put(testTagsTargetKey, tagsList); + + MatcherAssert.assertThat(retSources.size(), equalTo(1)); + MatcherAssert.assertThat(retSources.containsAll(Arrays.asList(expectedContent)), equalTo(true)); + MatcherAssert.assertThat(getDocumentCount(expIndexAlias, "log", "foobar"), equalTo(Integer.valueOf(1))); + sink.shutdown(); + } + @Test public void testEventOutput() throws IOException, InterruptedException { diff --git a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSink.java b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSink.java index b78235966c..a5bee48df9 100644 --- a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSink.java +++ b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/OpenSearchSink.java @@ -25,6 +25,7 @@ import org.opensearch.dataprepper.model.configuration.PluginModel; import org.opensearch.dataprepper.model.configuration.PluginSetting; import org.opensearch.dataprepper.model.event.Event; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.model.event.exceptions.EventKeyNotFoundException; import org.opensearch.dataprepper.model.failures.DlqObject; import org.opensearch.dataprepper.model.plugin.InvalidPluginConfigurationException; @@ -58,6 +59,7 @@ import java.nio.file.StandardOpenOption; import java.util.Collection; import java.util.List; +import java.util.Objects; import java.util.Optional; import java.util.StringJoiner; import java.util.concurrent.ConcurrentHashMap; @@ -104,6 +106,7 @@ public class OpenSearchSink extends AbstractSink> { private ObjectMapper objectMapper; private volatile boolean initialized; private 
PluginSetting pluginSetting; + private final SinkContext sinkContext; private FailedBulkOperationConverter failedBulkOperationConverter; @@ -114,9 +117,11 @@ public class OpenSearchSink extends AbstractSink> { @DataPrepperPluginConstructor public OpenSearchSink(final PluginSetting pluginSetting, final PluginFactory pluginFactory, + final SinkContext sinkContext, final AwsCredentialsSupplier awsCredentialsSupplier) { super(pluginSetting, Integer.MAX_VALUE, INITIALIZE_RETRY_WAIT_TIME_MS); this.awsCredentialsSupplier = awsCredentialsSupplier; + this.sinkContext = sinkContext; bulkRequestTimer = pluginMetrics.timer(BULKREQUEST_LATENCY); bulkRequestErrorsCounter = pluginMetrics.counter(BULKREQUEST_ERRORS); dynamicIndexDroppedEvents = pluginMetrics.counter(DYNAMIC_INDEX_DROPPED_EVENTS); @@ -302,7 +307,7 @@ private SerializedJson getDocument(final Event event) { String docId = (documentIdField != null) ? event.get(documentIdField, String.class) : null; String routing = (routingField != null) ? 
event.get(routingField, String.class) : null; - final String document = DocumentBuilder.build(event, documentRootKey); + final String document = DocumentBuilder.build(event, documentRootKey, Objects.nonNull(sinkContext)?sinkContext.getTagsTargetKey():null); return SerializedJson.fromStringAndOptionals(document, docId, routing); } diff --git a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DocumentBuilder.java b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DocumentBuilder.java index 68b18723fe..81e484904b 100644 --- a/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DocumentBuilder.java +++ b/data-prepper-plugins/opensearch/src/main/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DocumentBuilder.java @@ -4,7 +4,7 @@ public final class DocumentBuilder { - public static String build(final Event event, final String documentRootKey) { + public static String build(final Event event, final String documentRootKey, final String tagsTargetKey) { if (documentRootKey != null && event.containsKey(documentRootKey)) { final String document = event.getAsJsonString(documentRootKey); if (document == null || !document.startsWith("{")) { @@ -12,6 +12,6 @@ public static String build(final Event event, final String documentRootKey) { } return document; } - return event.toJsonString(); + return event.jsonBuilder().includeTags(tagsTargetKey).toJsonString(); } } diff --git a/data-prepper-plugins/opensearch/src/test/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DocumentBuilderTest.java b/data-prepper-plugins/opensearch/src/test/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DocumentBuilderTest.java index de46b0ba58..1277303bd0 100644 --- a/data-prepper-plugins/opensearch/src/test/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DocumentBuilderTest.java +++ 
b/data-prepper-plugins/opensearch/src/test/java/org/opensearch/dataprepper/plugins/sink/opensearch/index/DocumentBuilderTest.java @@ -11,6 +11,7 @@ import org.opensearch.dataprepper.model.event.Event; import org.opensearch.dataprepper.model.event.JacksonEvent; +import java.util.List; import java.util.Map; import java.util.UUID; import java.util.stream.Stream; @@ -24,6 +25,8 @@ public class DocumentBuilderTest { private String random; private Event event; private String expectedOutput; + private String expectedOutputWithTags; + private final String tagsKey = "tags"; private ObjectMapper objectMapper = new ObjectMapper(); @@ -37,7 +40,9 @@ public void setup() throws JsonProcessingException { .withData(data) .withEventType("TestEvent") .build(); + event.getMetadata().addTags(List.of("tag1")); expectedOutput = objectMapper.writeValueAsString(data); + expectedOutputWithTags = event.jsonBuilder().includeTags(tagsKey).toJsonString(); } @ParameterizedTest @@ -45,16 +50,26 @@ public void setup() throws JsonProcessingException { @ValueSource(strings = {"missingObject", "/"}) public void buildWillReturnFullObject(final String documentRootKey) { - final String doc = DocumentBuilder.build(event, documentRootKey); + final String doc = DocumentBuilder.build(event, documentRootKey, null); assertThat(doc, is(equalTo(expectedOutput))); } + @ParameterizedTest + @NullSource + @ValueSource(strings = {"missingObject", "/"}) + public void buildWillReturnObjectWithTags(final String documentRootKey) { + + final String doc = DocumentBuilder.build(event, documentRootKey, tagsKey); + + assertThat(doc, is(equalTo(expectedOutputWithTags))); + } + @ParameterizedTest @MethodSource("provideSingleItemKeys") public void buildWillReturnSingleObject(final String documentRootKey, final Object expectedResult) { - final String doc = DocumentBuilder.build(event, documentRootKey); + final String doc = DocumentBuilder.build(event, documentRootKey, null); assertThat(doc, is(equalTo(String.format("{\"data\": 
%s}", expectedResult)))); } diff --git a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/S3SinkServiceIT.java b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/S3SinkServiceIT.java index c635650546..cfa9d3657b 100644 --- a/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/S3SinkServiceIT.java +++ b/data-prepper-plugins/s3-sink/src/integrationTest/java/org/opensearch/dataprepper/plugins/sink/S3SinkServiceIT.java @@ -134,7 +134,7 @@ void verify_flushed_records_into_s3_bucket() { } private S3SinkService createObjectUnderTest() { - return new S3SinkService(s3SinkConfig, bufferFactory, codec, s3Client, pluginMetrics); + return new S3SinkService(s3SinkConfig, bufferFactory, codec, s3Client, null, pluginMetrics); } private int gets3ObjectCount() { @@ -198,4 +198,4 @@ private static Map generateJson() { UUID.randomUUID().toString(), UUID.randomUUID().toString())); return jsonObject; } -} \ No newline at end of file +} diff --git a/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/S3Sink.java b/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/S3Sink.java index 1dc6963c23..a4baa538ae 100644 --- a/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/S3Sink.java +++ b/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/S3Sink.java @@ -15,6 +15,7 @@ import org.opensearch.dataprepper.model.plugin.PluginFactory; import org.opensearch.dataprepper.model.record.Record; import org.opensearch.dataprepper.model.sink.AbstractSink; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.model.sink.Sink; import org.opensearch.dataprepper.plugins.sink.accumulator.BufferFactory; import org.opensearch.dataprepper.plugins.sink.accumulator.BufferTypeOptions; @@ -26,6 +27,7 @@ import 
software.amazon.awssdk.services.s3.S3Client; import java.util.Collection; +import java.util.Objects; /** * Implementation class of s3-sink plugin. It is responsible for receive the collection of @@ -40,6 +42,7 @@ public class S3Sink extends AbstractSink> { private volatile boolean sinkInitialized; private final S3SinkService s3SinkService; private final BufferFactory bufferFactory; + private final SinkContext sinkContext; /** * @param pluginSetting dp plugin settings. @@ -50,9 +53,11 @@ public class S3Sink extends AbstractSink> { public S3Sink(final PluginSetting pluginSetting, final S3SinkConfig s3SinkConfig, final PluginFactory pluginFactory, + final SinkContext sinkContext, final AwsCredentialsSupplier awsCredentialsSupplier) { super(pluginSetting); this.s3SinkConfig = s3SinkConfig; + this.sinkContext = sinkContext; final PluginModel codecConfiguration = s3SinkConfig.getCodec(); final PluginSetting codecPluginSettings = new PluginSetting(codecConfiguration.getPluginName(), codecConfiguration.getPluginSettings()); @@ -65,7 +70,7 @@ public S3Sink(final PluginSetting pluginSetting, bufferFactory = new InMemoryBufferFactory(); } final S3Client s3Client = ClientFactory.createS3Client(s3SinkConfig, awsCredentialsSupplier); - s3SinkService = new S3SinkService(s3SinkConfig, bufferFactory, codec, s3Client, pluginMetrics); + s3SinkService = new S3SinkService(s3SinkConfig, bufferFactory, codec, s3Client, Objects.nonNull(sinkContext) ? 
sinkContext.getTagsTargetKey() : null, pluginMetrics); } @Override @@ -105,4 +110,4 @@ public void doOutput(final Collection> records) { } s3SinkService.output(records); } -} \ No newline at end of file +} diff --git a/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/S3SinkService.java b/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/S3SinkService.java index 5b627faa1b..34c49a9b25 100644 --- a/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/S3SinkService.java +++ b/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/S3SinkService.java @@ -57,6 +57,7 @@ public class S3SinkService { private final Counter numberOfRecordsSuccessCounter; private final Counter numberOfRecordsFailedCounter; private final DistributionSummary s3ObjectSizeSummary; + private final String tagsTargetKey; /** * @param s3SinkConfig s3 sink related configuration. @@ -66,11 +67,12 @@ public class S3SinkService { * @param pluginMetrics metrics. 
*/ public S3SinkService(final S3SinkConfig s3SinkConfig, final BufferFactory bufferFactory, - final Codec codec, final S3Client s3Client, final PluginMetrics pluginMetrics) { + final Codec codec, final S3Client s3Client, final String tagsTargetKey, final PluginMetrics pluginMetrics) { this.s3SinkConfig = s3SinkConfig; this.bufferFactory = bufferFactory; this.codec = codec; this.s3Client = s3Client; + this.tagsTargetKey = tagsTargetKey; reentrantLock = new ReentrantLock(); bufferedEventHandles = new LinkedList<>(); @@ -102,7 +104,7 @@ void output(Collection> records) { final Event event = record.getData(); final String encodedEvent; - encodedEvent = codec.parse(event); + encodedEvent = codec.parse(event, tagsTargetKey); final byte[] encodedBytes = encodedEvent.getBytes(); currentBuffer.writeEvent(encodedBytes); @@ -181,4 +183,4 @@ protected String generateKey() { final String namePattern = ObjectKey.objectFileName(s3SinkConfig); return (!pathPrefix.isEmpty()) ? pathPrefix + namePattern : namePattern; } -} \ No newline at end of file +} diff --git a/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/codec/Codec.java b/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/codec/Codec.java index 676526dbb5..06b104287f 100644 --- a/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/codec/Codec.java +++ b/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/codec/Codec.java @@ -15,8 +15,9 @@ public interface Codec { /** * @param event input data. + * @param tagsTargetKey key name for including tags if not null * @return parse string. * @throws IOException exception. 
*/ - String parse(Event event) throws IOException; -} \ No newline at end of file + String parse(final Event event, final String tagsTargetKey) throws IOException; +} diff --git a/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/codec/JsonCodec.java b/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/codec/JsonCodec.java index c3e2886e7c..78847f3145 100644 --- a/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/codec/JsonCodec.java +++ b/data-prepper-plugins/s3-sink/src/main/java/org/opensearch/dataprepper/plugins/sink/codec/JsonCodec.java @@ -19,8 +19,8 @@ public class JsonCodec implements Codec { * Generates a serialized json string of the Event */ @Override - public String parse(Event event) throws IOException { + public String parse(final Event event, final String tagsTargetKey) throws IOException { Objects.requireNonNull(event); - return event.toJsonString(); + return event.jsonBuilder().includeTags(tagsTargetKey).toJsonString(); } -} \ No newline at end of file +} diff --git a/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/S3SinkServiceTest.java b/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/S3SinkServiceTest.java index f89eb9026f..36302133a6 100644 --- a/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/S3SinkServiceTest.java +++ b/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/S3SinkServiceTest.java @@ -31,6 +31,7 @@ import software.amazon.awssdk.awscore.exception.AwsServiceException; import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.s3.S3Client; +import org.apache.commons.lang3.RandomStringUtils; import java.io.IOException; import java.time.Duration; @@ -83,11 +84,13 @@ class S3SinkServiceTest { private Counter snapshotSuccessCounter; private DistributionSummary s3ObjectSizeSummary; 
private Random random; + private String tagsTargetKey; @BeforeEach void setUp() { random = new Random(); + tagsTargetKey = RandomStringUtils.randomAlphabetic(5); s3SinkConfig = mock(S3SinkConfig.class); s3Client = mock(S3Client.class); ThresholdOptions thresholdOptions = mock(ThresholdOptions.class); @@ -132,7 +135,7 @@ void setUp() { } private S3SinkService createObjectUnderTest() { - return new S3SinkService(s3SinkConfig, bufferFactory, codec, s3Client, pluginMetrics); + return new S3SinkService(s3SinkConfig, bufferFactory, codec, s3Client, tagsTargetKey, pluginMetrics); } @Test @@ -181,7 +184,7 @@ void test_output_with_threshold_set_as_more_then_zero_event_count() throws IOExc when(bufferFactory.getBuffer()).thenReturn(buffer); when(s3SinkConfig.getThresholdOptions().getEventCount()).thenReturn(5); - when(codec.parse(any())).thenReturn("{\"message\":\"31824252-adba-4c47-a2ac-05d16c5b8140\"}"); + when(codec.parse(any(), anyString())).thenReturn("{\"message\":\"31824252-adba-4c47-a2ac-05d16c5b8140\"}"); S3SinkService s3SinkService = createObjectUnderTest(); assertNotNull(s3SinkService); s3SinkService.output(generateRandomStringEventRecord()); @@ -202,7 +205,7 @@ void test_output_with_threshold_set_as_zero_event_count() throws IOException { when(s3SinkConfig.getThresholdOptions().getEventCount()).thenReturn(0); when(s3SinkConfig.getThresholdOptions().getMaximumSize()).thenReturn(ByteCount.parse("2kb")); - when(codec.parse(any())).thenReturn("{\"message\":\"31824252-adba-4c47-a2ac-05d16c5b8140\"}"); + when(codec.parse(any(), anyString())).thenReturn("{\"message\":\"31824252-adba-4c47-a2ac-05d16c5b8140\"}"); S3SinkService s3SinkService = createObjectUnderTest(); assertNotNull(s3SinkService); s3SinkService.output(generateRandomStringEventRecord()); @@ -218,7 +221,7 @@ void test_output_with_uploadedToS3_success() throws IOException { doNothing().when(buffer).flushToS3(any(S3Client.class), anyString(), any(String.class)); 
when(bufferFactory.getBuffer()).thenReturn(buffer); - when(codec.parse(any())).thenReturn("{\"message\":\"31824252-adba-4c47-a2ac-05d16c5b8140\"}"); + when(codec.parse(any(), anyString())).thenReturn("{\"message\":\"31824252-adba-4c47-a2ac-05d16c5b8140\"}"); S3SinkService s3SinkService = createObjectUnderTest(); assertNotNull(s3SinkService); assertThat(s3SinkService, instanceOf(S3SinkService.class)); @@ -236,7 +239,7 @@ void test_output_with_uploadedToS3_success_records_byte_count() throws IOExcepti final long objectSize = random.nextInt(1_000_000) + 10_000; when(buffer.getSize()).thenReturn(objectSize); - when(codec.parse(any())).thenReturn(UUID.randomUUID().toString()); + when(codec.parse(any(), anyString())).thenReturn(UUID.randomUUID().toString()); final S3SinkService s3SinkService = createObjectUnderTest(); s3SinkService.output(generateRandomStringEventRecord()); @@ -247,7 +250,7 @@ void test_output_with_uploadedToS3_success_records_byte_count() throws IOExcepti void test_output_with_uploadedToS3_failed() throws IOException { when(s3SinkConfig.getBucketName()).thenReturn(UUID.randomUUID().toString()); when(s3SinkConfig.getMaxUploadRetries()).thenReturn(3); - when(codec.parse(any())).thenReturn("{\"message\":\"31824252-adba-4c47-a2ac-05d16c5b8140\"}"); + when(codec.parse(any(), anyString())).thenReturn("{\"message\":\"31824252-adba-4c47-a2ac-05d16c5b8140\"}"); S3SinkService s3SinkService = createObjectUnderTest(); assertNotNull(s3SinkService); assertThat(s3SinkService, instanceOf(S3SinkService.class)); @@ -267,7 +270,7 @@ void test_output_with_uploadedToS3_failure_does_not_record_byte_count() throws I final long objectSize = random.nextInt(1_000_000) + 10_000; when(buffer.getSize()).thenReturn(objectSize); - when(codec.parse(any())).thenReturn(UUID.randomUUID().toString()); + when(codec.parse(any(), anyString())).thenReturn(UUID.randomUUID().toString()); final S3SinkService s3SinkService = createObjectUnderTest(); final Event event = 
JacksonEvent.fromMessage(UUID.randomUUID().toString()); s3SinkService.output(Collections.singletonList(new Record<>(event))); @@ -318,7 +321,7 @@ void output_will_release_all_handles_since_a_flush() throws IOException { final long objectSize = random.nextInt(1_000_000) + 10_000; when(buffer.getSize()).thenReturn(objectSize); - when(codec.parse(any())).thenReturn(UUID.randomUUID().toString()); + when(codec.parse(any(), anyString())).thenReturn(UUID.randomUUID().toString()); final S3SinkService s3SinkService = createObjectUnderTest(); final Collection> records = generateRandomStringEventRecord(); s3SinkService.output(records); @@ -339,7 +342,7 @@ void output_will_skip_releasing_events_without_EventHandle_objects() throws IOEx final long objectSize = random.nextInt(1_000_000) + 10_000; when(buffer.getSize()).thenReturn(objectSize); - when(codec.parse(any())).thenReturn(UUID.randomUUID().toString()); + when(codec.parse(any(), anyString())).thenReturn(UUID.randomUUID().toString()); final S3SinkService s3SinkService = createObjectUnderTest(); final Collection> records = generateRandomStringEventRecord(); records.stream() @@ -370,7 +373,7 @@ void output_will_release_all_handles_since_a_flush_when_S3_fails() throws IOExce final long objectSize = random.nextInt(1_000_000) + 10_000; when(buffer.getSize()).thenReturn(objectSize); - when(codec.parse(any())).thenReturn(UUID.randomUUID().toString()); + when(codec.parse(any(), anyString())).thenReturn(UUID.randomUUID().toString()); final S3SinkService s3SinkService = createObjectUnderTest(); final List> records = generateEventRecords(1); s3SinkService.output(records); @@ -391,7 +394,7 @@ void output_will_release_only_new_handles_since_a_flush() throws IOException { final long objectSize = random.nextInt(1_000_000) + 10_000; when(buffer.getSize()).thenReturn(objectSize); - when(codec.parse(any())).thenReturn(UUID.randomUUID().toString()); + when(codec.parse(any(), anyString())).thenReturn(UUID.randomUUID().toString()); final 
S3SinkService s3SinkService = createObjectUnderTest(); final Collection> records = generateRandomStringEventRecord(); s3SinkService.output(records); @@ -422,7 +425,7 @@ void output_will_release_only_new_handles_since_a_flush_when_S3_fails() throws I final long objectSize = random.nextInt(1_000_000) + 10_000; when(buffer.getSize()).thenReturn(objectSize); - when(codec.parse(any())).thenReturn(UUID.randomUUID().toString()); + when(codec.parse(any(), anyString())).thenReturn(UUID.randomUUID().toString()); final S3SinkService s3SinkService = createObjectUnderTest(); final List> records = generateEventRecords(1); s3SinkService.output(records); @@ -467,4 +470,4 @@ private byte[] generateByteArray() { } return bytes; } -} \ No newline at end of file +} diff --git a/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/S3SinkTest.java b/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/S3SinkTest.java index 25941e718f..b92da61bc6 100644 --- a/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/S3SinkTest.java +++ b/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/S3SinkTest.java @@ -14,6 +14,7 @@ import org.opensearch.dataprepper.model.event.Event; import org.opensearch.dataprepper.model.plugin.PluginFactory; import org.opensearch.dataprepper.model.record.Record; +import org.opensearch.dataprepper.model.sink.SinkContext; import org.opensearch.dataprepper.model.types.ByteCount; import org.opensearch.dataprepper.plugins.sink.accumulator.BufferTypeOptions; import org.opensearch.dataprepper.plugins.sink.codec.Codec; @@ -49,11 +50,13 @@ class S3SinkTest { private PluginSetting pluginSetting; private PluginFactory pluginFactory; private AwsCredentialsSupplier awsCredentialsSupplier; + private SinkContext sinkContext; @BeforeEach void setUp() { s3SinkConfig = mock(S3SinkConfig.class); + sinkContext = mock(SinkContext.class); ThresholdOptions 
thresholdOptions = mock(ThresholdOptions.class); AwsAuthenticationOptions awsAuthenticationOptions = mock(AwsAuthenticationOptions.class); Codec codec = mock(JsonCodec.class); @@ -80,7 +83,7 @@ void setUp() { } private S3Sink createObjectUnderTest() { - return new S3Sink(pluginSetting, s3SinkConfig, pluginFactory, awsCredentialsSupplier); + return new S3Sink(pluginSetting, s3SinkConfig, pluginFactory, sinkContext, awsCredentialsSupplier); } @Test @@ -107,4 +110,4 @@ void test_doOutput_with_empty_records() { Collection> records = new ArrayList<>(); s3Sink.doOutput(records); } -} \ No newline at end of file +} diff --git a/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/codec/JsonCodecTest.java b/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/codec/JsonCodecTest.java index 685bbd4e32..d2055cb0c8 100644 --- a/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/codec/JsonCodecTest.java +++ b/data-prepper-plugins/s3-sink/src/test/java/org/opensearch/dataprepper/plugins/sink/codec/JsonCodecTest.java @@ -11,6 +11,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.UUID; import org.junit.jupiter.api.Test; @@ -29,7 +30,7 @@ void parse_with_events_output_stream_json_codec() throws IOException { String value2 = UUID.randomUUID().toString(); eventData.put("key2", value2); final JacksonEvent event = JacksonLog.builder().withData(eventData).withEventType("LOG").build(); - String output = createObjectUnderTest().parse(event); + String output = createObjectUnderTest().parse(event, null); assertNotNull(output); ObjectMapper objectMapper = new ObjectMapper(); @@ -41,7 +42,32 @@ void parse_with_events_output_stream_json_codec() throws IOException { assertThat(deserializedData.get("key2"), equalTo(value2)); } + @Test + void 
parse_with_events_output_stream_json_codec_with_tags() throws IOException { + + final Map eventData = new HashMap<>(); + String value1 = UUID.randomUUID().toString(); + eventData.put("key1", value1); + String value2 = UUID.randomUUID().toString(); + eventData.put("key2", value2); + final JacksonEvent event = JacksonLog.builder().withData(eventData).withEventType("LOG").build(); + List tagsList = List.of("tag1"); + event.getMetadata().addTags(tagsList); + String output = createObjectUnderTest().parse(event, "tags"); + assertNotNull(output); + + ObjectMapper objectMapper = new ObjectMapper(); + Map deserializedData = objectMapper.readValue(output, Map.class); + assertThat(deserializedData, notNullValue()); + assertThat(deserializedData.get("key1"), notNullValue()); + assertThat(deserializedData.get("key1"), equalTo(value1)); + assertThat(deserializedData.get("key2"), notNullValue()); + assertThat(deserializedData.get("key2"), equalTo(value2)); + assertThat(deserializedData.get("tags"), notNullValue()); + assertThat(deserializedData.get("tags"), equalTo(tagsList)); + } + private JsonCodec createObjectUnderTest() { return new JsonCodec(); } -} \ No newline at end of file +} From 9ed1529c8346669aafc2acd38d3d4b14cee5c1eb Mon Sep 17 00:00:00 2001 From: Taylor Gray Date: Tue, 27 Jun 2023 12:02:36 -0500 Subject: [PATCH 2/9] Add basic opensearch source documentation for config (#2940) Signed-off-by: Taylor Gray --- .../opensearch-source/README.md | 200 ++++++++++++++++++ 1 file changed, 200 insertions(+) create mode 100644 data-prepper-plugins/opensearch-source/README.md diff --git a/data-prepper-plugins/opensearch-source/README.md b/data-prepper-plugins/opensearch-source/README.md new file mode 100644 index 0000000000..043c606d8d --- /dev/null +++ b/data-prepper-plugins/opensearch-source/README.md @@ -0,0 +1,200 @@ +# OpenSearch Source + +This is the Data Prepper OpenSearch source plugin that processes indices for either OpenSearch, Elasticsearch, +or Amazon OpenSearch
Service clusters. It is ideal for migrating index data from a cluster. + +Note: Only fully tested versions will be listed below. It is likely many more versions are supported already, but it is untested. + +The OpenSearch source is compatible with the following OpenSearch versions: +* 2.5 + +And is compatible with the following Elasticsearch versions: +* 7.10 + +# Usages + +### Minimum required config with username and password + +```yaml +opensearch-source-pipeline: + source: + opensearch: + connection: + insecure: true + hosts: [ "https://localhost:9200" ] + username: "username" + password: "password" +``` + +### Full config example + +```yaml +opensearch-source-pipeline: + source: + opensearch: + indices: + include: + - index_name_regex: "test-index-.*" + exclude: + - index_name_regex: "test-index-[1-9].*" + scheduling: + rate: "PT1H" + start_time: "2023-06-02T22:01:30.00Z" + job_count: 2 + search_options: + search_context_type: "none" + batch_size: 1000 + connection: + insecure: false + cert: "/path/to/cert.crt" + socket_timeout: "100ms" + connection_timeout: "100ms" + hosts: [ "https://localhost:9200" ] + username: "username" + password: "password" +``` + +### Amazon OpenSearch Service + +The OpenSearch source can also be configured for an Amazon OpenSearch Service domain. + +```yaml +opensearch-source-pipeline: + source: + opensearch: + connection: + insecure: true + hosts: [ "https://search-my-domain-soopywaovobopgs8ywurr3utsu.us-east-1.es.amazonaws.com" ] + aws: + region: "us-east-1" + sts_role_arn: "arn:aws:iam::123456789012:role/my-domain-role" +``` + +### Using Metadata + +When the OpenSearch source constructs Data Prepper Events from documents in the cluster, the +document index is stored in the `EventMetadata` with an `index` key, and the document_id is +stored in the `EventMetadata` with a `document_id` key. This allows conditional routing based on the index or document_id, +among other things. 
For example, one could send to an OpenSearch sink and use the same index and document_id from the source cluster in +the destination cluster. A full config example for this use case is below + +```yaml +opensearch-source-pipeline: + source: + opensearch: + connection: + insecure: true + hosts: [ "https://source-cluster:9200" ] + username: "username" + password: "password" + processor: + - add_entries: + entries: + - key: "document_id" + value_expression: "getMetadata(\"document_id\")" + - key: "index" + value_expression: "getMetadata(\"index\")" + sink: + - opensearch: + hosts: [ "https://sink-cluster:9200" ] + username: "username" + password: "password" + document_id_field: "document_id" + index: "copied-${index}" +``` + +## Configuration + +- `hosts` (Required) : A list of IP addresses of OpenSearch or Elasticsearch nodes. + + +- `username` (Optional) : A String of username used in the internal users of OpenSearch cluster. Default is null. + + +- `password` (Optional) : A String of password used in the internal users of OpenSearch cluster. Default is null. + + +- `aws` (Optional) : AWS configurations. See [AWS Configuration](#aws_configuration) for details. SigV4 is enabled by default when this option is used. + + +- `search_options` (Optional) : See [Search Configuration](#search_configuration) for details + + +- `indices` (Optional): See [Indices Configurations](#indices_configuration) for filtering options. + + +- `scheduling` (Optional): See [Scheduling Configuration](#scheduling_configuration) for details + + +- `connection` (Optional): See [Connection Configuration](#connection_configuration) + +### AWS Configuration + +* `region` (Optional) : The AWS region to use for credentials. Defaults to [standard SDK behavior to determine the region](https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/region-selection.html). + + +* `sts_role_arn` (Optional) : The STS role to assume for requests to AWS. 
Defaults to null, which will use the [standard SDK behavior for credentials](https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/credentials.html). + + +* `sts_header_overrides` (Optional): A map of header overrides to make when assuming the IAM role for the source plugin. + +### Search Configuration + +* `search_context_type` (Optional) : A direct override for which type of search context should be used to search documents. + Options include `point_in_time`, `scroll`, or `none` (just search after). + By default, the OpenSearch source will attempt to use `point_in_time` on a cluster by auto-detecting that the cluster version and distribution +supports Point in Time. If the cluster does not support `point_in_time`, then `scroll` is the default behavior. + + +* `batch_size` (Optional) : The amount of documents to read in at once while searching. +This size is passed to the search requests for all search context types (`none` (search_after), `point_in_time`, or `scroll`). +Defaults to 1,000. + +### Scheduling Configuration + +Schedule the start time and amount of times an index should be processed. For example, +a `rate` of `PT1H` and a `job_count` of 3 would result in each index getting processed 3 times, starting at `start_time` +and then every hour after the first time the index is processed. + +* `rate` (Optional) : A String that indicates the rate to process an index based on the `job_count`. +Supports ISO_8601 notation Strings ("PT20.345S", "PT15M", etc.) as well as simple notation Strings for seconds ("60s") and milliseconds ("1500ms"). +Defaults to 8 hours, and is only applicable when `job_count` is greater than 1. + + + +* `job_count` (Optional) : An Integer that specifies how many times each index should be processed. Defaults to 1. + + + +* `start_time` (Optional) : A String in the format of a timestamp that is compatible with Java Instant (i.e. `2023-06-02T22:01:30.00Z`). +Processing will be delayed until this timestamp is reached. 
The default start time is to start immediately. + +### Connection Configuration + +* `insecure` (Optional): A boolean flag to turn off SSL certificate verification. If set to true, CA certificate verification will be turned off and insecure HTTP requests will be sent. Defaults to false. + + +* `cert` (Optional) : CA certificate that is pem encoded. Accepts both .pem or .crt. This enables the client to trust the CA that has signed the certificate that the OpenSearch cluster is using. Default is null. + + +* `socket_timeout` (Optional) : A String that indicates the timeout duration for waiting for data. Supports ISO_8601 notation Strings ("PT20.345S", "PT15M", etc.) as well as simple notation Strings for seconds ("60s") and milliseconds ("1500ms"). If this timeout value is not set, the underlying Apache HttpClient would rely on operating system settings for managing socket timeouts. + + +* `connection_timeout` (Optional) : A String that indicates the timeout duration used when requesting a connection from the connection manager. Supports ISO_8601 notation Strings ("PT20.345S", "PT15M", etc.) as well as simple notation Strings for seconds ("60s") and milliseconds ("1500ms"). If this timeout value is either negative or not set, the underlying Apache HttpClient would rely on operating system settings for managing connection timeouts. + +### Indices Configuration + +Can be used to filter which indices should be processed. +An index will be processed if its name matches one of the `index_name_regex` +patterns in the `include` list, and does not match any of the patterns in the `exclude` list. +The default behavior is to process all indices. + +* `include` (Optional) : A List of [Index Configuration](#index_configuration) that defines which indices should be processed. Defaults to an empty list. + + +* `exclude` (Optional) : A List of [Index Configuration](#index_configuration) that defines which indices should not be processed.
+ +#### Index Configuration + +* `index_name_regex`: A regex pattern to represent the index names for filtering + From 8bb96ddcf23859e0e7b55c3a9add5d77eddbccb0 Mon Sep 17 00:00:00 2001 From: Ajeesh Gopalakrishnakurup <61016936+ajeeshakd@users.noreply.github.com> Date: Tue, 27 Jun 2023 17:11:52 +0000 Subject: [PATCH 3/9] Kafka source integration test (#2891) * Integration testcases Signed-off-by: Ajeesh Gopalakrishnakurup * Fix for the Integration testcases Signed-off-by: Ajeesh Gopalakrishnakurup * Fix for the white source issue Signed-off-by: Ajeesh Gopalakrishnakurup * Fixes for the merge conflicts Signed-off-by: Ajeesh Gopalakrishnakurup --------- Signed-off-by: Ajeesh Gopalakrishnakurup --- .../kafka-plugins/build.gradle | 43 +++++ .../EmbeddedKafkaClusterSingleNode.java | 161 ++++++++++++++++++ .../kafka/source/EmbeddedKafkaServer.java | 142 +++++++++++++++ .../kafka/source/EmbeddedZooKeeperServer.java | 46 +++++ .../plugins/kafka/source/JSONConsumerIT.java | 114 +++++++++++++ .../kafka/source/PlainTextConsumerIT.java | 104 +++++++++++ .../main/resources/sample-pipelines-int.yaml | 34 ++++ 7 files changed, 644 insertions(+) create mode 100644 data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedKafkaClusterSingleNode.java create mode 100644 data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedKafkaServer.java create mode 100644 data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedZooKeeperServer.java create mode 100644 data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/JSONConsumerIT.java create mode 100644 data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/PlainTextConsumerIT.java create mode 100644 
data-prepper-plugins/kafka-plugins/src/main/resources/sample-pipelines-int.yaml diff --git a/data-prepper-plugins/kafka-plugins/build.gradle b/data-prepper-plugins/kafka-plugins/build.gradle index 831c1c0963..f20e4928a8 100644 --- a/data-prepper-plugins/kafka-plugins/build.gradle +++ b/data-prepper-plugins/kafka-plugins/build.gradle @@ -16,10 +16,53 @@ dependencies { implementation 'org.apache.commons:commons-lang3:3.12.0' implementation 'io.confluent:kafka-avro-serializer:7.3.3' implementation 'io.confluent:kafka-schema-registry-client:7.3.3' + implementation 'io.confluent:kafka-avro-serializer:7.3.3' + implementation 'io.confluent:kafka-schema-registry-client:7.3.3' + implementation 'io.confluent:kafka-schema-registry:7.3.3:tests' testImplementation 'org.mockito:mockito-inline:4.1.0' testImplementation 'org.yaml:snakeyaml:2.0' testImplementation testLibs.spring.test testImplementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.14.2' testImplementation project(':data-prepper-test-common') testImplementation project(':data-prepper-plugins:blocking-buffer') + testImplementation 'org.mockito:mockito-inline:4.1.0' + testImplementation 'org.apache.kafka:kafka_2.13:3.4.0' + testImplementation 'org.apache.kafka:kafka_2.13:3.4.0:test' + testImplementation 'org.apache.curator:curator-test:5.5.0' + testImplementation 'io.confluent:kafka-schema-registry:7.4.0' + testImplementation 'junit:junit:4.13.1' + testImplementation 'org.apache.kafka:kafka-clients:3.4.0:test' + testImplementation 'org.apache.kafka:connect-json:3.4.0' +} + +test { + useJUnitPlatform() +} + +sourceSets { + integrationTest { + java { + compileClasspath += main.output + test.output + runtimeClasspath += main.output + test.output + srcDir file('src/integrationTest/java') + } + //resources.srcDir file('src/integrationTest/resources') + } } + +configurations { + integrationTestImplementation.extendsFrom testImplementation + integrationTestRuntime.extendsFrom testRuntime +} + +task 
integrationTest(type: Test) { + group = 'verification' + testClassesDirs = sourceSets.integrationTest.output.classesDirs + + useJUnitPlatform() + + filter { + includeTestsMatching '*IT' + } +} + diff --git a/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedKafkaClusterSingleNode.java b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedKafkaClusterSingleNode.java new file mode 100644 index 0000000000..ec791c221f --- /dev/null +++ b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedKafkaClusterSingleNode.java @@ -0,0 +1,161 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.kafka.source; + +import io.confluent.kafka.schemaregistry.RestApp; +import io.confluent.kafka.schemaregistry.avro.AvroCompatibilityLevel; +import io.confluent.kafka.schemaregistry.rest.SchemaRegistryConfig; +import kafka.server.KafkaConfig$; +import org.junit.rules.ExternalResource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.Properties; + +/** + * Runs an in-memory, "embedded" Kafka cluster with 1 ZooKeeper instance, 1 Kafka broker, and 1 + * Confluent Schema Registry instance. 
+ */ +public class EmbeddedKafkaClusterSingleNode extends ExternalResource { + + private static final Logger log = LoggerFactory.getLogger(EmbeddedKafkaClusterSingleNode.class); + private static final int DEFAULT_BROKER_PORT = 0; + private static final String KAFKA_SCHEMAS_TOPIC = "_schemas"; + private static final String AVRO_COMPATIBILITY_TYPE = AvroCompatibilityLevel.NONE.name; + private static final String KAFKASTORE_OPERATION_TIMEOUT_MS = "60000"; + private static final String KAFKASTORE_DEBUG = "true"; + private static final String KAFKASTORE_INIT_TIMEOUT = "90000"; + + private EmbeddedZooKeeperServer zookeeper; + private EmbeddedKafkaServer broker; + private RestApp schemaRegistry; + private final Properties brokerConfig; + private boolean running; + + public EmbeddedKafkaClusterSingleNode() { + this(new Properties()); + } + + public EmbeddedKafkaClusterSingleNode(final Properties brokerConfig) { + this.brokerConfig = new Properties(); + this.brokerConfig.put(SchemaRegistryConfig.KAFKASTORE_TIMEOUT_CONFIG, KAFKASTORE_OPERATION_TIMEOUT_MS); + this.brokerConfig.putAll(brokerConfig); + } + + /** + * Creates and starts the cluster. 
+ */ + public void start() throws Exception { + log.debug("Initiating embedded Kafka cluster startup"); + log.debug("Starting a ZooKeeper instance..."); + zookeeper = new EmbeddedZooKeeperServer(); + log.debug("ZooKeeper instance is running at {}", zookeeper.connectString()); + + final Properties effectiveBrokerConfig = effectiveBrokerConfigFrom(brokerConfig, zookeeper); + log.debug("Starting a Kafka instance on ...", + effectiveBrokerConfig.getProperty(KafkaConfig$.MODULE$.ZkConnectDoc())); + broker = new EmbeddedKafkaServer(effectiveBrokerConfig); + log.debug("Kafka instance is running at {}, connected to ZooKeeper at {}", + broker.brokerList(), broker.zookeeperConnect()); + + final Properties schemaRegistryProps = new Properties(); + + schemaRegistryProps.put(SchemaRegistryConfig.KAFKASTORE_TIMEOUT_CONFIG, KAFKASTORE_OPERATION_TIMEOUT_MS); + schemaRegistryProps.put(SchemaRegistryConfig.DEBUG_CONFIG, KAFKASTORE_DEBUG); + schemaRegistryProps.put(SchemaRegistryConfig.KAFKASTORE_INIT_TIMEOUT_CONFIG, KAFKASTORE_INIT_TIMEOUT); + schemaRegistryProps.put(SchemaRegistryConfig.KAFKASTORE_BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092"); + + + schemaRegistry = new RestApp(0, zookeeperConnect(), KAFKA_SCHEMAS_TOPIC, "none", schemaRegistryProps); + schemaRegistry.start(); + running = true; + } + + private Properties effectiveBrokerConfigFrom(final Properties brokerConfig, final EmbeddedZooKeeperServer zookeeper) { + final Properties effectiveConfig = new Properties(); + effectiveConfig.putAll(brokerConfig); + effectiveConfig.put(KafkaConfig$.MODULE$.ZkConnectProp(), zookeeper.connectString()); + effectiveConfig.put(KafkaConfig$.MODULE$.ZkSessionTimeoutMsProp(), 30 * 1000); + effectiveConfig.put(KafkaConfig$.MODULE$.ZkConnectionTimeoutMsProp(), 60 * 1000); + effectiveConfig.put(KafkaConfig$.MODULE$.DeleteTopicEnableProp(), true); + effectiveConfig.put(KafkaConfig$.MODULE$.LogCleanerDedupeBufferSizeProp(), 2 * 1024 * 1024L); + 
effectiveConfig.put(KafkaConfig$.MODULE$.GroupMinSessionTimeoutMsProp(), 0); + effectiveConfig.put(KafkaConfig$.MODULE$.OffsetsTopicReplicationFactorProp(), (short) 1); + effectiveConfig.put(KafkaConfig$.MODULE$.OffsetsTopicPartitionsProp(), 1); + effectiveConfig.put(KafkaConfig$.MODULE$.AutoCreateTopicsEnableProp(), true); + return effectiveConfig; + } + + @Override + protected void before() throws Exception { + start(); + } + + @Override + protected void after() { + stop(); + } + + /** + * Stops the cluster. + */ + public void stop() { + log.info("Stopping Confluent"); + try { + try { + if (schemaRegistry != null) { + schemaRegistry.stop(); + } + } catch (final Exception fatal) { + throw new RuntimeException(fatal); + } + if (broker != null) { + broker.stop(); + } + try { + if (zookeeper != null) { + zookeeper.stop(); + } + } catch (final IOException fatal) { + throw new RuntimeException(fatal); + } + } finally { + running = false; + } + log.info("Confluent Stopped"); + } + + public String bootstrapServers() { + return broker.brokerList(); + } + + public String zookeeperConnect() { + return zookeeper.connectString(); + } + + public String schemaRegistryUrl() { + return schemaRegistry.restConnect; + } + + public void createTopic(final String topic) { + createTopic(topic, 1, (short) 1, Collections.emptyMap()); + } + + public void createTopic(final String topic, final int partitions, final short replication) { + createTopic(topic, partitions, replication, Collections.emptyMap()); + } + + public void createTopic(final String topic, + final int partitions, + final short replication, + final Map topicConfig) { + broker.createTopic(topic, partitions, replication, topicConfig); + } + +} diff --git a/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedKafkaServer.java b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedKafkaServer.java new file 
mode 100644 index 0000000000..1bb6953ed9 --- /dev/null +++ b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedKafkaServer.java @@ -0,0 +1,142 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.kafka.source; + + +import kafka.server.KafkaConfig; +import kafka.server.KafkaConfig$; +import kafka.server.KafkaServer; +import kafka.utils.TestUtils; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.common.errors.UnknownTopicOrPartitionException; +import org.apache.kafka.common.utils.Time; +import org.junit.rules.TemporaryFolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +/** + * Runs an in-memory, "embedded" instance of a Kafka broker, which listens at `127.0.0.1:9092` by + * default. + * + * Requires a running ZooKeeper instance to connect to. By default, it expects a ZooKeeper instance + * running at `127.0.0.1:2181`. 
+ */ +public class EmbeddedKafkaServer { + + private static final Logger log = LoggerFactory.getLogger(EmbeddedKafkaServer.class); + + private static final String DEFAULT_ZK_CONNECT = "127.0.0.1:2181"; + + private final Properties effectiveConfig; + private final File logDir; + private final TemporaryFolder tmpFolder; + private final KafkaServer kafka; + + public EmbeddedKafkaServer(final Properties config) throws IOException { + tmpFolder = new TemporaryFolder(); + tmpFolder.create(); + logDir = tmpFolder.newFolder(); + effectiveConfig = effectiveConfigFrom(config); + final boolean loggingEnabled = true; + + final KafkaConfig kafkaConfig = new KafkaConfig(effectiveConfig, loggingEnabled); + log.info("Starting embedded Kafka broker (with log.dirs={} and ZK ensemble at {}) ...", + logDir, zookeeperConnect()); + kafka = TestUtils.createServer(kafkaConfig, Time.SYSTEM); + log.debug("Startup of embedded Kafka broker at {} completed (with ZK ensemble at {}) ...", + brokerList(), zookeeperConnect()); + } + + private Properties effectiveConfigFrom(final Properties initialConfig) throws IOException { + final Properties effectiveConfig = new Properties(); + effectiveConfig.put(KafkaConfig$.MODULE$.BrokerIdProp(), 1); + effectiveConfig.put(KafkaConfig$.MODULE$.NumPartitionsProp(), 1); + effectiveConfig.put(KafkaConfig$.MODULE$.AutoCreateTopicsEnableProp(), true); + effectiveConfig.put(KafkaConfig$.MODULE$.MessageMaxBytesProp(), 1000000); + effectiveConfig.put(KafkaConfig$.MODULE$.ControlledShutdownEnableProp(), true); + + effectiveConfig.putAll(initialConfig); + effectiveConfig.setProperty(KafkaConfig$.MODULE$.LogDirProp(), logDir.getAbsolutePath()); + return effectiveConfig; + } + + public String brokerList() { + return kafka.config().zkConnect(); + } + + + public String zookeeperConnect() { + return effectiveConfig.getProperty("zookeeper.connect", DEFAULT_ZK_CONNECT); + } + + public void stop() { + log.debug("Shutting down embedded Kafka broker at {} (with ZK ensemble at 
{}) ...", + brokerList(), zookeeperConnect()); + kafka.shutdown(); + kafka.awaitShutdown(); + log.debug("Removing temp folder {} with logs.dir at {} ...", tmpFolder, logDir); + tmpFolder.delete(); + log.debug("Shutdown of embedded Kafka broker at {} completed (with ZK ensemble at {}) ...", + brokerList(), zookeeperConnect()); + } + + public void createTopic(final String topic) { + createTopic(topic, 1, (short) 1, Collections.emptyMap()); + } + + public void createTopic(final String topic, final int partitions, final short replication) { + createTopic(topic, partitions, replication, Collections.emptyMap()); + } + + public void createTopic(final String topic, + final int partitions, + final short replication, + final Map topicConfig) { + log.debug("Creating topic { name: {}, partitions: {}, replication: {}, config: {} }", + topic, partitions, replication, topicConfig); + + final Properties properties = new Properties(); + properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092"); + + try (final AdminClient adminClient = AdminClient.create(properties)) { + final NewTopic newTopic = new NewTopic(topic, partitions, replication); + newTopic.configs(topicConfig); + adminClient.createTopics(Collections.singleton(newTopic)).all().get(); + } catch (final InterruptedException | ExecutionException fatal) { + throw new RuntimeException(fatal); + } + + } + + public void deleteTopic(final String topic) { + log.debug("Deleting topic {}", topic); + final Properties properties = new Properties(); + properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList()); + + try (final AdminClient adminClient = AdminClient.create(properties)) { + adminClient.deleteTopics(Collections.singleton(topic)).all().get(); + } catch (final InterruptedException e) { + throw new RuntimeException(e); + } catch (final ExecutionException e) { + if (!(e.getCause() instanceof UnknownTopicOrPartitionException)) { + throw new RuntimeException(e); + } + } + } + + KafkaServer 
kafkaServer() { + return kafka; + } +} diff --git a/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedZooKeeperServer.java b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedZooKeeperServer.java new file mode 100644 index 0000000000..e4e3d9fdd9 --- /dev/null +++ b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/EmbeddedZooKeeperServer.java @@ -0,0 +1,46 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.kafka.source; + +import org.apache.curator.test.TestingServer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; + +/** + * Runs an in-memory, "embedded" instance of a ZooKeeper server. + * + * The ZooKeeper server instance is automatically started when you create a new instance of this class. 
+ */ +public class EmbeddedZooKeeperServer { + + private static final Logger log = LoggerFactory.getLogger(EmbeddedZooKeeperServer.class); + + private final TestingServer server; + + public EmbeddedZooKeeperServer() throws Exception { + log.debug("Starting embedded ZooKeeper server..."); + this.server = new TestingServer(); + log.debug("Embedded ZooKeeper server at {} uses the temp directory at {}", + server.getConnectString(), server.getTempDirectory()); + } + + public void stop() throws IOException { + log.debug("Shutting down embedded ZooKeeper server at {} ...", server.getConnectString()); + server.close(); + log.debug("Shutdown of embedded ZooKeeper server at {} completed", server.getConnectString()); + } + + public String connectString() { + return server.getConnectString(); + } + + public String hostname() { + return connectString().substring(0, connectString().lastIndexOf(':')); + } + +} diff --git a/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/JSONConsumerIT.java b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/JSONConsumerIT.java new file mode 100644 index 0000000000..1fab0d7ac1 --- /dev/null +++ b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/JSONConsumerIT.java @@ -0,0 +1,114 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.kafka.source; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import 
org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.connect.json.JsonSerializer; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.dataprepper.metrics.PluginMetrics; +import org.opensearch.dataprepper.model.buffer.Buffer; +import org.opensearch.dataprepper.model.record.Record; +import org.opensearch.dataprepper.plugins.kafka.configuration.KafkaSourceConfig; +import org.opensearch.dataprepper.plugins.kafka.configuration.SchemaConfig; +import org.opensearch.dataprepper.plugins.kafka.configuration.TopicConfig; +import org.yaml.snakeyaml.Yaml; + +import java.io.FileReader; +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +@ExtendWith(MockitoExtension.class) +public class JSONConsumerIT { + + private PluginMetrics pluginMetrics; + @Mock + TopicConfig topicConfig; + @Mock + private SchemaConfig schemaConfig; + private KafkaSourceConfig kafkaSourceConfig; + + private KafkaSource kafkaSource; + private Buffer> buffer; + + @ClassRule + public static final EmbeddedKafkaClusterSingleNode CLUSTER = new EmbeddedKafkaClusterSingleNode(); + + @BeforeClass + public static void createTopics() { + CLUSTER.createTopic("test-IT-topic-1"); + } + + @Before + public void configure() throws IOException { + Yaml yaml = new Yaml(); + FileReader fileReader = new FileReader(getClass().getClassLoader().getResource("sample-pipelines-int.yaml").getFile()); + Object data = yaml.load(fileReader); + if(data instanceof Map){ + Map propertyMap = (Map) data; + Map logPipelineMap = (Map) propertyMap.get("log-pipeline"); + Map sourceMap = (Map) logPipelineMap.get("source"); + Map kafkaConfigMap = 
(Map) sourceMap.get("kafka"); + ObjectMapper mapper = new ObjectMapper(); + mapper.registerModule(new JavaTimeModule()); + String json = mapper.writeValueAsString(kafkaConfigMap); + Reader reader = new StringReader(json); + kafkaSourceConfig = mapper.readValue(reader, KafkaSourceConfig.class); + List topicConfigList = kafkaSourceConfig.getTopics(); + topicConfig = topicConfigList.get(0); + schemaConfig = kafkaSourceConfig.getSchemaConfig(); + } + } + + + @Test + public void testKafkaMessagesForJsonConsumer() throws JsonProcessingException { + produceTestMessages(); + kafkaSource.start(buffer); + } + + private void produceTestMessages() throws JsonProcessingException { + + String value = "{\"writebuffer\":\"true\",\"buffertype\":\"json\"}"; + JsonNode mapper = new ObjectMapper().readTree(value); + + final Properties props = new Properties(); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + StringSerializer.class); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + JsonSerializer.class); + props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl()); + try (KafkaProducer producer = new KafkaProducer(props)) { + + for (long i = 0; i < 10; i++) { + producer.send(new ProducerRecord<>("test-IT-topic-1", + mapper )); + Thread.sleep(1000L); + } + producer.flush(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } +} \ No newline at end of file diff --git a/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/PlainTextConsumerIT.java b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/PlainTextConsumerIT.java new file mode 100644 index 0000000000..a2f10eeba7 --- /dev/null +++ b/data-prepper-plugins/kafka-plugins/src/integrationTest/java/org/opensearch/dataprepper/plugins/kafka/source/PlainTextConsumerIT.java @@ 
-0,0 +1,104 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.kafka.source; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.StringSerializer; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.mockito.Mock; +import org.opensearch.dataprepper.metrics.PluginMetrics; +import org.opensearch.dataprepper.model.buffer.Buffer; +import org.opensearch.dataprepper.model.record.Record; +import org.opensearch.dataprepper.plugins.kafka.configuration.KafkaSourceConfig; +import org.opensearch.dataprepper.plugins.kafka.configuration.SchemaConfig; +import org.opensearch.dataprepper.plugins.kafka.configuration.TopicConfig; +import org.yaml.snakeyaml.Yaml; + +import java.io.FileReader; +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.util.List; +import java.util.Map; +import java.util.Properties; + + +public class PlainTextConsumerIT { + + private PluginMetrics pluginMetrics; + @Mock + TopicConfig topicConfig; + @Mock + private SchemaConfig schemaConfig; + private KafkaSourceConfig kafkaSourceConfig; + + private KafkaSource kafkaSource; + private Buffer> buffer; + + @ClassRule + public static final EmbeddedKafkaClusterSingleNode CLUSTER = new EmbeddedKafkaClusterSingleNode(); + + @BeforeClass + public static void createTopics() { + CLUSTER.createTopic("test-IT-topic"); + } + + @Before + public void configure() throws IOException { + Yaml yaml = new Yaml(); + FileReader fileReader = new 
FileReader(getClass().getClassLoader().getResource("sample-pipelines-int.yaml").getFile()); + Object data = yaml.load(fileReader); + if(data instanceof Map){ + Map propertyMap = (Map) data; + Map logPipelineMap = (Map) propertyMap.get("log-pipeline"); + Map sourceMap = (Map) logPipelineMap.get("source"); + Map kafkaConfigMap = (Map) sourceMap.get("kafka"); + ObjectMapper mapper = new ObjectMapper(); + mapper.registerModule(new JavaTimeModule()); + String json = mapper.writeValueAsString(kafkaConfigMap); + Reader reader = new StringReader(json); + kafkaSourceConfig = mapper.readValue(reader, KafkaSourceConfig.class); + List topicConfigList = kafkaSourceConfig.getTopics(); + topicConfig = topicConfigList.get(0); + schemaConfig = kafkaSourceConfig.getSchemaConfig(); + } + } + + @Test + public void consumeKafkaMessages_should_return_at_least_one_message() { + produceTestMessages(); + kafkaSource.start(buffer); + } + + private void produceTestMessages() { + + final Properties props = new Properties(); + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092"); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + StringSerializer.class); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + StringSerializer.class); + props.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl()); + try (KafkaProducer producer = new KafkaProducer(props)) { + for (long i = 0; i < 10; i++) { + producer.send(new ProducerRecord<>("test-IT-topic", + "hello" + i)); + Thread.sleep(1000L); + } + producer.flush(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } +} \ No newline at end of file diff --git a/data-prepper-plugins/kafka-plugins/src/main/resources/sample-pipelines-int.yaml b/data-prepper-plugins/kafka-plugins/src/main/resources/sample-pipelines-int.yaml new file mode 100644 index 0000000000..88efc7b2e5 --- /dev/null +++ b/data-prepper-plugins/kafka-plugins/src/main/resources/sample-pipelines-int.yaml @@ 
-0,0 +1,34 @@ +log-pipeline: + source: + kafka: + bootstrap_servers: + - 127.0.0.1:9093 + auth_type: plaintext + topics: + - name: my-topic-2 + group_name: kafka-consumer-group-2 + group_id: DPKafkaProj-2 + workers: 10 #optional and default is 10 + autocommit: false #optional and dafault is false + autocommit_interval: 5 #optional and dafault is 5s + session_timeout: 45 #optional and dafault is 45s + max_retry_attempts: 1000 #optional and dafault is 5 + max_retry_delay: 1 #optional and dafault is 5 + auto_offset_reset: earliest #optional and dafault is earliest + thread_waiting_time: 1 #optional and dafault is 1s + max_record_fetch_time: 4 #optional and dafault is 4s + heart_beat_interval: 3 #optional and dafault is 3s + buffer_default_timeout: 5 #optional and dafault is 5s + fetch_max_bytes: 52428800 #optional and dafault is 52428800 + fetch_max_wait: 500 #optional and dafault is 500 + fetch_min_bytes: 1 #optional and dafault is 1 + retry_backoff: 100 #optional and dafault is 10s + max_poll_interval: 300000 #optional and dafault is 300000s + consumer_max_poll_records: 500 #optional and dafault is 500 + - name: my-topic-1 + group_id: DPKafkaProj-1 + schema: + registry_url: http://localhost:8081/ + version: 1 + sink: + - stdout: \ No newline at end of file From 05d229a06ceddb21cd9dedcaf49b1d455272fe6f Mon Sep 17 00:00:00 2001 From: Taylor Gray Date: Tue, 27 Jun 2023 16:25:19 -0500 Subject: [PATCH 4/9] Remove validation that made keys starting or ending with . - or _ invalid, catch all exceptions in the parse json processor (#2945) Remove validation that made keys starting or ending with . 
- or _ invalid, catch all exceptions in the parse json processor Signed-off-by: Taylor Gray --- .../dataprepper/model/event/JacksonEvent.java | 11 ---- .../model/event/JacksonEventTest.java | 5 +- .../opensearch-source/README.md | 1 - .../parsejson/ParseJsonProcessor.java | 55 ++++++++++--------- .../parsejson/ParseJsonProcessorTest.java | 19 +++++++ 5 files changed, 50 insertions(+), 41 deletions(-) diff --git a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/JacksonEvent.java b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/JacksonEvent.java index 047e4c4a07..8b4b2a729f 100644 --- a/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/JacksonEvent.java +++ b/data-prepper-api/src/main/java/org/opensearch/dataprepper/model/event/JacksonEvent.java @@ -373,19 +373,9 @@ private String trimKey(final String key) { } private boolean isValidKey(final String key) { - char previous = ' '; - char next = ' '; for (int i = 0; i < key.length(); i++) { char c = key.charAt(i); - if (i < key.length() - 1) { - next = key.charAt(i + 1); - } - - if ((i == 0 || i == key.length() - 1 || previous == '/' || next == '/') && (c == '_' || c == '.' 
|| c == '-')) { - return false; - } - if (!(c >= 48 && c <= 57 || c >= 65 && c <= 90 || c >= 97 && c <= 122 @@ -397,7 +387,6 @@ private boolean isValidKey(final String key) { return false; } - previous = c; } return true; } diff --git a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/JacksonEventTest.java b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/JacksonEventTest.java index 9de73495f9..92b181ac8c 100644 --- a/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/JacksonEventTest.java +++ b/data-prepper-api/src/test/java/org/opensearch/dataprepper/model/event/JacksonEventTest.java @@ -323,9 +323,8 @@ public void testIsValueAList_withNull() { } @ParameterizedTest - @ValueSource(strings = {"", "withSpecialChars*$%", "-withPrefixDash", "\\-withEscapeChars", "\\\\/withMultipleEscapeChars", - "withDashSuffix-", "withDashSuffix-/nestedKey", "withDashPrefix/-nestedKey", "_withUnderscorePrefix", "withUnderscoreSuffix_", - ".withDotPrefix", "withDotSuffix.", "with,Comma", "with:Colon", "with[Bracket", "with|Brace"}) + @ValueSource(strings = {"", "withSpecialChars*$%", "\\-withEscapeChars", "\\\\/withMultipleEscapeChars", + "with,Comma", "with:Colon", "with[Bracket", "with|Brace"}) void testKey_withInvalidKey_throwsIllegalArgumentException(final String invalidKey) { assertThrowsForKeyCheck(IllegalArgumentException.class, invalidKey); } diff --git a/data-prepper-plugins/opensearch-source/README.md b/data-prepper-plugins/opensearch-source/README.md index 043c606d8d..4894ba646d 100644 --- a/data-prepper-plugins/opensearch-source/README.md +++ b/data-prepper-plugins/opensearch-source/README.md @@ -197,4 +197,3 @@ The default behavior is to process all indices. 
#### Index Configuration * `index_name_regex`: A regex pattern to represent the index names for filtering - diff --git a/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parsejson/ParseJsonProcessor.java b/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parsejson/ParseJsonProcessor.java index 99f04d3e33..e076b7de10 100644 --- a/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parsejson/ParseJsonProcessor.java +++ b/data-prepper-plugins/parse-json-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/parsejson/ParseJsonProcessor.java @@ -64,34 +64,37 @@ public Collection> doExecute(final Collection> recor final boolean doUsePointer = Objects.nonNull(pointer); for (final Record record : records) { - final Event event = record.getData(); - if (Objects.nonNull(parseWhen) && !expressionEvaluator.evaluateConditional(parseWhen, event)) { - continue; - } - - final String message = event.get(source, String.class); - if (Objects.isNull(message)) { - continue; - } - - try { - final TypeReference> hashMapTypeReference = new TypeReference>() {}; - Map parsedJson = objectMapper.readValue(message, hashMapTypeReference); - - if (doUsePointer) { - parsedJson = parseUsingPointer(event, parsedJson, pointer, doWriteToRoot); - } - - if (doWriteToRoot) { - writeToRoot(event, parsedJson); - } else { - event.put(destination, parsedJson); + final Event event = record.getData(); + try { + if (Objects.nonNull(parseWhen) && !expressionEvaluator.evaluateConditional(parseWhen, event)) { + continue; + } + + final String message = event.get(source, String.class); + if (Objects.isNull(message)) { + continue; + } + final TypeReference> hashMapTypeReference = new TypeReference>() { + }; + Map parsedJson = objectMapper.readValue(message, hashMapTypeReference); + + if (doUsePointer) { + parsedJson = parseUsingPointer(event, 
parsedJson, pointer, doWriteToRoot); + } + + if (doWriteToRoot) { + writeToRoot(event, parsedJson); + } else { + event.put(destination, parsedJson); + } + } catch (final JsonProcessingException jsonException) { + event.getMetadata().addTags(tagsOnFailure); + LOG.error(EVENT, "An exception occurred due to invalid JSON while reading event [{}]", event, jsonException); + } catch (final Exception e) { + event.getMetadata().addTags(tagsOnFailure); + LOG.error(EVENT, "An exception occurred while using the parse_json processor on Event [{}]", event, e); } - } catch (final JsonProcessingException jsonException) { - event.getMetadata().addTags(tagsOnFailure); - LOG.error(EVENT, "An exception occurred due to invalid JSON while reading event [{}]", event, jsonException); - } } return records; } diff --git a/data-prepper-plugins/parse-json-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/parsejson/ParseJsonProcessorTest.java b/data-prepper-plugins/parse-json-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/parsejson/ParseJsonProcessorTest.java index d60c877f95..7fce6ecbe5 100644 --- a/data-prepper-plugins/parse-json-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/parsejson/ParseJsonProcessorTest.java +++ b/data-prepper-plugins/parse-json-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/parsejson/ParseJsonProcessorTest.java @@ -306,6 +306,25 @@ void test_tags_when_json_parse_fails() { assertTrue(parsedEvent.getMetadata().hasTags(testTags)); } + @Test + void when_evaluate_conditional_throws_RuntimeException_events_are_not_dropped() { + final String source = "different_source"; + final String destination = "destination_key"; + when(processorConfig.getSource()).thenReturn(source); + when(processorConfig.getDestination()).thenReturn(destination); + final String whenCondition = UUID.randomUUID().toString(); + when(processorConfig.getParseWhen()).thenReturn(whenCondition); + final Map data = 
Collections.singletonMap("key", "value"); + final String serializedMessage = convertMapToJSONString(data); + final Record testEvent = createMessageEvent(serializedMessage); + when(expressionEvaluator.evaluateConditional(whenCondition, testEvent.getData())).thenThrow(RuntimeException.class); + parseJsonProcessor = createObjectUnderTest(); + + final Event parsedEvent = createAndParseMessageEvent(testEvent); + + assertThat(parsedEvent.toMap(), equalTo(testEvent.getData().toMap())); + } + private String constructDeeplyNestedJsonPointer(final int numberOfLayers) { String pointer = "/" + DEEPLY_NESTED_KEY_NAME; for (int layer = 0; layer < numberOfLayers; layer++) { From 4967df2eec0454cf7363bbde40e7d1dba4749436 Mon Sep 17 00:00:00 2001 From: Uday Chintala Date: Wed, 28 Jun 2023 20:32:25 +0530 Subject: [PATCH 5/9] Sqs Source Integration Tests (#2874) Sqs Source Integration Tests Signed-off-by: Uday Kumar Chintala --- data-prepper-plugins/sqs-source/build.gradle | 31 ++++ .../source/sqssource/RecordsGenerator.java | 11 ++ .../source/sqssource/SqsRecordsGenerator.java | 47 ++++++ .../source/sqssource/SqsSourceTaskIT.java | 147 ++++++++++++++++++ 4 files changed, 236 insertions(+) create mode 100644 data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/RecordsGenerator.java create mode 100644 data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/SqsRecordsGenerator.java create mode 100644 data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/SqsSourceTaskIT.java diff --git a/data-prepper-plugins/sqs-source/build.gradle b/data-prepper-plugins/sqs-source/build.gradle index a7c4f005fa..893a1faf8d 100644 --- a/data-prepper-plugins/sqs-source/build.gradle +++ b/data-prepper-plugins/sqs-source/build.gradle @@ -15,4 +15,35 @@ dependencies { } test { useJUnitPlatform() +} + +sourceSets { + integrationTest { + java { + 
compileClasspath += main.output + test.output + runtimeClasspath += main.output + test.output + srcDir file('src/integrationTest/java') + } + resources.srcDir file('src/integrationTest/resources') + } +} + +configurations { + integrationTestImplementation.extendsFrom testImplementation + integrationTestRuntime.extendsFrom testRuntime +} + +task integrationTest(type: Test) { + group = 'verification' + testClassesDirs = sourceSets.integrationTest.output.classesDirs + + useJUnitPlatform() + + classpath = sourceSets.integrationTest.runtimeClasspath + systemProperty 'tests.sqs.source.aws.region', System.getProperty('tests.sqs.source.aws.region') + systemProperty 'tests.sqs.source.queue.url', System.getProperty('tests.sqs.source.queue.url') + + filter { + includeTestsMatching '*IT' + } } \ No newline at end of file diff --git a/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/RecordsGenerator.java b/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/RecordsGenerator.java new file mode 100644 index 0000000000..a0d4a24d68 --- /dev/null +++ b/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/RecordsGenerator.java @@ -0,0 +1,11 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.dataprepper.plugins.source.sqssource; + +import java.util.List; + +public interface RecordsGenerator { + void pushMessages(final List messages, final String queueUrl); +} diff --git a/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/SqsRecordsGenerator.java b/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/SqsRecordsGenerator.java new file mode 100644 index 0000000000..18e4ea2980 --- /dev/null +++ 
b/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/SqsRecordsGenerator.java @@ -0,0 +1,47 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.dataprepper.plugins.source.sqssource; + +import software.amazon.awssdk.services.sqs.SqsClient; +import software.amazon.awssdk.services.sqs.model.SendMessageBatchRequest; +import software.amazon.awssdk.services.sqs.model.SendMessageBatchRequestEntry; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +public class SqsRecordsGenerator implements RecordsGenerator { + + private final SqsClient sqsClient; + + public SqsRecordsGenerator(final SqsClient sqsClient){ + this.sqsClient = sqsClient; + } + + @Override + public void pushMessages(final List messages, String queueUrl) { + final List> batches = splitIntoBatches(messages, 10); + batches.forEach(batch -> { + List entries = new ArrayList<>(); + batch.forEach(msg -> entries.add(SendMessageBatchRequestEntry.builder() + .id(UUID.randomUUID() + "-" + UUID.randomUUID()).messageBody(msg).build())); + sqsClient.sendMessageBatch(SendMessageBatchRequest.builder().queueUrl(queueUrl).entries(entries).build()); + }); + } + + private static List> splitIntoBatches(List messages, int batchSize) { + List> batches = new ArrayList<>(); + int totalRecords = messages.size(); + int numBatches = (int) Math.ceil((double) totalRecords / batchSize); + + for (int i = 0; i < numBatches; i++) { + int startIndex = i * batchSize; + int endIndex = Math.min(startIndex + batchSize, totalRecords); + List batch = messages.subList(startIndex, endIndex); + batches.add(batch); + } + return batches; + } +} diff --git a/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/SqsSourceTaskIT.java 
b/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/SqsSourceTaskIT.java new file mode 100644 index 0000000000..31e12db422 --- /dev/null +++ b/data-prepper-plugins/sqs-source/src/integrationTest/java/org/opensearch/dataprepper/plugins/source/sqssource/SqsSourceTaskIT.java @@ -0,0 +1,147 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.dataprepper.plugins.source.sqssource; + +import com.linecorp.armeria.client.retry.Backoff; +import io.micrometer.core.instrument.Counter; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.mockito.Mock; +import org.opensearch.dataprepper.model.acknowledgements.AcknowledgementSetManager; +import org.opensearch.dataprepper.model.configuration.PluginSetting; +import org.opensearch.dataprepper.model.event.Event; +import org.opensearch.dataprepper.model.record.Record; +import org.opensearch.dataprepper.plugins.aws.sqs.common.SqsService; +import org.opensearch.dataprepper.plugins.aws.sqs.common.handler.SqsMessageHandler; +import org.opensearch.dataprepper.plugins.aws.sqs.common.metrics.SqsMetrics; +import org.opensearch.dataprepper.plugins.aws.sqs.common.model.SqsOptions; +import org.opensearch.dataprepper.plugins.buffer.blockingbuffer.BlockingBuffer; +import org.opensearch.dataprepper.plugins.source.sqssource.handler.RawSqsMessageHandler; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.sqs.SqsClient; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.hamcrest.CoreMatchers.equalTo; +import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class SqsSourceTaskIT { + + static final long INITIAL_DELAY = Duration.ofSeconds(20).toMillis(); + + static final long MAXIMUM_DELAY = Duration.ofMinutes(5).toMillis(); + + static final double JITTER_RATE = 0.20; + + private static final String TEST_PIPELINE_NAME = "pipeline"; + + private static final String MESSAGE = "message"; + + private static final String JSON_MESSAGE = "{\"array\":[{\"name\":\"abc\",\"test\":[{\"company\":\"xyz\"}]},{\"number\":1}]}"; + + private static final String LOG_MESSAGE = "2023-06-14T11:59:54,350 [main] INFO Test - Application started Successfully\n"; + + private static final String AWS_SQS_QUEUE_URL = "tests.sqs.source.queue.url"; + + private static final String AWS_REGION = "tests.sqs.source.aws.region"; + public static final Duration BUFFER_TIMEOUT = Duration.ofSeconds(10); + public static final int RECORDS_TO_ACCUMULATE = 100; + + @Mock + private AcknowledgementSetManager acknowledgementSetManager; + + private BlockingBuffer> buffer; + + private Counter messageReceivedCounter; + + private Counter messageDeletedCounter; + + private Backoff backoff; + + private SqsClient sqsClient; + + private SqsMetrics sqsMetrics; + + private ScheduledExecutorService executorService; + + @ParameterizedTest + @CsvSource({"2,1","10,2","50,4","100,5","200,7","500,10","1000,15","2000,24"}) + public void process_sqs_messages(int messageLoad,int threadSleepTime){ + final SqsRecordsGenerator sqsRecordsGenerator = new SqsRecordsGenerator(sqsClient); + final String queueUrl = System.getProperty(AWS_SQS_QUEUE_URL); + + List inputDataList = pushMessagesToQueue(sqsRecordsGenerator, queueUrl,messageLoad); + this.buffer = getBuffer(inputDataList.size()); + + SqsOptions sqsOptions = new 
SqsOptions.Builder().setSqsUrl(queueUrl).setMaximumMessages(10).build(); + executorService.scheduleAtFixedRate(createObjectUnderTest(sqsOptions),0,1, TimeUnit.MILLISECONDS); + try { + Thread.sleep(Duration.ofSeconds(threadSleepTime).toMillis()); + } catch (InterruptedException e) { + } + executorService.shutdown(); + final List> bufferEvents = new ArrayList<>(buffer.read((int) Duration.ofSeconds(10).toMillis()).getKey()); + final List bufferData = bufferEvents.stream().map(obj -> obj.getData().get(MESSAGE, String.class)).collect(Collectors.toList()); + assertThat(bufferData, containsInAnyOrder(inputDataList.toArray())); + assertThat(bufferData.size(),equalTo(inputDataList.size())); + } + + @BeforeEach + public void setup(){ + this.acknowledgementSetManager = mock(AcknowledgementSetManager.class); + this.messageReceivedCounter = mock(Counter.class); + this.messageDeletedCounter = mock(Counter.class); + this.sqsMetrics = mock(SqsMetrics.class); + when(sqsMetrics.getSqsMessagesReceivedCounter()).thenReturn(messageReceivedCounter); + when(sqsMetrics.getSqsMessagesDeletedCounter()).thenReturn(messageDeletedCounter); + this.backoff = Backoff.exponential(INITIAL_DELAY, MAXIMUM_DELAY).withJitter(JITTER_RATE) + .withMaxAttempts(Integer.MAX_VALUE); + this.sqsClient = SqsClient.builder().region(Region.of(System.getProperty(AWS_REGION))).build(); + executorService = Executors.newSingleThreadScheduledExecutor(); + } + + public SqsSourceTask createObjectUnderTest(final SqsOptions sqsOptions){ + SqsService sqsService = new SqsService(sqsMetrics,sqsClient,backoff); + SqsMessageHandler sqsHandler = new RawSqsMessageHandler(sqsService); + return new SqsSourceTask(buffer, RECORDS_TO_ACCUMULATE, BUFFER_TIMEOUT + ,sqsService,sqsOptions,sqsMetrics, + acknowledgementSetManager,Boolean.FALSE,sqsHandler); + } + + private static List pushMessagesToQueue(SqsRecordsGenerator sqsRecordsGenerator, String queueUrl,final int load) { + List inputDataList = new ArrayList<>(); + for(int msgCount = 
0; msgCount < load/2; msgCount++) + generateMessagesForSqsPush().forEach(obj -> inputDataList.add(obj)); + sqsRecordsGenerator.pushMessages(inputDataList, queueUrl); + return inputDataList; + } + + private static List generateMessagesForSqsPush(){ + List messages = new ArrayList<>(2); + messages.add(JSON_MESSAGE); + messages.add(LOG_MESSAGE); + return messages; + } + + private BlockingBuffer> getBuffer(final int bufferSize) { + final HashMap integerHashMap = new HashMap<>(); + integerHashMap.put("buffer_size", bufferSize); + integerHashMap.put("batch_size", bufferSize); + final PluginSetting pluginSetting = new PluginSetting("blocking_buffer", integerHashMap); + pluginSetting.setPipelineName(TEST_PIPELINE_NAME); + return new BlockingBuffer<>(pluginSetting); + } +} From 241489fdedc55098cf9a715bbaab2344bc16d027 Mon Sep 17 00:00:00 2001 From: Vishal Boinapalli Date: Wed, 28 Jun 2023 13:45:02 -0700 Subject: [PATCH 6/9] Adding Translate Processor functionality and config files (#2913) * Adding MapValues Processor functionality and config file Signed-off-by: Vishal Boinapalli * Changed logic for splitting of Keys, Added config file for Regex option Signed-off-by: Vishal Boinapalli * Added testcases, made changes addressing previous review comments, Changed the processor name from map_values to translate Signed-off-by: Vishal Boinapalli * Added regexConfigTests, made code structure changes, added check for patterns under regex Signed-off-by: Vishal Boinapalli * Removed * imports Signed-off-by: Vishal Boinapalli --------- Signed-off-by: Vishal Boinapalli --- .../translate-processor/build.gradle | 23 ++ .../RegexParameterConfiguration.java | 25 ++ .../translate/TranslateProcessor.java | 213 +++++++++++++ .../translate/TranslateProcessorConfig.java | 66 ++++ .../RegexParameterConfigurationTest.java | 46 +++ .../TranslateProcessorConfigTest.java | 59 ++++ .../translate/TranslateProcessorTest.java | 300 ++++++++++++++++++ settings.gradle | 1 + 8 files changed, 733 
insertions(+) create mode 100644 data-prepper-plugins/translate-processor/build.gradle create mode 100644 data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/RegexParameterConfiguration.java create mode 100644 data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessor.java create mode 100644 data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfig.java create mode 100644 data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/RegexParameterConfigurationTest.java create mode 100644 data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfigTest.java create mode 100644 data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorTest.java diff --git a/data-prepper-plugins/translate-processor/build.gradle b/data-prepper-plugins/translate-processor/build.gradle new file mode 100644 index 0000000000..6b6526fda4 --- /dev/null +++ b/data-prepper-plugins/translate-processor/build.gradle @@ -0,0 +1,23 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +plugins { + id 'java' +} + + +dependencies { + implementation project(':data-prepper-api') + implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.15.0' + implementation 'io.micrometer:micrometer-core' + implementation project(path: ':data-prepper-api') + testImplementation project(':data-prepper-plugins:log-generator-source') + testImplementation project(':data-prepper-test-common') + implementation 'org.apache.commons:commons-lang3:3.12.0' +} + +test { + useJUnitPlatform() +} \ No newline at end of file diff --git 
a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/RegexParameterConfiguration.java b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/RegexParameterConfiguration.java new file mode 100644 index 0000000000..e8e49bd83f --- /dev/null +++ b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/RegexParameterConfiguration.java @@ -0,0 +1,25 @@ +package org.opensearch.dataprepper.plugins.processor.translate; + +import com.fasterxml.jackson.annotation.JsonProperty; +import jakarta.validation.constraints.NotNull; + +import java.util.Map; + + +public class RegexParameterConfiguration { + + private static final boolean DEFAULT_EXACT = true; + @NotNull + @JsonProperty("patterns") + private Map patterns; + + @JsonProperty("exact") + private Boolean exact = DEFAULT_EXACT; + + public Map getPatterns() { + return patterns; + } + + public Boolean getExact() { return exact; } + +} diff --git a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessor.java b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessor.java new file mode 100644 index 0000000000..be965029a1 --- /dev/null +++ b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessor.java @@ -0,0 +1,213 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.translate; + +import org.apache.commons.lang3.Range; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.math.NumberUtils; +import org.opensearch.dataprepper.expression.ExpressionEvaluator; +import org.opensearch.dataprepper.metrics.PluginMetrics; +import 
org.opensearch.dataprepper.model.annotations.DataPrepperPlugin; +import org.opensearch.dataprepper.model.annotations.DataPrepperPluginConstructor; +import org.opensearch.dataprepper.model.event.Event; +import org.opensearch.dataprepper.model.plugin.InvalidPluginConfigurationException; +import org.opensearch.dataprepper.model.processor.AbstractProcessor; +import org.opensearch.dataprepper.model.processor.Processor; +import org.opensearch.dataprepper.model.record.Record; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.LinkedHashMap; +import java.util.HashMap; +import java.util.Objects; +import java.util.regex.Pattern; + +import static org.opensearch.dataprepper.logging.DataPrepperMarkers.EVENT; + + +@DataPrepperPlugin(name = "translate", pluginType = Processor.class, pluginConfigurationType = TranslateProcessorConfig.class) +public class TranslateProcessor extends AbstractProcessor, Record> { + + private static final Logger LOG = LoggerFactory.getLogger(TranslateProcessor.class); + private final ExpressionEvaluator expressionEvaluator; + private final TranslateProcessorConfig translateProcessorConfig; + private final LinkedHashMap, String> rangeMappings; + private final Map individualMappings; + private final Map patternMappings; + + @DataPrepperPluginConstructor + public TranslateProcessor(PluginMetrics pluginMetrics, final TranslateProcessorConfig translateProcessorConfig, final ExpressionEvaluator expressionEvaluator) { + super(pluginMetrics); + this.translateProcessorConfig = translateProcessorConfig; + this.expressionEvaluator = expressionEvaluator; + individualMappings = new HashMap<>(); + rangeMappings = new LinkedHashMap<>(); + if(this.translateProcessorConfig.getRegexParameterConfiguration()!=null) { + patternMappings = translateProcessorConfig.getRegexParameterConfiguration().getPatterns(); + } + else{ + patternMappings = 
Collections.emptyMap(); + } + + processMapField(translateProcessorConfig.getMap()); + parseFile(translateProcessorConfig.getFilePath()); + checkOverlappingKeys(); + } + + private void processMapField(Map map){ + if(Objects.nonNull(map)) { + for (Map.Entry mapEntry : map.entrySet()) { + parseIndividualKeys(mapEntry); + } + } + } + + private void parseIndividualKeys(Map.Entry mapEntry){ + String[] commaSeparatedKeys = mapEntry.getKey().split(","); + for(String individualKey : commaSeparatedKeys){ + if(individualKey.contains("-")){ + addRangeMapping(Map.entry(individualKey, mapEntry.getValue())); + } + else { + addIndividualMapping(individualKey, mapEntry.getValue()); + } + } + } + + private void addRangeMapping(Map.Entry mapEntry){ + String[] rangeKeys = mapEntry.getKey().split("-"); + if(rangeKeys.length!=2 || !StringUtils.isNumericSpace(rangeKeys[0]) || !StringUtils.isNumericSpace(rangeKeys[1])){ + addIndividualMapping(mapEntry.getKey(), mapEntry.getValue()); + } + else { + Float lowKey = Float.parseFloat(rangeKeys[0]); + Float highKey = Float.parseFloat(rangeKeys[1]); + Range rangeEntry = Range.between(lowKey, highKey); + if (isRangeOverlapping(rangeEntry)) { + String exceptionMsg = "map option contains key "+mapEntry.getKey()+" that overlaps with other range entries"; + throw new InvalidPluginConfigurationException(exceptionMsg); + } else { + rangeMappings.put(Range.between(lowKey, highKey), mapEntry.getValue()); + } + } + } + + private void addIndividualMapping(String key, String value){ + if(individualMappings.containsKey(key)){ + String exceptionMsg = "map option contains duplicate entries of "+key; + throw new InvalidPluginConfigurationException(exceptionMsg); + } + else{ + individualMappings.put(key.strip(), value); + } + } + + private boolean isRangeOverlapping(Range rangeEntry){ + for(Range range : rangeMappings.keySet()){ + if(range.isOverlappedBy(rangeEntry)){ + return true; + } + } + return false; + } + + private void checkOverlappingKeys(){ + 
for(String individualKey : individualMappings.keySet()){ + if(NumberUtils.isParsable(individualKey)){ + Float floatKey = Float.parseFloat(individualKey); + Range range = Range.between(floatKey, floatKey); + if(isRangeOverlapping(range)){ + String exceptionMsg = "map option contains key "+individualKey+" that overlaps with other range entries"; + throw new InvalidPluginConfigurationException(exceptionMsg); + } + } + } + } + + private void parseFile(String filePath){ + //todo + } + + @Override + public Collection> doExecute(Collection> records) { + //todo + for(final Record record : records) { + final Event recordEvent = record.getData(); + if (Objects.nonNull(translateProcessorConfig.getMapWhen()) && !expressionEvaluator.evaluateConditional(translateProcessorConfig.getMapWhen(), recordEvent)) { + continue; + } + try { + String matchKey = record.getData().get(translateProcessorConfig.getSource(), String.class); + if(matchesIndividualEntry(record, matchKey) || matchesRangeEntry(record, matchKey) || matchesPatternEntry(record, matchKey)){ + continue; + } + else{ + + // todo : add default, increment metrics, and/or add_tags + + } + } catch (Exception ex){ + LOG.error(EVENT, "Error mapping the source [{}] of entry [{}]", + translateProcessorConfig.getSource(), record.getData(), ex); + } + } + return records; + } + + public boolean matchesIndividualEntry(Record record, String matchKey){ + if(individualMappings.containsKey(matchKey)){ + record.getData().put(translateProcessorConfig.getTarget(), individualMappings.get(matchKey)); + return true; + } + return false; + } + + public boolean matchesRangeEntry(Record record, String matchKey){ + if(!NumberUtils.isParsable(matchKey)){ + return false; + } + Float floatKey = Float.parseFloat(matchKey); + for(Map.Entry, String> rangeEntry : rangeMappings.entrySet()) { + Range range = rangeEntry.getKey(); + if (range.contains(floatKey)) { + record.getData().put(translateProcessorConfig.getTarget(), rangeEntry.getValue()); + return 
true; + } + } + return false; + } + + public boolean matchesPatternEntry(Record record, String matchKey){ + //todo + if(!Objects.nonNull(patternMappings)){ + return false; + } + for(String pattern : patternMappings.keySet()){ + if(Pattern.matches(pattern, matchKey)){ + record.getData().put(translateProcessorConfig.getTarget(), patternMappings.get(pattern)); + return true; + } + } + return false; + } + @Override + public void prepareForShutdown() { + + } + + @Override + public boolean isReadyForShutdown() { + return true; + } + + @Override + public void shutdown() { + + } +} diff --git a/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfig.java b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfig.java new file mode 100644 index 0000000000..906778d9f2 --- /dev/null +++ b/data-prepper-plugins/translate-processor/src/main/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfig.java @@ -0,0 +1,66 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.dataprepper.plugins.processor.translate; + + +import com.fasterxml.jackson.annotation.JsonProperty; +import jakarta.validation.constraints.AssertTrue; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; +import java.util.Map; +import java.util.stream.Stream; + + +public class TranslateProcessorConfig { + + @JsonProperty("source") + @NotNull + @NotEmpty + private String source; + + @JsonProperty("target") + @NotNull + @NotEmpty + private String target; + + @JsonProperty("map") + private Map map; + @JsonProperty("file_path") + private String filePath; + + @JsonProperty("map_when") + private String mapWhen; + + @JsonProperty("regex") + private RegexParameterConfiguration regexParameterConfiguration; + + + public String getSource() 
{ return source; } + + public String getTarget() { return target; } + + public Map getMap() { return map; } + + public String getFilePath() { return filePath; } + + public String getMapWhen() { return mapWhen; } + + public RegexParameterConfiguration getRegexParameterConfiguration(){ return regexParameterConfiguration; } + + + @AssertTrue(message = "Either of map / patterns / file_path options need to be configured. (pattern option is mandatory while configuring regex option)") + public boolean hasMappings() { + return (Stream.of(map, filePath, regexParameterConfiguration).filter(n -> n!=null).count() != 0) && checkPatternUnderRegex(); + } + + public boolean checkPatternUnderRegex(){ + if(regexParameterConfiguration!=null && regexParameterConfiguration.getPatterns()==null){ + return false; + } + return true; + } + +} diff --git a/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/RegexParameterConfigurationTest.java b/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/RegexParameterConfigurationTest.java new file mode 100644 index 0000000000..a20b8ff98a --- /dev/null +++ b/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/RegexParameterConfigurationTest.java @@ -0,0 +1,46 @@ +package org.opensearch.dataprepper.plugins.processor.translate; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.opensearch.dataprepper.test.helper.ReflectivelySetField.setField; +import static org.hamcrest.CoreMatchers.is; + +class RegexParameterConfigurationTest { + + private RegexParameterConfiguration regexParameterConfiguration; + + 
@BeforeEach + void setup(){ + regexParameterConfiguration = createObjectUnderTest(); + } + + @Test + public void test_get_patterns() throws NoSuchFieldException, IllegalAccessException{ + final Map patternMap = Collections.singletonMap("key1", "val1"); + setField(RegexParameterConfiguration.class, regexParameterConfiguration, "patterns", patternMap); + assertThat(regexParameterConfiguration.getPatterns(), is(patternMap)); + } + + @Test + void test_get_exact() throws NoSuchFieldException, IllegalAccessException{ + setField(RegexParameterConfiguration.class, regexParameterConfiguration, "exact", false); + assertFalse(regexParameterConfiguration.getExact()); + } + + @Test + void test_default_exact_option(){ + assertTrue(regexParameterConfiguration.getExact()); + } + + private RegexParameterConfiguration createObjectUnderTest() { + return new RegexParameterConfiguration(); + } + +} \ No newline at end of file diff --git a/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfigTest.java b/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfigTest.java new file mode 100644 index 0000000000..32534a44b7 --- /dev/null +++ b/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorConfigTest.java @@ -0,0 +1,59 @@ +package org.opensearch.dataprepper.plugins.processor.translate; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.opensearch.dataprepper.test.helper.ReflectivelySetField.setField; + + +class TranslateProcessorConfigTest { + private TranslateProcessorConfig translateProcessorConfig; + private RegexParameterConfiguration 
regexParameterConfiguration; + private TranslateProcessorConfig createObjectUnderTest() { + return new TranslateProcessorConfig(); + } + + @BeforeEach + void setup() throws NoSuchFieldException, IllegalAccessException{ + translateProcessorConfig = createObjectUnderTest(); + setField(TranslateProcessorConfig.class, translateProcessorConfig, "source", "sourceKey"); + setField(TranslateProcessorConfig.class, translateProcessorConfig, "target", "targetKey"); + } + + @Test + void test_no_map_patterns_filepath_options_present(){ + assertFalse(translateProcessorConfig.hasMappings()); + } + + @Test + void test_only_map_option_present() throws NoSuchFieldException, IllegalAccessException{ + setField(TranslateProcessorConfig.class, translateProcessorConfig, "map", Collections.singletonMap("key1", "val1")); + assertTrue(translateProcessorConfig.hasMappings()); + } + + @Test + void test_only_filepath_option_present() throws NoSuchFieldException, IllegalAccessException{ + setField(TranslateProcessorConfig.class, translateProcessorConfig, "filePath", "/path/to/file.yaml"); + assertTrue(translateProcessorConfig.hasMappings()); + } + + @Test + void test_only_patterns_option_present() throws NoSuchFieldException, IllegalAccessException{ + regexParameterConfiguration = new RegexParameterConfiguration(); + setField(RegexParameterConfiguration.class, regexParameterConfiguration, "patterns", Collections.singletonMap("patternKey1", "patternVal1")); + setField(TranslateProcessorConfig.class, translateProcessorConfig, "regexParameterConfiguration", regexParameterConfiguration); + assertTrue(translateProcessorConfig.hasMappings()); + } + + @Test + void test_no_patterns_under_regex() throws NoSuchFieldException, IllegalAccessException{ + regexParameterConfiguration = new RegexParameterConfiguration(); + setField(RegexParameterConfiguration.class, regexParameterConfiguration, "exact", true); + setField(TranslateProcessorConfig.class, translateProcessorConfig, "map", 
Collections.singletonMap("key1", "val1")); + setField(TranslateProcessorConfig.class, translateProcessorConfig, "regexParameterConfiguration", regexParameterConfiguration); + assertFalse(translateProcessorConfig.hasMappings()); + } +} \ No newline at end of file diff --git a/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorTest.java b/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorTest.java new file mode 100644 index 0000000000..52688cfe8d --- /dev/null +++ b/data-prepper-plugins/translate-processor/src/test/java/org/opensearch/dataprepper/plugins/processor/translate/TranslateProcessorTest.java @@ -0,0 +1,300 @@ +package org.opensearch.dataprepper.plugins.processor.translate; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.opensearch.dataprepper.expression.ExpressionEvaluator; +import org.opensearch.dataprepper.metrics.PluginMetrics; +import org.opensearch.dataprepper.model.event.Event; +import org.opensearch.dataprepper.model.event.JacksonEvent; +import org.opensearch.dataprepper.model.plugin.InvalidPluginConfigurationException; +import org.opensearch.dataprepper.model.record.Record; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Collections; +import java.util.AbstractMap; +import java.util.Arrays; + +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.lenient; +import static org.mockito.Mockito.when; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.CoreMatchers.is; + 
+@ExtendWith(MockitoExtension.class) +class TranslateProcessorTest { + + @Mock + private PluginMetrics pluginMetrics; + @Mock + private TranslateProcessorConfig mockConfig; + + @Mock + private RegexParameterConfiguration mockRegexConfig; + + @Mock + private ExpressionEvaluator expressionEvaluator; + + @BeforeEach + void setup(){ + lenient().when(mockConfig.getSource()).thenReturn("sourceField"); + lenient().when(mockConfig.getTarget()).thenReturn("targetField"); + } + + @Test + public void test_string_keys_in_map(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("key1","mappedValue1"))); + final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("key1"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + } + + @Test + public void test_integer_keys_in_map(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("123","mappedValue1"))); + final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("123"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + } + + @Test + public void test_integer_range_keys_in_map(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("1-10","mappedValue1"))); + final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("5"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + 
assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + + } + + @Test + public void test_comma_separated_keys_in_map(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("key1,key2, key3","mappedValue1"))); + final TranslateProcessor processor = createObjectUnderTest(); + + for(String key : Arrays.asList("key1","key2","key3")){ + final Record record = getEvent(key); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + } + + final Record failureRecord = getEvent("key4"); + final List> failingTranslatedRecords = (List>) processor.doExecute(Collections.singletonList(failureRecord)); + + assertFalse(failingTranslatedRecords.get(0).getData().containsKey("targetField")); + } + + @Test + public void test_comma_separated_range_keys_in_map(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("1-10,11-20, 21-30","mappedValue1"))); + final TranslateProcessor processor = createObjectUnderTest(); + + for(String key : Arrays.asList("5","15","25")){ + final Record record = getEvent(key); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + } + + final Record failureRecord = getEvent("35"); + final List> failingTranslatedRecords = (List>) processor.doExecute(Collections.singletonList(failureRecord)); + + assertFalse(failingTranslatedRecords.get(0).getData().containsKey("targetField")); + } + + @Test + public void test_float_source(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("1-10,11-20, 21-30","mappedValue1"))); + 
final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("11.1"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + + final Record failureRecord = getEvent("20.5"); + final List> failingTranslatedRecords = (List>) processor.doExecute(Collections.singletonList(failureRecord)); + + assertFalse(failingTranslatedRecords.get(0).getData().containsKey("targetField")); + } + + @Test + public void test_comma_separated_integer_ranges_and_string_keys(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("1-10,key1","mappedValue1"))); + final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("5.2"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + + final Record recordStringKey = getEvent("key1"); + final List> translatedStringKeyRecords = (List>) processor.doExecute(Collections.singletonList(recordStringKey)); + + assertTrue(translatedStringKeyRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedStringKeyRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + } + + @Test + public void test_multiple_dashes_in_keys_should_be_treated_as_string_literal(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("1-10-20","mappedValue1"))); + final TranslateProcessor processor = createObjectUnderTest(); + final Record failureRecord = getEvent("1-10-20"); + final List> failingTranslatedRecords = (List>) processor.doExecute(Collections.singletonList(failureRecord)); + + 
assertTrue(failingTranslatedRecords.get(0).getData().containsKey("targetField")); + + final Record record = getEvent("10"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertFalse(translatedRecords.get(0).getData().containsKey("targetField")); + + } + + @Test + public void test_overlapping_ranges_should_fail_when_overlapping(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("1-10","mappedValue1"), createMapping("10-20", "mappedValue2"))); + + assertThrows(InvalidPluginConfigurationException.class,() -> createObjectUnderTest()); + } + + @Test + public void test_overlapping_key_and_range_in_map_option(){ + when(mockConfig.getMap()).thenReturn(createMapEntries(createMapping("1-10","mappedValue1"), createMapping("5.3", "mappedValue2"))); + + assertThrows(InvalidPluginConfigurationException.class,() -> createObjectUnderTest()); + } + + @Test + public void test_string_literal_in_pattern_option(){ + when(mockConfig.getRegexParameterConfiguration()).thenReturn(mockRegexConfig); + when(mockRegexConfig.getPatterns()).thenReturn(createMapEntries(createMapping("key1","mappedValue1"))); + + final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("key1"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + + final Record failureRecord = getEvent("key2"); + final List> failingTranslatedRecords = (List>) processor.doExecute(Collections.singletonList(failureRecord)); + + assertFalse(failingTranslatedRecords.get(0).getData().containsKey("targetField")); + } + + @Test + public void test_matching_of_regex_pattern_in_pattern_option(){ + when(mockConfig.getRegexParameterConfiguration()).thenReturn(mockRegexConfig); + 
when(mockRegexConfig.getPatterns()).thenReturn(createMapEntries(createMapping("^(1[0-9]|20)$", "patternValue1"))); //Range between 10-20 + + final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("15"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("patternValue1")); + + final Record failureRecord = getEvent("1"); + final List> failingTranslatedRecords = (List>) processor.doExecute(Collections.singletonList(failureRecord)); + + assertFalse(failingTranslatedRecords.get(0).getData().containsKey("targetField")); + } + + @Test + public void test_pattern_matching_when_no_match_in_map(){ + when(mockConfig.getRegexParameterConfiguration()).thenReturn(mockRegexConfig); + when(mockConfig.getMap()).thenReturn((createMapEntries(createMapping("key1", "mappedValue1"), createMapping("key2", "mappedValue2")))); + when(mockRegexConfig.getPatterns()).thenReturn(createMapEntries(createMapping("patternKey1", "patternValue1"))); + + final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("patternKey1"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("patternValue1")); + + final Record recordMapKey = getEvent("key1"); + final List> translatedMapKeyRecords = (List>) processor.doExecute(Collections.singletonList(recordMapKey)); + + assertTrue(translatedMapKeyRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedMapKeyRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + } + + @Test + public void 
test_map_matching_when_overlapping_ranges_in_map_and_pattern(){ + when(mockConfig.getRegexParameterConfiguration()).thenReturn(mockRegexConfig); + when(mockConfig.getMap()).thenReturn((createMapEntries(createMapping("400", "mappedValue1")))); + when(mockRegexConfig.getPatterns()).thenReturn(createMapEntries(createMapping("^(400|404)$", "patternValue1"))); // Matches 400 or 404 + + final TranslateProcessor processor = createObjectUnderTest(); + final Record record = getEvent("400"); + final List> translatedRecords = (List>) processor.doExecute(Collections.singletonList(record)); + + assertTrue(translatedRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedRecords.get(0).getData().get("targetField", String.class), is("mappedValue1")); + + final Record recordPatternKey = getEvent("404"); + final List> translatedPatternKeyRecords = (List>) processor.doExecute(Collections.singletonList(recordPatternKey)); + + assertTrue(translatedPatternKeyRecords.get(0).getData().containsKey("targetField")); + assertThat(translatedPatternKeyRecords.get(0).getData().get("targetField", String.class), is("patternValue1")); + } + + + + + private TranslateProcessor createObjectUnderTest() { + return new TranslateProcessor(pluginMetrics, mockConfig, expressionEvaluator); + } + + private Record sourceAndTargetFields(Object sourceValue, Object targetValue) { + final Map testData = new HashMap<>(); + testData.put("sourceField", sourceValue); + testData.put("targetField", targetValue); + return buildRecordWithEvent(testData); + } + private Record getEvent(Object sourceField) { + final Map testData = new HashMap<>(); + testData.put("sourceField", sourceField); + return buildRecordWithEvent(testData); + } + + private static Record buildRecordWithEvent(final Map data) { + return new Record<>(JacksonEvent.builder() + .withData(data) + .withEventType("event") + .build()); + } + + public Map.Entry createMapping(String key, String value){ + return new 
AbstractMap.SimpleEntry<>(key, value); + } + + public Map createMapEntries(Map.Entry... mappings){ + final Map finalMap = new HashMap<>(); + for(Map.Entry mapping : mappings){ + finalMap.put(mapping.getKey(), mapping.getValue()); + } + + return finalMap; + } +} \ No newline at end of file diff --git a/settings.gradle b/settings.gradle index 0760851409..26bb563e62 100644 --- a/settings.gradle +++ b/settings.gradle @@ -103,6 +103,7 @@ include 'data-prepper-plugins:rss-source' include 'data-prepper-plugins:csv-processor' include 'data-prepper-plugins:parse-json-processor' include 'data-prepper-plugins:trace-peer-forwarder-processor' +include 'data-prepper-plugins:translate-processor' include 'data-prepper-plugins:dynamodb-source-coordination-store' include 'release' include 'release:archives' From 8445d4890e8fa0bc28647fb5e48b1447c72bcb78 Mon Sep 17 00:00:00 2001 From: Katherine Shen <40495707+shenkw1@users.noreply.github.com> Date: Thu, 29 Jun 2023 07:29:49 -0700 Subject: [PATCH 7/9] Update simple_pipelines.md (#2947) Removed a space between the second ${PWD} and the rest of the line Signed-off-by: Katherine Shen <40495707+shenkw1@users.noreply.github.com> --- docs/simple_pipelines.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/simple_pipelines.md b/docs/simple_pipelines.md index b05953e575..0f6aea94aa 100644 --- a/docs/simple_pipelines.md +++ b/docs/simple_pipelines.md @@ -40,7 +40,7 @@ built from source, you will need to make some modifications to the example comma For Data Prepper 2.0 or above, use this command: ``` -docker run --name data-prepper -p 4900:4900 -v ${PWD}/pipelines.yaml:/usr/share/data-prepper/pipelines/pipelines.yaml -v ${PWD} /data-prepper-config.yaml:/usr/share/data-prepper/config/data-prepper-config.yaml opensearchproject/data-prepper:latest +docker run --name data-prepper -p 4900:4900 -v ${PWD}/pipelines.yaml:/usr/share/data-prepper/pipelines/pipelines.yaml -v 
${PWD}/data-prepper-config.yaml:/usr/share/data-prepper/config/data-prepper-config.yaml opensearchproject/data-prepper:latest ``` For Data Prepper before version 2.0, use this command: From 8e2145cc4c00fb2a93b97a3fcdb689609e23ff63 Mon Sep 17 00:00:00 2001 From: Asif Sohail Mohammed Date: Thu, 29 Jun 2023 09:59:03 -0500 Subject: [PATCH 8/9] =?UTF-8?q?Fix=20CVE-2023-35165,=20CVE-2023-34455,=20C?= =?UTF-8?q?VE-2023-34453,=20CVE-2023-34454,=20C=E2=80=A6=20(#2948)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix CVE-2023-35165, CVE-2023-34455, CVE-2023-34453, CVE-2023-34454, CVE-2023-2976 Signed-off-by: Asif Sohail Mohammed * Updated snappy version in build.gradle files Signed-off-by: Asif Sohail Mohammed --------- Signed-off-by: Asif Sohail Mohammed --- build.gradle | 2 + data-prepper-plugins/common/build.gradle | 2 +- data-prepper-plugins/s3-source/build.gradle | 2 +- .../staging-resources-cdk/package-lock.json | 828 ++++++++++++------ release/staging-resources-cdk/package.json | 2 +- settings.gradle | 6 +- 6 files changed, 570 insertions(+), 272 deletions(-) diff --git a/build.gradle b/build.gradle index d1a3aa8d48..006e641354 100644 --- a/build.gradle +++ b/build.gradle @@ -177,6 +177,8 @@ subprojects { } } else if (details.requested.group == 'log4j' && details.requested.name == 'log4j') { details.useTarget group: 'org.apache.logging.log4j', name: 'log4j-1.2-api', version: '2.17.1' + } else if (details.requested.group == 'org.xerial.snappy' && details.requested.name == 'snappy-java') { + details.useTarget group: 'org.xerial.snappy', name: 'snappy-java', version: '1.1.10.1' } } } diff --git a/data-prepper-plugins/common/build.gradle b/data-prepper-plugins/common/build.gradle index 24a4fca124..87b15dcd31 100644 --- a/data-prepper-plugins/common/build.gradle +++ b/data-prepper-plugins/common/build.gradle @@ -21,7 +21,7 @@ dependencies { implementation 'io.micrometer:micrometer-core' testImplementation 
testLibs.junit.vintage implementation 'org.apache.parquet:parquet-common:1.12.3' - implementation 'org.xerial.snappy:snappy-java:1.1.9.1' + implementation 'org.xerial.snappy:snappy-java:1.1.10.1' testImplementation project(':data-prepper-plugins:blocking-buffer') testImplementation 'commons-io:commons-io:2.12.0' testImplementation testLibs.mockito.inline diff --git a/data-prepper-plugins/s3-source/build.gradle b/data-prepper-plugins/s3-source/build.gradle index 343f578b07..70c27e342a 100644 --- a/data-prepper-plugins/s3-source/build.gradle +++ b/data-prepper-plugins/s3-source/build.gradle @@ -30,7 +30,7 @@ dependencies { implementation 'org.hibernate.validator:hibernate-validator:7.0.5.Final' implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-csv' implementation 'com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.14.2' - implementation 'org.xerial.snappy:snappy-java:1.1.9.1' + implementation 'org.xerial.snappy:snappy-java:1.1.10.1' implementation 'org.apache.parquet:parquet-common:1.12.3' testImplementation 'org.apache.commons:commons-lang3:3.12.0' testImplementation 'com.github.tomakehurst:wiremock:3.0.0-beta-8' diff --git a/release/staging-resources-cdk/package-lock.json b/release/staging-resources-cdk/package-lock.json index a6fdae5930..8b77d7582f 100644 --- a/release/staging-resources-cdk/package-lock.json +++ b/release/staging-resources-cdk/package-lock.json @@ -8,7 +8,7 @@ "name": "staging-resources-cdk", "version": "0.1.0", "dependencies": { - "aws-cdk-lib": "2.13.0", + "aws-cdk-lib": "2.80.0", "constructs": "^10.0.0", "source-map-support": "^0.5.16" }, @@ -41,43 +41,58 @@ "node": ">=6.0.0" } }, + "node_modules/@aws-cdk/asset-awscli-v1": { + "version": "2.2.199", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.199.tgz", + "integrity": "sha512-zNdD2OxALdsdQaRZBpTfMTuudxV+4jLMznJIvVj6O+OqCru4m5UtgVQmyApW1z2H9s4/06ovVt20aXe2G8Ta+w==" + }, + "node_modules/@aws-cdk/asset-kubectl-v20": { + "version": 
"2.1.2", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.2.tgz", + "integrity": "sha512-3M2tELJOxQv0apCIiuKQ4pAbncz9GuLwnKFqxifWfe77wuMxyTRPmxssYHs42ePqzap1LT6GDcPygGs+hHstLg==" + }, + "node_modules/@aws-cdk/asset-node-proxy-agent-v5": { + "version": "2.0.165", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v5/-/asset-node-proxy-agent-v5-2.0.165.tgz", + "integrity": "sha512-bsyLQD/vqXQcc9RDmlM1XqiFNO/yewgVFXmkMcQkndJbmE/jgYkzewwYGrBlfL725hGLQipXq19+jwWwdsXQqg==" + }, "node_modules/@babel/code-frame": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", - "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.5.tgz", + "integrity": "sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ==", "dev": true, "dependencies": { - "@babel/highlight": "^7.18.6" + "@babel/highlight": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/compat-data": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.21.0.tgz", - "integrity": "sha512-gMuZsmsgxk/ENC3O/fRw5QY8A9/uxQbbCEypnLIiYYc/qVJtEV7ouxC3EllIIwNzMqAQee5tanFabWsUOutS7g==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.5.tgz", + "integrity": "sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.21.0.tgz", - "integrity": "sha512-PuxUbxcW6ZYe656yL3EAhpy7qXKq0DmYsrJLpbB8XrsCP9Nm+XCg9XFMb5vIDliPD7+U/+M+QJlH17XOcB7eXA==", + "version": "7.22.5", + "resolved": 
"https://registry.npmjs.org/@babel/core/-/core-7.22.5.tgz", + "integrity": "sha512-SBuTAjg91A3eKOvD+bPEz3LlhHZRNu1nFOVts9lzDJTXshHTjII0BAtDS3Y2DAkdZdDKWVZGVwkDfc4Clxn1dg==", "dev": true, "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.21.0", - "@babel/helper-compilation-targets": "^7.20.7", - "@babel/helper-module-transforms": "^7.21.0", - "@babel/helpers": "^7.21.0", - "@babel/parser": "^7.21.0", - "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.0", - "@babel/types": "^7.21.0", + "@babel/code-frame": "^7.22.5", + "@babel/generator": "^7.22.5", + "@babel/helper-compilation-targets": "^7.22.5", + "@babel/helper-module-transforms": "^7.22.5", + "@babel/helpers": "^7.22.5", + "@babel/parser": "^7.22.5", + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.5", + "@babel/types": "^7.22.5", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -102,12 +117,12 @@ } }, "node_modules/@babel/generator": { - "version": "7.21.1", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.21.1.tgz", - "integrity": "sha512-1lT45bAYlQhFn/BHivJs43AiW2rg3/UbLyShGfF3C0KmHvO5fSghWd5kBJy30kpRRucGzXStvnnCFniCR2kXAA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.22.5.tgz", + "integrity": "sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA==", "dev": true, "dependencies": { - "@babel/types": "^7.21.0", + "@babel/types": "^7.22.5", "@jridgewell/gen-mapping": "^0.3.2", "@jridgewell/trace-mapping": "^0.3.17", "jsesc": "^2.5.1" @@ -131,13 +146,13 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.7.tgz", - "integrity": "sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ==", + "version": 
"7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.5.tgz", + "integrity": "sha512-Ji+ywpHeuqxB8WDxraCiqR0xfhYjiDE/e6k7FuIaANnoOFxAHskHChz4vA1mJC9Lbm01s1PVAGhQY4FUKSkGZw==", "dev": true, "dependencies": { - "@babel/compat-data": "^7.20.5", - "@babel/helper-validator-option": "^7.18.6", + "@babel/compat-data": "^7.22.5", + "@babel/helper-validator-option": "^7.22.5", "browserslist": "^4.21.3", "lru-cache": "^5.1.1", "semver": "^6.3.0" @@ -159,65 +174,65 @@ } }, "node_modules/@babel/helper-environment-visitor": { - "version": "7.18.9", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", - "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz", + "integrity": "sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-function-name": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz", - "integrity": "sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz", + "integrity": "sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==", "dev": true, "dependencies": { - "@babel/template": "^7.20.7", - "@babel/types": "^7.21.0" + "@babel/template": "^7.22.5", + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-hoist-variables": { - "version": "7.18.6", - "resolved": 
"https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", - "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", + "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", "dev": true, "dependencies": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", - "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz", + "integrity": "sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg==", "dev": true, "dependencies": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.21.2", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz", - "integrity": "sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz", + "integrity": "sha512-+hGKDt/Ze8GFExiVHno/2dvG5IdstpzCq0y4Qc9OJ25D4q3pKfiIP/4Vp3/JvhDkLKsDK2api3q3fpIgiIF5bw==", "dev": true, "dependencies": { - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-module-imports": "^7.18.6", - "@babel/helper-simple-access": "^7.20.2", - 
"@babel/helper-split-export-declaration": "^7.18.6", - "@babel/helper-validator-identifier": "^7.19.1", - "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.2", - "@babel/types": "^7.21.2" + "@babel/helper-environment-visitor": "^7.22.5", + "@babel/helper-module-imports": "^7.22.5", + "@babel/helper-simple-access": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.5", + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.5", + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" @@ -233,77 +248,77 @@ } }, "node_modules/@babel/helper-simple-access": { - "version": "7.20.2", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz", - "integrity": "sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz", + "integrity": "sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==", "dev": true, "dependencies": { - "@babel/types": "^7.20.2" + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-split-export-declaration": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", - "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.5.tgz", + "integrity": "sha512-thqK5QFghPKWLhAV321lxF95yCg2K3Ob5yw+M3VHWfdia0IkPXUtoLH8x/6Fh486QUvzhb8YOWHChTVen2/PoQ==", "dev": true, "dependencies": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, 
"node_modules/@babel/helper-string-parser": { - "version": "7.19.4", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz", - "integrity": "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", + "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz", + "integrity": "sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz", - "integrity": "sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz", + "integrity": "sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.0.tgz", - "integrity": 
"sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.5.tgz", + "integrity": "sha512-pSXRmfE1vzcUIDFQcSGA5Mr+GxBV9oiRKDuDxXvWQQBCh8HoIjs/2DlDB7H8smac1IVrB9/xdXj2N3Wol9Cr+Q==", "dev": true, "dependencies": { - "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.0", - "@babel/types": "^7.21.0" + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.5", + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.5.tgz", + "integrity": "sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.18.6", + "@babel/helper-validator-identifier": "^7.22.5", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, @@ -383,9 +398,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.21.2", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.21.2.tgz", - "integrity": "sha512-URpaIJQwEkEC2T9Kn+Ai6Xe/02iNaVCuT/PtoRz3GPVJVDpPd7mLo+VddTbhCRU9TXqW5mSrQfXZyi8kDKOVpQ==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.22.5.tgz", + "integrity": "sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q==", "dev": true, "bin": { "parser": "bin/babel-parser.js" @@ -542,33 +557,33 @@ } }, "node_modules/@babel/template": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz", - "integrity": 
"sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.5.tgz", + "integrity": "sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==", "dev": true, "dependencies": { - "@babel/code-frame": "^7.18.6", - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7" + "@babel/code-frame": "^7.22.5", + "@babel/parser": "^7.22.5", + "@babel/types": "^7.22.5" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.21.2", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.2.tgz", - "integrity": "sha512-ts5FFU/dSUPS13tv8XiEObDu9K+iagEKME9kAbaP7r0Y9KtZJZ+NGndDvWoRAYNpeWafbpFeki3q9QoMD6gxyw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.21.1", - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-function-name": "^7.21.0", - "@babel/helper-hoist-variables": "^7.18.6", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.21.2", - "@babel/types": "^7.21.2", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.22.5.tgz", + "integrity": "sha512-7DuIjPgERaNo6r+PZwItpjCZEa5vyw4eJGufeLxrPdBXBoLcCJCIasvK6pK/9DVNrLZTLFhUGqaC6X/PA007TQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.22.5", + "@babel/generator": "^7.22.5", + "@babel/helper-environment-visitor": "^7.22.5", + "@babel/helper-function-name": "^7.22.5", + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.5", + "@babel/parser": "^7.22.5", + "@babel/types": "^7.22.5", "debug": "^4.1.0", "globals": "^11.1.0" }, @@ -586,13 +601,13 @@ } }, "node_modules/@babel/types": { - "version": "7.21.2", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.2.tgz", - "integrity": 
"sha512-3wRZSs7jiFaB8AjxiiD+VqN5DTG2iRvJGQ+qYFrs/654lg6kGTQWIOFjlBo5RaXuAZjBmP3+OQH4dmhqiiyYxw==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.22.5.tgz", + "integrity": "sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA==", "dev": true, "dependencies": { - "@babel/helper-string-parser": "^7.19.4", - "@babel/helper-validator-identifier": "^7.19.1", + "@babel/helper-string-parser": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.5", "to-fast-properties": "^2.0.0" }, "engines": { @@ -1703,9 +1718,9 @@ } }, "node_modules/aws-cdk-lib": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.13.0.tgz", - "integrity": "sha512-nKpQk+9H7T128gpzl+7XTu+19Yzj6kmCMrvSwTXLa/qr4/soEpXI68/+19ymEAHOYEL4Dd3eyk490P+y0wzi6A==", + "version": "2.80.0", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.80.0.tgz", + "integrity": "sha512-PoqD3Yms5I0ajuTi071nTW/hpkH3XsdyZzn5gYsPv0qD7mqP3h6Qr+6RiGx+yQ1KcVFyxWdX15uK+DsC0KwvcQ==", "bundleDependencies": [ "@balena/dockerignore", "case", @@ -1715,17 +1730,22 @@ "minimatch", "punycode", "semver", + "table", "yaml" ], "dependencies": { + "@aws-cdk/asset-awscli-v1": "^2.2.177", + "@aws-cdk/asset-kubectl-v20": "^2.1.1", + "@aws-cdk/asset-node-proxy-agent-v5": "^2.0.148", "@balena/dockerignore": "^1.0.2", "case": "1.6.3", - "fs-extra": "^9.1.0", - "ignore": "^5.2.0", - "jsonschema": "^1.4.0", + "fs-extra": "^11.1.1", + "ignore": "^5.2.4", + "jsonschema": "^1.4.1", "minimatch": "^3.1.2", - "punycode": "^2.1.1", - "semver": "^7.3.5", + "punycode": "^2.3.0", + "semver": "^7.5.1", + "table": "^6.8.1", "yaml": "1.10.2" }, "engines": { @@ -1740,12 +1760,49 @@ "inBundle": true, "license": "Apache-2.0" }, - "node_modules/aws-cdk-lib/node_modules/at-least-node": { - "version": "1.0.0", + "node_modules/aws-cdk-lib/node_modules/ajv": { + "version": "8.12.0", "inBundle": true, - "license": "ISC", + 
"license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/aws-cdk-lib/node_modules/ansi-regex": { + "version": "5.0.1", + "inBundle": true, + "license": "MIT", "engines": { - "node": ">= 4.0.0" + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/ansi-styles": { + "version": "4.3.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/aws-cdk-lib/node_modules/astral-regex": { + "version": "2.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" } }, "node_modules/aws-cdk-lib/node_modules/balanced-match": { @@ -1770,38 +1827,76 @@ "node": ">= 0.8.0" } }, + "node_modules/aws-cdk-lib/node_modules/color-convert": { + "version": "2.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/aws-cdk-lib/node_modules/color-name": { + "version": "1.1.4", + "inBundle": true, + "license": "MIT" + }, "node_modules/aws-cdk-lib/node_modules/concat-map": { "version": "0.0.1", "inBundle": true, "license": "MIT" }, + "node_modules/aws-cdk-lib/node_modules/emoji-regex": { + "version": "8.0.0", + "inBundle": true, + "license": "MIT" + }, + "node_modules/aws-cdk-lib/node_modules/fast-deep-equal": { + "version": "3.1.3", + "inBundle": true, + "license": "MIT" + }, "node_modules/aws-cdk-lib/node_modules/fs-extra": { - "version": "9.1.0", + "version": "11.1.1", "inBundle": true, "license": "MIT", "dependencies": { - "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" }, "engines": { - "node": ">=10" + "node": ">=14.14" } }, 
"node_modules/aws-cdk-lib/node_modules/graceful-fs": { - "version": "4.2.9", + "version": "4.2.11", "inBundle": true, "license": "ISC" }, "node_modules/aws-cdk-lib/node_modules/ignore": { - "version": "5.2.0", + "version": "5.2.4", "inBundle": true, "license": "MIT", "engines": { "node": ">= 4" } }, + "node_modules/aws-cdk-lib/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/json-schema-traverse": { + "version": "1.0.0", + "inBundle": true, + "license": "MIT" + }, "node_modules/aws-cdk-lib/node_modules/jsonfile": { "version": "6.1.0", "inBundle": true, @@ -1814,13 +1909,18 @@ } }, "node_modules/aws-cdk-lib/node_modules/jsonschema": { - "version": "1.4.0", + "version": "1.4.1", "inBundle": true, "license": "MIT", "engines": { "node": "*" } }, + "node_modules/aws-cdk-lib/node_modules/lodash.truncate": { + "version": "4.4.2", + "inBundle": true, + "license": "MIT" + }, "node_modules/aws-cdk-lib/node_modules/lru-cache": { "version": "6.0.0", "inBundle": true, @@ -1844,15 +1944,23 @@ } }, "node_modules/aws-cdk-lib/node_modules/punycode": { - "version": "2.1.1", + "version": "2.3.0", "inBundle": true, "license": "MIT", "engines": { "node": ">=6" } }, + "node_modules/aws-cdk-lib/node_modules/require-from-string": { + "version": "2.0.2", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/aws-cdk-lib/node_modules/semver": { - "version": "7.3.5", + "version": "7.5.1", "inBundle": true, "license": "ISC", "dependencies": { @@ -1865,6 +1973,61 @@ "node": ">=10" } }, + "node_modules/aws-cdk-lib/node_modules/slice-ansi": { + "version": "4.0.0", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/aws-cdk-lib/node_modules/string-width": { + "version": "4.2.3", + "inBundle": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/strip-ansi": { + "version": "6.0.1", + "inBundle": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/aws-cdk-lib/node_modules/table": { + "version": "6.8.1", + "inBundle": true, + "license": "BSD-3-Clause", + "dependencies": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/aws-cdk-lib/node_modules/universalify": { "version": "2.0.0", "inBundle": true, @@ -1873,6 +2036,14 @@ "node": ">= 10.0.0" } }, + "node_modules/aws-cdk-lib/node_modules/uri-js": { + "version": "4.4.1", + "inBundle": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/aws-cdk-lib/node_modules/yallist": { "version": "4.0.0", "inBundle": true, @@ -2006,7 +2177,8 @@ "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true }, "node_modules/base": { "version": "0.11.2", @@ -2042,6 +2214,7 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, "dependencies": { 
"balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -2399,7 +2572,8 @@ "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true }, "node_modules/constructs": { "version": "10.1.264", @@ -4977,6 +5151,7 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -6243,9 +6418,9 @@ } }, "node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz", + "integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==", "dependencies": { "lru-cache": "^6.0.0" }, @@ -7641,37 +7816,52 @@ "@jridgewell/trace-mapping": "^0.3.9" } }, + "@aws-cdk/asset-awscli-v1": { + "version": "2.2.199", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.199.tgz", + "integrity": "sha512-zNdD2OxALdsdQaRZBpTfMTuudxV+4jLMznJIvVj6O+OqCru4m5UtgVQmyApW1z2H9s4/06ovVt20aXe2G8Ta+w==" + }, + "@aws-cdk/asset-kubectl-v20": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.2.tgz", + "integrity": "sha512-3M2tELJOxQv0apCIiuKQ4pAbncz9GuLwnKFqxifWfe77wuMxyTRPmxssYHs42ePqzap1LT6GDcPygGs+hHstLg==" + }, + "@aws-cdk/asset-node-proxy-agent-v5": { + "version": "2.0.165", + "resolved": 
"https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v5/-/asset-node-proxy-agent-v5-2.0.165.tgz", + "integrity": "sha512-bsyLQD/vqXQcc9RDmlM1XqiFNO/yewgVFXmkMcQkndJbmE/jgYkzewwYGrBlfL725hGLQipXq19+jwWwdsXQqg==" + }, "@babel/code-frame": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", - "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.5.tgz", + "integrity": "sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ==", "dev": true, "requires": { - "@babel/highlight": "^7.18.6" + "@babel/highlight": "^7.22.5" } }, "@babel/compat-data": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.21.0.tgz", - "integrity": "sha512-gMuZsmsgxk/ENC3O/fRw5QY8A9/uxQbbCEypnLIiYYc/qVJtEV7ouxC3EllIIwNzMqAQee5tanFabWsUOutS7g==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.5.tgz", + "integrity": "sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA==", "dev": true }, "@babel/core": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.21.0.tgz", - "integrity": "sha512-PuxUbxcW6ZYe656yL3EAhpy7qXKq0DmYsrJLpbB8XrsCP9Nm+XCg9XFMb5vIDliPD7+U/+M+QJlH17XOcB7eXA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.22.5.tgz", + "integrity": "sha512-SBuTAjg91A3eKOvD+bPEz3LlhHZRNu1nFOVts9lzDJTXshHTjII0BAtDS3Y2DAkdZdDKWVZGVwkDfc4Clxn1dg==", "dev": true, "requires": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.21.0", - "@babel/helper-compilation-targets": "^7.20.7", - "@babel/helper-module-transforms": "^7.21.0", - "@babel/helpers": "^7.21.0", - "@babel/parser": 
"^7.21.0", - "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.0", - "@babel/types": "^7.21.0", + "@babel/code-frame": "^7.22.5", + "@babel/generator": "^7.22.5", + "@babel/helper-compilation-targets": "^7.22.5", + "@babel/helper-module-transforms": "^7.22.5", + "@babel/helpers": "^7.22.5", + "@babel/parser": "^7.22.5", + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.5", + "@babel/types": "^7.22.5", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -7688,12 +7878,12 @@ } }, "@babel/generator": { - "version": "7.21.1", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.21.1.tgz", - "integrity": "sha512-1lT45bAYlQhFn/BHivJs43AiW2rg3/UbLyShGfF3C0KmHvO5fSghWd5kBJy30kpRRucGzXStvnnCFniCR2kXAA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.22.5.tgz", + "integrity": "sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA==", "dev": true, "requires": { - "@babel/types": "^7.21.0", + "@babel/types": "^7.22.5", "@jridgewell/gen-mapping": "^0.3.2", "@jridgewell/trace-mapping": "^0.3.17", "jsesc": "^2.5.1" @@ -7713,13 +7903,13 @@ } }, "@babel/helper-compilation-targets": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.7.tgz", - "integrity": "sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.5.tgz", + "integrity": "sha512-Ji+ywpHeuqxB8WDxraCiqR0xfhYjiDE/e6k7FuIaANnoOFxAHskHChz4vA1mJC9Lbm01s1PVAGhQY4FUKSkGZw==", "dev": true, "requires": { - "@babel/compat-data": "^7.20.5", - "@babel/helper-validator-option": "^7.18.6", + "@babel/compat-data": "^7.22.5", + "@babel/helper-validator-option": "^7.22.5", "browserslist": "^4.21.3", "lru-cache": "^5.1.1", 
"semver": "^6.3.0" @@ -7734,53 +7924,53 @@ } }, "@babel/helper-environment-visitor": { - "version": "7.18.9", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", - "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz", + "integrity": "sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==", "dev": true }, "@babel/helper-function-name": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz", - "integrity": "sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz", + "integrity": "sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==", "dev": true, "requires": { - "@babel/template": "^7.20.7", - "@babel/types": "^7.21.0" + "@babel/template": "^7.22.5", + "@babel/types": "^7.22.5" } }, "@babel/helper-hoist-variables": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", - "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", + "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", "dev": true, "requires": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" } }, "@babel/helper-module-imports": { - "version": "7.18.6", - "resolved": 
"https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", - "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz", + "integrity": "sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg==", "dev": true, "requires": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" } }, "@babel/helper-module-transforms": { - "version": "7.21.2", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz", - "integrity": "sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz", + "integrity": "sha512-+hGKDt/Ze8GFExiVHno/2dvG5IdstpzCq0y4Qc9OJ25D4q3pKfiIP/4Vp3/JvhDkLKsDK2api3q3fpIgiIF5bw==", "dev": true, "requires": { - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-module-imports": "^7.18.6", - "@babel/helper-simple-access": "^7.20.2", - "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/helper-validator-identifier": "^7.19.1", - "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.2", - "@babel/types": "^7.21.2" + "@babel/helper-environment-visitor": "^7.22.5", + "@babel/helper-module-imports": "^7.22.5", + "@babel/helper-simple-access": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.5", + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.5", + "@babel/types": "^7.22.5" } }, "@babel/helper-plugin-utils": { @@ -7790,59 +7980,59 @@ "dev": true }, "@babel/helper-simple-access": { - "version": "7.20.2", - "resolved": 
"https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz", - "integrity": "sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz", + "integrity": "sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==", "dev": true, "requires": { - "@babel/types": "^7.20.2" + "@babel/types": "^7.22.5" } }, "@babel/helper-split-export-declaration": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", - "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.5.tgz", + "integrity": "sha512-thqK5QFghPKWLhAV321lxF95yCg2K3Ob5yw+M3VHWfdia0IkPXUtoLH8x/6Fh486QUvzhb8YOWHChTVen2/PoQ==", "dev": true, "requires": { - "@babel/types": "^7.18.6" + "@babel/types": "^7.22.5" } }, "@babel/helper-string-parser": { - "version": "7.19.4", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz", - "integrity": "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", + "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", "dev": true }, "@babel/helper-validator-identifier": { - "version": "7.19.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", - "integrity": 
"sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz", + "integrity": "sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==", "dev": true }, "@babel/helper-validator-option": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz", - "integrity": "sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz", + "integrity": "sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw==", "dev": true }, "@babel/helpers": { - "version": "7.21.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.0.tgz", - "integrity": "sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.5.tgz", + "integrity": "sha512-pSXRmfE1vzcUIDFQcSGA5Mr+GxBV9oiRKDuDxXvWQQBCh8HoIjs/2DlDB7H8smac1IVrB9/xdXj2N3Wol9Cr+Q==", "dev": true, "requires": { - "@babel/template": "^7.20.7", - "@babel/traverse": "^7.21.0", - "@babel/types": "^7.21.0" + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.5", + "@babel/types": "^7.22.5" } }, "@babel/highlight": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", - "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.5.tgz", + "integrity": 
"sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.18.6", + "@babel/helper-validator-identifier": "^7.22.5", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, @@ -7906,9 +8096,9 @@ } }, "@babel/parser": { - "version": "7.21.2", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.21.2.tgz", - "integrity": "sha512-URpaIJQwEkEC2T9Kn+Ai6Xe/02iNaVCuT/PtoRz3GPVJVDpPd7mLo+VddTbhCRU9TXqW5mSrQfXZyi8kDKOVpQ==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.22.5.tgz", + "integrity": "sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q==", "dev": true }, "@babel/plugin-syntax-async-generators": { @@ -8020,30 +8210,30 @@ } }, "@babel/template": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz", - "integrity": "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.5.tgz", + "integrity": "sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==", "dev": true, "requires": { - "@babel/code-frame": "^7.18.6", - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7" + "@babel/code-frame": "^7.22.5", + "@babel/parser": "^7.22.5", + "@babel/types": "^7.22.5" } }, "@babel/traverse": { - "version": "7.21.2", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.2.tgz", - "integrity": "sha512-ts5FFU/dSUPS13tv8XiEObDu9K+iagEKME9kAbaP7r0Y9KtZJZ+NGndDvWoRAYNpeWafbpFeki3q9QoMD6gxyw==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.21.1", - "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-function-name": "^7.21.0", - "@babel/helper-hoist-variables": "^7.18.6", - 
"@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.21.2", - "@babel/types": "^7.21.2", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.22.5.tgz", + "integrity": "sha512-7DuIjPgERaNo6r+PZwItpjCZEa5vyw4eJGufeLxrPdBXBoLcCJCIasvK6pK/9DVNrLZTLFhUGqaC6X/PA007TQ==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.22.5", + "@babel/generator": "^7.22.5", + "@babel/helper-environment-visitor": "^7.22.5", + "@babel/helper-function-name": "^7.22.5", + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.5", + "@babel/parser": "^7.22.5", + "@babel/types": "^7.22.5", "debug": "^4.1.0", "globals": "^11.1.0" }, @@ -8057,13 +8247,13 @@ } }, "@babel/types": { - "version": "7.21.2", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.2.tgz", - "integrity": "sha512-3wRZSs7jiFaB8AjxiiD+VqN5DTG2iRvJGQ+qYFrs/654lg6kGTQWIOFjlBo5RaXuAZjBmP3+OQH4dmhqiiyYxw==", + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.22.5.tgz", + "integrity": "sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA==", "dev": true, "requires": { - "@babel/helper-string-parser": "^7.19.4", - "@babel/helper-validator-identifier": "^7.19.1", + "@babel/helper-string-parser": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.5", "to-fast-properties": "^2.0.0" } }, @@ -8902,18 +9092,22 @@ } }, "aws-cdk-lib": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.13.0.tgz", - "integrity": "sha512-nKpQk+9H7T128gpzl+7XTu+19Yzj6kmCMrvSwTXLa/qr4/soEpXI68/+19ymEAHOYEL4Dd3eyk490P+y0wzi6A==", + "version": "2.80.0", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.80.0.tgz", + "integrity": "sha512-PoqD3Yms5I0ajuTi071nTW/hpkH3XsdyZzn5gYsPv0qD7mqP3h6Qr+6RiGx+yQ1KcVFyxWdX15uK+DsC0KwvcQ==", "requires": { + "@aws-cdk/asset-awscli-v1": "^2.2.177", + 
"@aws-cdk/asset-kubectl-v20": "^2.1.1", + "@aws-cdk/asset-node-proxy-agent-v5": "^2.0.148", "@balena/dockerignore": "^1.0.2", "case": "1.6.3", - "fs-extra": "^9.1.0", - "ignore": "^5.2.0", - "jsonschema": "^1.4.0", + "fs-extra": "^11.1.1", + "ignore": "^5.2.4", + "jsonschema": "^1.4.1", "minimatch": "^3.1.2", - "punycode": "^2.1.1", - "semver": "^7.3.5", + "punycode": "^2.3.0", + "semver": "^7.5.1", + "table": "^6.8.1", "yaml": "1.10.2" }, "dependencies": { @@ -8921,8 +9115,29 @@ "version": "1.0.2", "bundled": true }, - "at-least-node": { - "version": "1.0.0", + "ajv": { + "version": "8.12.0", + "bundled": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, + "ansi-regex": { + "version": "5.0.1", + "bundled": true + }, + "ansi-styles": { + "version": "4.3.0", + "bundled": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "astral-regex": { + "version": "2.0.0", "bundled": true }, "balanced-match": { @@ -8941,26 +9156,52 @@ "version": "1.6.3", "bundled": true }, + "color-convert": { + "version": "2.0.1", + "bundled": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "bundled": true + }, "concat-map": { "version": "0.0.1", "bundled": true }, + "emoji-regex": { + "version": "8.0.0", + "bundled": true + }, + "fast-deep-equal": { + "version": "3.1.3", + "bundled": true + }, "fs-extra": { - "version": "9.1.0", + "version": "11.1.1", "bundled": true, "requires": { - "at-least-node": "^1.0.0", "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "graceful-fs": { - "version": "4.2.9", + "version": "4.2.11", "bundled": true }, "ignore": { - "version": "5.2.0", + "version": "5.2.4", + "bundled": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "bundled": true + }, + "json-schema-traverse": { + "version": "1.0.0", "bundled": true }, "jsonfile": { @@ -8972,7 +9213,11 @@ } }, 
"jsonschema": { - "version": "1.4.0", + "version": "1.4.1", + "bundled": true + }, + "lodash.truncate": { + "version": "4.4.2", "bundled": true }, "lru-cache": { @@ -8990,20 +9235,67 @@ } }, "punycode": { - "version": "2.1.1", + "version": "2.3.0", + "bundled": true + }, + "require-from-string": { + "version": "2.0.2", "bundled": true }, "semver": { - "version": "7.3.5", + "version": "7.5.1", "bundled": true, "requires": { "lru-cache": "^6.0.0" } }, + "slice-ansi": { + "version": "4.0.0", + "bundled": true, + "requires": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + } + }, + "string-width": { + "version": "4.2.3", + "bundled": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "bundled": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "table": { + "version": "6.8.1", + "bundled": true, + "requires": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + } + }, "universalify": { "version": "2.0.0", "bundled": true }, + "uri-js": { + "version": "4.4.1", + "bundled": true, + "requires": { + "punycode": "^2.1.0" + } + }, "yallist": { "version": "4.0.0", "bundled": true @@ -9109,7 +9401,8 @@ "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true }, "base": { "version": "0.11.2", @@ -9141,6 +9434,7 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + 
"dev": true, "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -9414,7 +9708,8 @@ "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true }, "constructs": { "version": "10.1.264", @@ -11399,6 +11694,7 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, "requires": { "brace-expansion": "^1.1.7" } @@ -12356,9 +12652,9 @@ } }, "semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz", + "integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==", "requires": { "lru-cache": "^6.0.0" }, diff --git a/release/staging-resources-cdk/package.json b/release/staging-resources-cdk/package.json index 9b16af0014..b293856d63 100644 --- a/release/staging-resources-cdk/package.json +++ b/release/staging-resources-cdk/package.json @@ -25,7 +25,7 @@ "typescript": "~3.9.7" }, "dependencies": { - "aws-cdk-lib": "2.13.0", + "aws-cdk-lib": "2.80.0", "constructs": "^10.0.0", "source-map-support": "^0.5.16" } diff --git a/settings.gradle b/settings.gradle index 26bb563e62..d668b7d12c 100644 --- a/settings.gradle +++ b/settings.gradle @@ -31,10 +31,10 @@ dependencyResolutionManagement { version('opensearch', '1.3.8') library('opensearch-client', 'org.opensearch.client', 
'opensearch-rest-client').versionRef('opensearch') library('opensearch-rhlc', 'org.opensearch.client', 'opensearch-rest-high-level-client').versionRef('opensearch') - version('spring', '5.3.27') + version('spring', '5.3.28') library('spring-core', 'org.springframework', 'spring-core').versionRef('spring') library('spring-context', 'org.springframework', 'spring-context').versionRef('spring') - version('guava', '31.1-jre') + version('guava', '32.0.1-jre') library('guava-core', 'com.google.guava', 'guava').versionRef('guava') } testLibs { @@ -42,7 +42,7 @@ dependencyResolutionManagement { version('mockito', '3.11.2') version('hamcrest', '2.2') version('awaitility', '4.2.0') - version('spring', '5.3.26') + version('spring', '5.3.28') version('slf4j', '2.0.6') library('junit-core', 'org.junit.jupiter', 'junit-jupiter').versionRef('junit') library('junit-params', 'org.junit.jupiter', 'junit-jupiter-params').versionRef('junit') From 0d294183996077bfcf5c4c66fbb9b862ec31a163 Mon Sep 17 00:00:00 2001 From: Taylor Gray Date: Thu, 29 Jun 2023 11:37:34 -0500 Subject: [PATCH 9/9] Add disable_authentication flag to the opensearch source (#2942) Signed-off-by: Taylor Gray --- .../opensearch-source/README.md | 3 + .../source/opensearch/OpenSearchSource.java | 2 + .../OpenSearchSourceConfiguration.java | 30 +++++----- .../client/OpenSearchClientFactory.java | 41 ++++++++----- .../OpenSearchSourceConfigurationTest.java | 58 ++++++++++++++----- .../client/OpenSearchClientFactoryTest.java | 36 ++++++++++++ 6 files changed, 124 insertions(+), 46 deletions(-) diff --git a/data-prepper-plugins/opensearch-source/README.md b/data-prepper-plugins/opensearch-source/README.md index 4894ba646d..b904d67378 100644 --- a/data-prepper-plugins/opensearch-source/README.md +++ b/data-prepper-plugins/opensearch-source/README.md @@ -114,6 +114,9 @@ opensearch-source-pipeline: - `password` (Optional) : A String of password used in the internal users of OpenSearch cluster. Default is null. 
+- `disable_authentication` (Optional) : A boolean that can disable authentication if the cluster supports it. Defaults to false. + + - `aws` (Optional) : AWS configurations. See [AWS Configuration](#aws_configuration) for details. SigV4 is enabled by default when this option is used. diff --git a/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSource.java b/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSource.java index 9a91db3ac1..22455dec3f 100644 --- a/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSource.java +++ b/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSource.java @@ -31,6 +31,8 @@ public OpenSearchSource(final OpenSearchSourceConfiguration openSearchSourceConf final AwsCredentialsSupplier awsCredentialsSupplier) { this.openSearchSourceConfiguration = openSearchSourceConfiguration; this.awsCredentialsSupplier = awsCredentialsSupplier; + + openSearchSourceConfiguration.validateAwsConfigWithUsernameAndPassword(); } @Override diff --git a/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSourceConfiguration.java b/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSourceConfiguration.java index 8e2fd42384..a100e17b20 100644 --- a/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSourceConfiguration.java +++ b/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSourceConfiguration.java @@ -6,9 +6,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import jakarta.validation.Valid; -import 
jakarta.validation.constraints.AssertTrue; -import jakarta.validation.constraints.Min; import jakarta.validation.constraints.NotNull; +import org.opensearch.dataprepper.model.plugin.InvalidPluginConfigurationException; import org.opensearch.dataprepper.plugins.source.opensearch.configuration.AwsAuthenticationConfiguration; import org.opensearch.dataprepper.plugins.source.opensearch.configuration.ConnectionConfiguration; import org.opensearch.dataprepper.plugins.source.opensearch.configuration.IndexParametersConfiguration; @@ -20,13 +19,6 @@ public class OpenSearchSourceConfiguration { - /** - * 0 indicates infinite retries - */ - @JsonProperty("max_retries") - @Min(0) - private Integer maxRetries = 0; - @NotNull @JsonProperty("hosts") private List hosts; @@ -37,6 +29,9 @@ public class OpenSearchSourceConfiguration { @JsonProperty("password") private String password; + @JsonProperty("disable_authentication") + private Boolean disableAuthentication = false; + @JsonProperty("connection") @Valid private ConnectionConfiguration connectionConfiguration = new ConnectionConfiguration(); @@ -57,10 +52,6 @@ public class OpenSearchSourceConfiguration { @Valid private SearchConfiguration searchConfiguration = new SearchConfiguration(); - public Integer getMaxRetries() { - return maxRetries; - } - public List getHosts() { return hosts; } @@ -73,6 +64,8 @@ public String getPassword() { return password; } + public Boolean isAuthenticationDisabled() { return disableAuthentication; } + public ConnectionConfiguration getConnectionConfiguration() { return connectionConfiguration; } @@ -93,10 +86,13 @@ public SearchConfiguration getSearchConfiguration() { return searchConfiguration; } - @AssertTrue(message = "Either username and password, or aws options must be specified. 
Both cannot be set at once.") - boolean validateAwsConfigWithUsernameAndPassword() { - return !((Objects.nonNull(awsAuthenticationOptions) && (Objects.nonNull(username) || Objects.nonNull(password))) || - (Objects.isNull(awsAuthenticationOptions) && (Objects.isNull(username) || Objects.isNull(password)))); + void validateAwsConfigWithUsernameAndPassword() { + + if (((Objects.nonNull(awsAuthenticationOptions) && ((Objects.nonNull(username) || Objects.nonNull(password)) || disableAuthentication)) || + (Objects.nonNull(username) || Objects.nonNull(password)) && disableAuthentication) || + (Objects.isNull(awsAuthenticationOptions) && (Objects.isNull(username) || Objects.isNull(password)) && !disableAuthentication)) { + throw new InvalidPluginConfigurationException("Either username and password, or aws options must be specified. Both cannot be set at once. Authentication can be disabled by setting the disable_authentication flag to true."); + } } } diff --git a/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/worker/client/OpenSearchClientFactory.java b/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/worker/client/OpenSearchClientFactory.java index d9e3a2f739..e588e1f711 100644 --- a/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/worker/client/OpenSearchClientFactory.java +++ b/data-prepper-plugins/opensearch-source/src/main/java/org/opensearch/dataprepper/plugins/source/opensearch/worker/client/OpenSearchClientFactory.java @@ -133,8 +133,7 @@ private RestClient createOpenSearchRestClient(final OpenSearchSourceConfiguratio final RestClientBuilder restClientBuilder = RestClient.builder(httpHosts); - LOG.info("Using username and password for auth for the OpenSearch source"); - attachUsernamePassword(restClientBuilder, openSearchSourceConfiguration); + attachBasicAuth(restClientBuilder, 
openSearchSourceConfiguration); setConnectAndSocketTimeout(restClientBuilder, openSearchSourceConfiguration); @@ -161,33 +160,36 @@ private org.elasticsearch.client.RestClient createElasticSearchRestClient(final new BasicHeader("Content-type", "application/json") }); - LOG.info("Using username and password for auth for the OpenSearch source"); - attachUsernamePassword(restClientBuilder, openSearchSourceConfiguration); - + attachBasicAuth(restClientBuilder, openSearchSourceConfiguration); setConnectAndSocketTimeout(restClientBuilder, openSearchSourceConfiguration); return restClientBuilder.build(); } - private void attachUsernamePassword(final RestClientBuilder restClientBuilder, final OpenSearchSourceConfiguration openSearchSourceConfiguration) { - final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials(openSearchSourceConfiguration.getUsername(), openSearchSourceConfiguration.getPassword())); + private void attachBasicAuth(final RestClientBuilder restClientBuilder, final OpenSearchSourceConfiguration openSearchSourceConfiguration) { restClientBuilder.setHttpClientConfigCallback(httpClientBuilder -> { - httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + if (!openSearchSourceConfiguration.isAuthenticationDisabled()) { + attachUsernameAndPassword(httpClientBuilder, openSearchSourceConfiguration); + } else { + LOG.warn("Authentication was explicitly disabled for the OpenSearch source"); + } + attachSSLContext(httpClientBuilder, openSearchSourceConfiguration); return httpClientBuilder; }); } - private void attachUsernamePassword(final org.elasticsearch.client.RestClientBuilder restClientBuilder, final OpenSearchSourceConfiguration openSearchSourceConfiguration) { - final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, - new 
UsernamePasswordCredentials(openSearchSourceConfiguration.getUsername(), openSearchSourceConfiguration.getPassword())); + private void attachBasicAuth(final org.elasticsearch.client.RestClientBuilder restClientBuilder, final OpenSearchSourceConfiguration openSearchSourceConfiguration) { restClientBuilder.setHttpClientConfigCallback(httpClientBuilder -> { - httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + + if (!openSearchSourceConfiguration.isAuthenticationDisabled()) { + attachUsernameAndPassword(httpClientBuilder, openSearchSourceConfiguration); + } else { + LOG.warn("Authentication was explicitly disabled for the OpenSearch source"); + } + attachSSLContext(httpClientBuilder, openSearchSourceConfiguration); httpClientBuilder.addInterceptorLast( (HttpResponseInterceptor) @@ -211,6 +213,15 @@ private void setConnectAndSocketTimeout(final RestClientBuilder restClientBuilde }); } + private void attachUsernameAndPassword(final HttpAsyncClientBuilder httpClientBuilder, final OpenSearchSourceConfiguration openSearchSourceConfiguration) { + LOG.info("Using username and password for auth for the OpenSearch source"); + + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, + new UsernamePasswordCredentials(openSearchSourceConfiguration.getUsername(), openSearchSourceConfiguration.getPassword())); + httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + } + private void setConnectAndSocketTimeout(final org.elasticsearch.client.RestClientBuilder restClientBuilder, final OpenSearchSourceConfiguration openSearchSourceConfiguration) { restClientBuilder.setRequestConfigCallback(requestConfigBuilder -> { if (Objects.nonNull(openSearchSourceConfiguration.getConnectionConfiguration().getConnectTimeout())) { diff --git a/data-prepper-plugins/opensearch-source/src/test/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSourceConfigurationTest.java 
b/data-prepper-plugins/opensearch-source/src/test/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSourceConfigurationTest.java index 7c9f8dbd19..950533c145 100644 --- a/data-prepper-plugins/opensearch-source/src/test/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSourceConfigurationTest.java +++ b/data-prepper-plugins/opensearch-source/src/test/java/org/opensearch/dataprepper/plugins/source/opensearch/OpenSearchSourceConfigurationTest.java @@ -9,10 +9,12 @@ import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator; import org.junit.jupiter.api.Test; +import org.opensearch.dataprepper.model.plugin.InvalidPluginConfigurationException; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; +import static org.junit.jupiter.api.Assertions.assertThrows; public class OpenSearchSourceConfigurationTest { @@ -21,7 +23,7 @@ public class OpenSearchSourceConfigurationTest { @Test void open_search_source_username_password_only() throws JsonProcessingException { - final String sourceConfigurationYaml = "max_retries: 5\n" + + final String sourceConfigurationYaml = "hosts: [\"http://localhost:9200\"]\n" + "username: test\n" + "password: test\n" + @@ -44,18 +46,49 @@ void open_search_source_username_password_only() throws JsonProcessingException assertThat(sourceConfiguration.getIndexParametersConfiguration(), notNullValue()); assertThat(sourceConfiguration.getSchedulingParameterConfiguration(), notNullValue()); assertThat(sourceConfiguration.getHosts(), notNullValue()); - assertThat(sourceConfiguration.getMaxRetries(), equalTo(5)); - assertThat(sourceConfiguration.validateAwsConfigWithUsernameAndPassword(), equalTo(true)); + sourceConfiguration.validateAwsConfigWithUsernameAndPassword(); assertThat(sourceConfiguration.getPassword(), equalTo("test")); 
assertThat(sourceConfiguration.getUsername(), equalTo("test")); assertThat(sourceConfiguration.getAwsAuthenticationOptions(), equalTo(null)); } @Test - void opensearch_source_aws_only() throws JsonProcessingException { - final String sourceConfigurationYaml = "max_retries: 5\n" + + void open_search_disabled_authentication() throws JsonProcessingException { + + final String sourceConfigurationYaml = "hosts: [\"http://localhost:9200\"]\n" + + "disable_authentication: true\n" + + "connection:\n" + + " insecure: true\n" + + " cert: \"cert\"\n" + + "indices:\n" + + " include:\n" + + " - index_name_regex: \"regex\"\n" + + " - index_name_regex: \"regex-two\"\n" + + "scheduling:\n" + + " job_count: 3\n" + + "search_options:\n" + + " batch_size: 1000\n" + + " query: \"test\"\n"; + final OpenSearchSourceConfiguration sourceConfiguration = objectMapper.readValue(sourceConfigurationYaml, OpenSearchSourceConfiguration.class); + + assertThat(sourceConfiguration.getSearchConfiguration(), notNullValue()); + assertThat(sourceConfiguration.getConnectionConfiguration(), notNullValue()); + assertThat(sourceConfiguration.getIndexParametersConfiguration(), notNullValue()); + assertThat(sourceConfiguration.getSchedulingParameterConfiguration(), notNullValue()); + assertThat(sourceConfiguration.getHosts(), notNullValue()); + + sourceConfiguration.validateAwsConfigWithUsernameAndPassword(); + assertThat(sourceConfiguration.isAuthenticationDisabled(), equalTo(true)); + assertThat(sourceConfiguration.getPassword(), equalTo(null)); + assertThat(sourceConfiguration.getUsername(), equalTo(null)); + assertThat(sourceConfiguration.getAwsAuthenticationOptions(), equalTo(null)); + } + + @Test + void opensearch_source_aws_only() throws JsonProcessingException { + final String sourceConfigurationYaml = "hosts: [\"http://localhost:9200\"]\n" + "connection:\n" + " insecure: true\n" + " cert: \"cert\"\n" + @@ -74,7 +107,7 @@ void opensearch_source_aws_only() throws JsonProcessingException { final 
OpenSearchSourceConfiguration sourceConfiguration = objectMapper.readValue(sourceConfigurationYaml, OpenSearchSourceConfiguration.class); - assertThat(sourceConfiguration.validateAwsConfigWithUsernameAndPassword(), equalTo(true)); + sourceConfiguration.validateAwsConfigWithUsernameAndPassword(); assertThat(sourceConfiguration.getPassword(), equalTo(null)); assertThat(sourceConfiguration.getUsername(), equalTo(null)); assertThat(sourceConfiguration.getAwsAuthenticationOptions(), notNullValue()); @@ -85,8 +118,7 @@ void opensearch_source_aws_only() throws JsonProcessingException { @Test void opensearch_source_aws_sts_external_id() throws JsonProcessingException { - final String sourceConfigurationYaml = "max_retries: 5\n" + - "hosts: [\"http://localhost:9200\"]\n" + + final String sourceConfigurationYaml = "hosts: [\"http://localhost:9200\"]\n" + "connection:\n" + " insecure: true\n" + " cert: \"cert\"\n" + @@ -106,7 +138,7 @@ void opensearch_source_aws_sts_external_id() throws JsonProcessingException { final OpenSearchSourceConfiguration sourceConfiguration = objectMapper.readValue(sourceConfigurationYaml, OpenSearchSourceConfiguration.class); - assertThat(sourceConfiguration.validateAwsConfigWithUsernameAndPassword(), equalTo(true)); + sourceConfiguration.validateAwsConfigWithUsernameAndPassword(); assertThat(sourceConfiguration.getPassword(), equalTo(null)); assertThat(sourceConfiguration.getUsername(), equalTo(null)); assertThat(sourceConfiguration.getAwsAuthenticationOptions(), notNullValue()); @@ -141,14 +173,12 @@ void using_both_aws_config_and_username_password_is_invalid() throws JsonProcess final OpenSearchSourceConfiguration sourceConfiguration = objectMapper.readValue(sourceConfigurationYaml, OpenSearchSourceConfiguration.class); - assertThat(sourceConfiguration.validateAwsConfigWithUsernameAndPassword(), equalTo(false)); - assertThat(sourceConfiguration.getMaxRetries(), equalTo(0)); + assertThrows(InvalidPluginConfigurationException.class, 
sourceConfiguration::validateAwsConfigWithUsernameAndPassword); } @Test - void one_of_username_password_or_aws_config_is_required() throws JsonProcessingException { + void one_of_username_password_or_aws_config_or_authDisabled_is_required() throws JsonProcessingException { final String sourceConfigurationYaml = - "max_retries: 5\n" + "hosts: [\"http://localhost:9200\"]\n" + "connection:\n" + " insecure: true\n" + @@ -165,6 +195,6 @@ void one_of_username_password_or_aws_config_is_required() throws JsonProcessingE final OpenSearchSourceConfiguration sourceConfiguration = objectMapper.readValue(sourceConfigurationYaml, OpenSearchSourceConfiguration.class); - assertThat(sourceConfiguration.validateAwsConfigWithUsernameAndPassword(), equalTo(false)); + assertThrows(InvalidPluginConfigurationException.class, sourceConfiguration::validateAwsConfigWithUsernameAndPassword); } } diff --git a/data-prepper-plugins/opensearch-source/src/test/java/org/opensearch/dataprepper/plugins/source/opensearch/worker/client/OpenSearchClientFactoryTest.java b/data-prepper-plugins/opensearch-source/src/test/java/org/opensearch/dataprepper/plugins/source/opensearch/worker/client/OpenSearchClientFactoryTest.java index cc811625d1..1cd2ad551c 100644 --- a/data-prepper-plugins/opensearch-source/src/test/java/org/opensearch/dataprepper/plugins/source/opensearch/worker/client/OpenSearchClientFactoryTest.java +++ b/data-prepper-plugins/opensearch-source/src/test/java/org/opensearch/dataprepper/plugins/source/opensearch/worker/client/OpenSearchClientFactoryTest.java @@ -29,6 +29,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; @@ -119,4 +121,38 @@ void provideOpenSearchClient_with_aws_auth() { 
assertThat(awsCredentialsOptions.getStsHeaderOverrides(), equalTo(Collections.emptyMap())); assertThat(awsCredentialsOptions.getStsRoleArn(), equalTo(stsRoleArn)); } + + @Test + void provideElasticSearchClient_with_auth_disabled() { + when(openSearchSourceConfiguration.isAuthenticationDisabled()).thenReturn(true); + + when(connectionConfiguration.getCertPath()).thenReturn(null); + when(connectionConfiguration.getSocketTimeout()).thenReturn(null); + when(connectionConfiguration.getConnectTimeout()).thenReturn(null); + when(connectionConfiguration.isInsecure()).thenReturn(true); + + final ElasticsearchClient elasticsearchClient = createObjectUnderTest().provideElasticSearchClient(openSearchSourceConfiguration); + assertThat(elasticsearchClient, notNullValue()); + + verifyNoInteractions(awsCredentialsSupplier); + verify(openSearchSourceConfiguration, never()).getUsername(); + verify(openSearchSourceConfiguration, never()).getPassword(); + } + + @Test + void provideOpenSearchClient_with_auth_disabled() { + when(openSearchSourceConfiguration.isAuthenticationDisabled()).thenReturn(true); + + when(connectionConfiguration.getCertPath()).thenReturn(null); + when(connectionConfiguration.getSocketTimeout()).thenReturn(null); + when(connectionConfiguration.getConnectTimeout()).thenReturn(null); + when(connectionConfiguration.isInsecure()).thenReturn(true); + + final OpenSearchClient openSearchClient = createObjectUnderTest().provideOpenSearchClient(openSearchSourceConfiguration); + assertThat(openSearchClient, notNullValue()); + + verifyNoInteractions(awsCredentialsSupplier); + verify(openSearchSourceConfiguration, never()).getUsername(); + verify(openSearchSourceConfiguration, never()).getPassword(); + } }