diff --git a/CHANGELOG.md b/CHANGELOG.md index ced6db9b..59c52e5a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,15 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/) and this project adheres to [Semantic Versioning](http://semver.org/). +## 2.7.0 (UNRELEASED) +#### New Features +- [PR-225](https://github.com/SourceLabOrg/kafka-webview/pull/225) + - Adds the ability to set custom kafka client properties when defining a cluster. + - Adds a new debugging tool under `/configuration/cluster` to see the generated kafka client properties. + +#### Internal Dependency Updates +- Updated Kafka Client library version from 2.0.1 to 2.2.2. + ## 2.6.0 (06/21/2020) - [ISSUE-144](https://github.com/SourceLabOrg/kafka-webview/issues/144) Make providing a TrustStore file when setting up a SSL enabled cluster optional. You might not want/need this option if your JVM is already configured to accept the SSL certificate served by the cluster, or if the cluster's certificate can be validated by a publically accessible CA. - [PR-215](https://github.com/SourceLabOrg/kafka-webview/pull/215) Improve errors displayed when using the `test cluster` functionality. 
diff --git a/dev-cluster/src/main/java/org/sourcelab/kafka/devcluster/DevCluster.java b/dev-cluster/src/main/java/org/sourcelab/kafka/devcluster/DevCluster.java index 7d3b3282..07b6abc6 100644 --- a/dev-cluster/src/main/java/org/sourcelab/kafka/devcluster/DevCluster.java +++ b/dev-cluster/src/main/java/org/sourcelab/kafka/devcluster/DevCluster.java @@ -47,7 +47,6 @@ import org.slf4j.LoggerFactory; import java.net.URL; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; diff --git a/dev-cluster/src/main/java/org/sourcelab/kafka/devcluster/LdapServer.java b/dev-cluster/src/main/java/org/sourcelab/kafka/devcluster/LdapServer.java index a34f1ba7..461afbb4 100644 --- a/dev-cluster/src/main/java/org/sourcelab/kafka/devcluster/LdapServer.java +++ b/dev-cluster/src/main/java/org/sourcelab/kafka/devcluster/LdapServer.java @@ -28,7 +28,6 @@ import com.unboundid.ldap.listener.InMemoryDirectoryServerConfig; import com.unboundid.ldap.listener.InMemoryListenerConfig; import com.unboundid.ldap.sdk.LDAPException; -import com.unboundid.ldap.sdk.OperationType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/kafka-webview-ui/pom.xml b/kafka-webview-ui/pom.xml index c937ef2e..c9278505 100644 --- a/kafka-webview-ui/pom.xml +++ b/kafka-webview-ui/pom.xml @@ -34,7 +34,7 @@ 1.8.2 4.0.0-beta - 2.0.1 + 2.2.2 3.6.1 3.0.11.RELEASE diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/configuration/PluginConfig.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/configuration/PluginConfig.java index 2c4cdf48..17da5ccc 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/configuration/PluginConfig.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/configuration/PluginConfig.java @@ -30,6 +30,7 @@ import org.apache.kafka.common.serialization.Deserializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import 
org.sourcelab.kafka.webview.ui.manager.SensitiveConfigScrubber; import org.sourcelab.kafka.webview.ui.manager.encryption.SecretManager; import org.sourcelab.kafka.webview.ui.manager.kafka.KafkaAdminFactory; import org.sourcelab.kafka.webview.ui.manager.kafka.KafkaClientConfigUtil; @@ -202,4 +203,14 @@ public KafkaClientConfigUtil getKafkaClientConfigUtil(final AppProperties appPro public SaslUtility getSaslUtility(final SecretManager secretManager) { return new SaslUtility(secretManager); } + + /** + * For scrubbing sensitive values from client configs. + * @param saslUtility instance. + * @return instance. + */ + @Bean + public SensitiveConfigScrubber getSensitiveConfigScrubber(final SaslUtility saslUtility) { + return new SensitiveConfigScrubber(saslUtility); + } } diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/cluster/ClusterController.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/cluster/ClusterController.java index ce71ea67..85700c47 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/cluster/ClusterController.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/cluster/ClusterController.java @@ -29,11 +29,9 @@ import org.sourcelab.kafka.webview.ui.manager.ui.FlashMessage; import org.sourcelab.kafka.webview.ui.manager.ui.datatable.Datatable; import org.sourcelab.kafka.webview.ui.manager.ui.datatable.DatatableColumn; -import org.sourcelab.kafka.webview.ui.manager.ui.datatable.DatatableFilter; import org.sourcelab.kafka.webview.ui.manager.ui.datatable.LinkTemplate; import org.sourcelab.kafka.webview.ui.manager.ui.datatable.YesNoBadgeTemplate; import org.sourcelab.kafka.webview.ui.model.Cluster; -import org.sourcelab.kafka.webview.ui.model.View; import org.sourcelab.kafka.webview.ui.repository.ClusterRepository; import org.sourcelab.kafka.webview.ui.repository.ViewRepository; import 
org.springframework.beans.factory.annotation.Autowired; diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/ClusterConfigController.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/ClusterConfigController.java index d9ec9b06..71cea66e 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/ClusterConfigController.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/ClusterConfigController.java @@ -24,12 +24,16 @@ package org.sourcelab.kafka.webview.ui.controller.configuration.cluster; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.kafka.common.errors.TimeoutException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sourcelab.kafka.webview.ui.controller.BaseController; import org.sourcelab.kafka.webview.ui.controller.configuration.cluster.forms.ClusterForm; +import org.sourcelab.kafka.webview.ui.manager.SensitiveConfigScrubber; import org.sourcelab.kafka.webview.ui.manager.encryption.SecretManager; +import org.sourcelab.kafka.webview.ui.manager.kafka.KafkaClientConfigUtil; import org.sourcelab.kafka.webview.ui.manager.kafka.KafkaOperations; import org.sourcelab.kafka.webview.ui.manager.kafka.KafkaOperationsFactory; import org.sourcelab.kafka.webview.ui.manager.plugin.UploadManager; @@ -51,7 +55,11 @@ import javax.validation.Valid; import java.io.IOException; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; import java.util.Optional; +import java.util.stream.Collectors; /** * Controller for Cluster CRUD operations. 
@@ -76,6 +84,9 @@ public class ClusterConfigController extends BaseController { @Autowired private SaslUtility saslUtility; + @Autowired + private SensitiveConfigScrubber sensitiveConfigScrubber; + /** * GET Displays main configuration index. */ @@ -98,10 +109,16 @@ public String index(final Model model) { public String createClusterForm(final ClusterForm clusterForm, final Model model) { // Setup breadcrumbs setupBreadCrumbs(model, "Create", "/configuration/cluster/create"); + setupCreateForm(model); return "configuration/cluster/create"; } + private void setupCreateForm(final Model model) { + // Load all available properties + model.addAttribute("kafkaSettings", KafkaClientConfigUtil.getAllKafkaConsumerProperties()); + } + /** * GET Displays edit cluster form. */ @@ -112,6 +129,9 @@ public String editClusterForm( final RedirectAttributes redirectAttributes, final Model model) { + // Initial setup + setupCreateForm(model); + // Retrieve by id final Optional clusterOptional = clusterRepository.findById(id); if (!clusterOptional.isPresent()) { @@ -152,6 +172,23 @@ public String editClusterForm( clusterForm.setSaslPassword(saslProperties.getPlainPassword()); clusterForm.setSaslCustomJaas(saslProperties.getJaas()); + // Deserialize message parameters json string into a map + final ObjectMapper objectMapper = new ObjectMapper(); + Map customOptions; + try { + customOptions = objectMapper.readValue(cluster.getOptionParameters(), Map.class); + } catch (final IOException e) { + // Fail safe? + customOptions = new HashMap<>(); + } + + // Update form object with properties. 
+ for (final Map.Entry entry : customOptions.entrySet()) { + clusterForm.getCustomOptionNames().add(entry.getKey()); + clusterForm.getCustomOptionValues().add(entry.getValue()); + } + clusterForm.setCustomOptionsEnabled(!customOptions.entrySet().isEmpty()); + // Display template return "configuration/cluster/create"; } @@ -163,7 +200,11 @@ public String editClusterForm( public String clusterUpdate( @Valid final ClusterForm clusterForm, final BindingResult bindingResult, - final RedirectAttributes redirectAttributes) { + final RedirectAttributes redirectAttributes, + final Model model) { + + // Initial Setup. + setupCreateForm(model); final boolean updateExisting = clusterForm.exists(); @@ -368,9 +409,13 @@ else if (!clusterForm.exists() || (clusterForm.getTrustStoreFile() != null && !c cluster.setSaslConfig(""); } + // Handle custom options, convert into a JSON string. + final String jsonStr = handleCustomOptions(clusterForm); + // Update properties cluster.setName(clusterForm.getName()); cluster.setBrokerHosts(clusterForm.getBrokerHosts()); + cluster.setOptionParameters(jsonStr); cluster.setValid(false); clusterRepository.save(cluster); @@ -382,6 +427,30 @@ else if (!clusterForm.exists() || (clusterForm.getTrustStoreFile() != null && !c return "redirect:/configuration/cluster"; } + /** + * Handles getting custom defined options and values. + * @param clusterForm The submitted form. + */ + private String handleCustomOptions(final ClusterForm clusterForm) { + // If the checkbox is unselected, then just return "{}" + if (!clusterForm.getCustomOptionsEnabled()) { + return "{}"; + } + + // Build a map of Name => Value + final Map mappedOptions = clusterForm.getCustomOptionsAsMap(); + + // For converting map to json string + final ObjectMapper objectMapper = new ObjectMapper(); + + try { + return objectMapper.writeValueAsString(mappedOptions); + } catch (final JsonProcessingException e) { + // Fail safe? + return "{}"; + } + } + /** * POST deletes the selected cluster. 
*/ @@ -463,6 +532,47 @@ public String testCluster(@PathVariable final Long id, final RedirectAttributes return "redirect:/configuration/cluster"; } + /** + * GET for getting client configuration. + */ + @RequestMapping(path = "/config/{id}", method = RequestMethod.GET) + public String getClientConfig( + @PathVariable final Long id, + final RedirectAttributes redirectAttributes, + final Model model + ) { + // Retrieve it + final Optional clusterOptional = clusterRepository.findById(id); + if (!clusterOptional.isPresent()) { + // Set flash message & redirect + redirectAttributes.addFlashAttribute("FlashMessage", FlashMessage.newWarning("Unable to find cluster!")); + + // redirect to cluster index + return "redirect:/configuration/cluster"; + } + final Cluster cluster = clusterOptional.get(); + + // Setup breadcrumbs + setupBreadCrumbs(model, "Client Config: " + cluster.getName(), null); + + // Generate configs with sensitive fields scrubbed. + final Map configs = sensitiveConfigScrubber.filterSensitiveOptions( + kafkaOperationsFactory.getConsumerConfig(cluster, getLoggedInUserId()), + cluster + ) + // Sort by key for easier display. 
+ .entrySet().stream() + .sorted(Map.Entry.comparingByKey()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> oldValue, LinkedHashMap::new)); + + // Render + model.addAttribute("configs", configs); + model.addAttribute("cluster", cluster); + + // Render cluster config template + return "configuration/cluster/config"; + } + private void setupBreadCrumbs(final Model model, final String name, final String url) { // Setup breadcrumbs final BreadCrumbManager manager = new BreadCrumbManager(model) diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/forms/ClusterForm.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/forms/ClusterForm.java index b7e57276..6ee7e650 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/forms/ClusterForm.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/forms/ClusterForm.java @@ -28,6 +28,11 @@ import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; /** * Represents the form for creating/updating the Cluster entity. @@ -91,6 +96,20 @@ public class ClusterForm { */ private String saslCustomJaas; + // Custom Client Properties + private Boolean customOptionsEnabled; + + /** + * Names of custom options. + */ + private List customOptionNames = new ArrayList<>(); + + /** + * Values of custom options. + */ + private List customOptionValues = new ArrayList<>(); + + public Long getId() { return id; } @@ -261,6 +280,66 @@ public boolean isPlainSaslMechanism() { return "PLAIN".equals(saslMechanism); } + /** + * Utility method to return custom options as a map. 
+ */ + public Map getCustomOptionsAsMap() { + // Build a map of Name => Value + final Map mappedOptions = new HashMap<>(); + + final Iterator names = getCustomOptionNames().iterator(); + final Iterator values = getCustomOptionValues().iterator(); + + while (names.hasNext()) { + final String name = names.next(); + final String value; + if (values.hasNext()) { + value = values.next(); + } else { + value = ""; + } + mappedOptions.put(name, value); + } + return mappedOptions; + } + + public List getCustomOptionNames() { + return customOptionNames; + } + + public void setCustomOptionNames(final List customOptionNames) { + this.customOptionNames = customOptionNames; + } + + public List getCustomOptionValues() { + return customOptionValues; + } + + public void setCustomOptionValues(final List customOptionValues) { + this.customOptionValues = customOptionValues; + } + + /** + * Enable/Disable flag for custom client options. + */ + public Boolean getCustomOptionsEnabled() { + if (customOptionsEnabled == null) { + return customOptionsEnabled = false; + } + return customOptionsEnabled; + } + + /** + * Enable/Disable flag for custom client options. 
+ */ + public void setCustomOptionsEnabled(final Boolean customOptionsEnabled) { + if (customOptionsEnabled == null) { + this.customOptionsEnabled = false; + } else { + this.customOptionsEnabled = customOptionsEnabled; + } + } + @Override public String toString() { return "ClusterForm{" diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/view/ViewController.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/view/ViewController.java index f38b959b..072ae54d 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/view/ViewController.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/controller/view/ViewController.java @@ -34,7 +34,6 @@ import org.sourcelab.kafka.webview.ui.model.Cluster; import org.sourcelab.kafka.webview.ui.model.View; import org.sourcelab.kafka.webview.ui.repository.ClusterRepository; -import org.sourcelab.kafka.webview.ui.repository.MessageFormatRepository; import org.sourcelab.kafka.webview.ui.repository.ViewRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Pageable; diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/SensitiveConfigScrubber.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/SensitiveConfigScrubber.java new file mode 100644 index 00000000..4f0669d0 --- /dev/null +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/SensitiveConfigScrubber.java @@ -0,0 +1,86 @@ +/** + * MIT License + * + * Copyright (c) 2017, 2018, 2019 SourceLab.org (https://github.com/SourceLabOrg/kafka-webview/) + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.sourcelab.kafka.webview.ui.manager; + +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.config.SslConfigs; +import org.sourcelab.kafka.webview.ui.manager.sasl.SaslProperties; +import org.sourcelab.kafka.webview.ui.manager.sasl.SaslUtility; +import org.sourcelab.kafka.webview.ui.model.Cluster; +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * Utility class for removing sensitive values from generated kafka client configuration. + */ +public class SensitiveConfigScrubber { + + private final SaslUtility saslUtility; + + @Autowired + public SensitiveConfigScrubber(final SaslUtility saslUtility) { + this.saslUtility = Objects.requireNonNull(saslUtility); + } + + /** + * Given a Kafka Client Config, scrub any sensitive fields from the config and return a copy. + * @param config The config to scrub. + * @param cluster (optional) The cluster associated with the config. + * @return Copy of scrubbed configuration. 
+ */ + public Map filterSensitiveOptions(final Map config, final Cluster cluster) { + Objects.requireNonNull(config); + + // Create a copy of the map + final Map copy = new HashMap<>(config); + + // Filter sensitive fields + final String[] sensitiveKeys = new String[] { + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, + }; + Arrays.stream(sensitiveKeys) + .filter(copy::containsKey) + .forEach((key) -> copy.put(key, "**HIDDEN**")); + + // Filter JAAS Config + if (copy.containsKey(SaslConfigs.SASL_JAAS_CONFIG) && cluster != null) { + final SaslProperties saslProperties = saslUtility.decodeProperties(cluster); + + // Only replace if plainPassword field set and non-empty. + if (saslProperties.getPlainPassword() != null && !saslProperties.getPlainPassword().isEmpty()) { + String jaasConfig = (String) copy.get(SaslConfigs.SASL_JAAS_CONFIG); + jaasConfig = jaasConfig.replaceAll(saslProperties.getPlainPassword(), "**HIDDEN**"); + copy.put(SaslConfigs.SASL_JAAS_CONFIG, jaasConfig); + } + } + + // Return copy of the map. + return copy; + } +} diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaAdminFactory.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaAdminFactory.java index db2ffbcf..0ef274cc 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaAdminFactory.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaAdminFactory.java @@ -73,6 +73,20 @@ public AdminClient create(final ClusterConfig clusterConfig, final String client * @return KafkaConsumer instance. */ public KafkaConsumer createConsumer(final ClusterConfig clusterConfig, final String clientId) { + // Create config + final Map config = getConsumerConfig(clusterConfig, clientId); + + // Create consumer + return new KafkaConsumer<>(config); + } + + /** + * Build the configuration for the underlying consumer client. 
+ * @param clusterConfig What cluster to connect to. + * @param clientId What clientId to associate the connection with. + * @return Map of kafka client properties. + */ + public Map getConsumerConfig(final ClusterConfig clusterConfig, final String clientId) { // Create a map final Map config = configUtil.applyCommonSettings(clusterConfig, clientId); @@ -80,7 +94,6 @@ public KafkaConsumer createConsumer(final ClusterConfig clusterC config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); - // Create consumer - return new KafkaConsumer<>(config); + return config; } } diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaClientConfigUtil.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaClientConfigUtil.java index 2f3f4c9e..fe0b602f 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaClientConfigUtil.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaClientConfigUtil.java @@ -27,18 +27,29 @@ import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.admin.AdminClientConfig; import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.SaslConfigs; import org.apache.kafka.common.config.SslConfigs; import org.apache.kafka.common.security.auth.SecurityProtocol; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.sourcelab.kafka.webview.ui.manager.kafka.config.ClusterConfig; +import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; /** * Utility class to DRY out common Kafka client configuration options that apply to 
multiple client types. */ public class KafkaClientConfigUtil { + private static final Logger logger = LoggerFactory.getLogger(KafkaClientConfigUtil.class); + /** * Path on filesystem where keystores are persisted. */ @@ -109,9 +120,38 @@ private Map applyCommonSettings( // Optionally configure SASL applySaslSettings(clusterConfig, config); + // Apply cluster client properties if defined. + // Note: This should always be applied last. + applyClusterClientProperties(clusterConfig, config); + return config; } + /** + * If client properties are defined on the cluster, they get applied last ontop of everything else. + * @param clusterConfig configuration properties. + * @param config config to be applied to. + */ + private void applyClusterClientProperties(final ClusterConfig clusterConfig, final Map config) { + if (clusterConfig.getClusterClientProperties().isEmpty()) { + return; + } + + for (final Map.Entry entry : clusterConfig.getClusterClientProperties().entrySet()) { + if (config.containsKey(entry.getKey())) { + // Log a warning as behavior may be altered in a way that causes Kafka WebView to no longer function. + logger.warn( + "Client property defined on the cluster replaced property '{}'. " + + "The original value of '{}' was replaced with with '{}'. " + + "Overriding of configuration properties in this way may cause Kafka Webview to not function correctly.", + entry.getKey(), config.get(entry.getKey()), entry.getValue() + ); + } + // Set value. + config.put(entry.getKey(), entry.getValue()); + } + } + /** * If SSL is configured for this cluster, apply the settings. * @param clusterConfig Cluster configuration definition to source values from. @@ -164,4 +204,87 @@ private void applySaslSettings(final ClusterConfig clusterConfig, final Map getAllKafkaConsumerProperties() { + // Likely not the most graceful way to group these properties... 
+ + // Our return value + final List kafkaSettings = new ArrayList<>(); + + // Keep a running set of all the keys we've collected so far. + final Set allPreviousKeys = new HashSet<>(); + + // Add SSL Settings + ConfigDef configDef = new ConfigDef(); + SslConfigs.addClientSslSupport(configDef); + kafkaSettings.add(new KafkaSettings( + "SSL", configDef.names() + )); + + // Add SASL Settings + configDef = new ConfigDef(); + SaslConfigs.addClientSaslSupport(configDef); + kafkaSettings.add(new KafkaSettings( + "SASL", configDef.names() + )); + + // Collect all keys. + kafkaSettings + .forEach((entry) -> allPreviousKeys.addAll(entry.getKeys())); + + // Add basic consumer properties, removing entries from the previous categories + kafkaSettings.add(new KafkaSettings( + "Consumer", + ConsumerConfig.configNames().stream() + .filter((entry) -> !allPreviousKeys.contains(entry)) + .collect(Collectors.toSet()) + )); + + // Sort our list and return + return kafkaSettings.stream() + .sorted(Comparator.comparing(KafkaSettings::getGroup)) + .collect(Collectors.toList()); + } + + /** + * Abstracted list of Kafka keys categorized. + */ + public static class KafkaSettings { + private final String group; + private final List keys; + + /** + * Constructor. + * @param group Category name. + * @param keys Available keys. 
+ */ + private KafkaSettings(final String group, final Set keys) { + this.group = group; + + // Sort the list of keys + this.keys = keys.stream() + .sorted() + .collect(Collectors.toList()); + } + + public String getGroup() { + return group; + } + + public List getKeys() { + return keys; + } + + @Override + public String toString() { + return "KafkaSettings{" + + "group='" + group + '\'' + + ", keys=" + keys + + '}'; + } + } } diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaOperationsFactory.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaOperationsFactory.java index a738f05a..4977d93e 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaOperationsFactory.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaOperationsFactory.java @@ -30,6 +30,8 @@ import org.sourcelab.kafka.webview.ui.manager.kafka.config.ClusterConfig; import org.sourcelab.kafka.webview.ui.model.Cluster; +import java.util.Map; + /** * Factory for creating an AdminClient and wrapping it with KafkaOperations. */ @@ -57,7 +59,7 @@ public KafkaOperationsFactory(final SecretManager secretManager, final KafkaAdmi * @return KafkaOperations client. */ public KafkaOperations create(final Cluster cluster, final long userId) { - final String clientId = consumerIdPrefix + userId; + final String clientId = getClientId(userId); // Create new Operational Client final ClusterConfig clusterConfig = ClusterConfig.newBuilder(cluster, secretManager).build(); @@ -66,4 +68,20 @@ public KafkaOperations create(final Cluster cluster, final long userId) { return new KafkaOperations(adminClient, kafkaConsumer); } + + /** + * Build the configuration for the underlying consumer client. + * @param cluster What cluster to connect to. + * @param userId What userId to associate the connection with. + * @return Map of kafka client properties. 
+ */ + public Map getConsumerConfig(final Cluster cluster, final long userId) { + final String clientId = getClientId(userId); + final ClusterConfig clusterConfig = ClusterConfig.newBuilder(cluster, secretManager).build(); + return kafkaAdminFactory.getConsumerConfig(clusterConfig, clientId); + } + + private String getClientId(final long userId) { + return consumerIdPrefix + userId; + } } diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/config/ClusterConfig.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/config/ClusterConfig.java index 86d8cf3f..27eaab15 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/config/ClusterConfig.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/kafka/config/ClusterConfig.java @@ -24,13 +24,18 @@ package org.sourcelab.kafka.webview.ui.manager.kafka.config; +import com.fasterxml.jackson.databind.ObjectMapper; import org.sourcelab.kafka.webview.ui.manager.encryption.SecretManager; import org.sourcelab.kafka.webview.ui.manager.sasl.SaslProperties; import org.sourcelab.kafka.webview.ui.manager.sasl.SaslUtility; import org.sourcelab.kafka.webview.ui.model.Cluster; +import java.io.IOException; import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; +import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -58,6 +63,11 @@ public class ClusterConfig { private final String saslMechanism; private final String saslJaas; + /** + * Client Properties defined on the Cluster configuration. + */ + private final Map clusterClientProperties; + /** * Private constructor for connecting to SSL brokers. 
*/ @@ -72,7 +82,9 @@ private ClusterConfig( final String saslPlaintextUsername, final String saslPlaintextPassword, final String saslMechanism, - final String saslJaas) { + final String saslJaas, + final Map clusterClientProperties + ) { this.brokerHosts = brokerHosts; @@ -89,6 +101,11 @@ private ClusterConfig( this.saslPlaintextPassword = saslPlaintextPassword; this.saslMechanism = saslMechanism; this.saslJaas = saslJaas; + + // Shallow copy the cluster client properties. + this.clusterClientProperties = Collections.unmodifiableMap( + new HashMap<>(clusterClientProperties) + ); } public Set getBrokerHosts() { @@ -139,6 +156,10 @@ public String getSaslJaas() { return saslJaas; } + public Map getClusterClientProperties() { + return clusterClientProperties; + } + @Override public String toString() { return "ClusterConfig{" @@ -196,6 +217,19 @@ public static Builder newBuilder(final Cluster cluster, final SecretManager secr builder.withUseSasl(false); } + // If we have defined cluster client options, decode and set them. + if (cluster.getOptionParameters() != null) { + final ObjectMapper objectMapper = new ObjectMapper(); + Map customOptions; + try { + customOptions = objectMapper.readValue(cluster.getOptionParameters(), Map.class); + } catch (final IOException e) { + // Fail safe? + customOptions = new HashMap<>(); + } + builder.withClusterClientConfig(customOptions); + } + return builder; } @@ -223,6 +257,12 @@ public static final class Builder { private String saslMechanism; private String saslJaas; + /** + * Override properties defined from the cluster.option_parameters field. + * These should be applied LAST ontop of all the other config options. + */ + private Map clusterOverrideProperties = new HashMap<>(); + private Builder() { } @@ -323,6 +363,22 @@ public Builder withSaslJaas(final String saslJaas) { return this; } + /** + * Declare Override Properties defined on the cluster. 
+ */ + public Builder withClusterClientConfig(final Map clusterClientConfig) { + this.clusterOverrideProperties.putAll(clusterClientConfig); + return this; + } + + /** + * Declare Override Properties defined on the cluster. + */ + public Builder withClusterClientConfig(final String key, final String value) { + this.clusterOverrideProperties.put(key, value); + return this; + } + /** * Create ClusterConfig instance from builder values. */ @@ -340,7 +396,9 @@ public ClusterConfig build() { saslPlaintextUsername, saslPlaintextPassword, saslMechanism, - saslJaas + saslJaas, + // Cluster client properties + clusterOverrideProperties ); } } diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/ui/datatable/Datatable.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/ui/datatable/Datatable.java index 36e0375a..b4737275 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/ui/datatable/Datatable.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/ui/datatable/Datatable.java @@ -31,7 +31,6 @@ import org.springframework.data.jpa.repository.JpaSpecificationExecutor; import javax.persistence.criteria.Path; -import javax.persistence.criteria.Predicate; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; @@ -46,8 +45,6 @@ import java.util.Objects; import java.util.Set; -import static org.sourcelab.kafka.webview.ui.manager.ui.datatable.ConstraintOperator.EQUALS; - /** * Aims to be a re-usable datatable UI component backed by a JPA Repository. * @param Type of object being rendered in the datatable. 
diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/ui/datatable/DatatableColumn.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/ui/datatable/DatatableColumn.java index 1b01d754..03e9767c 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/ui/datatable/DatatableColumn.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/manager/ui/datatable/DatatableColumn.java @@ -29,7 +29,6 @@ import java.util.List; import java.util.Objects; import java.util.function.Function; -import java.util.stream.Collectors; /** * Defines a column within a datatable. diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/model/Cluster.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/model/Cluster.java index b7cb327b..d6fe64ea 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/model/Cluster.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/model/Cluster.java @@ -45,6 +45,9 @@ public class Cluster { @Column(nullable = false) private String brokerHosts; + @Column(nullable = false) + private String optionParameters = "{}"; + @Column(nullable = false) private boolean isSslEnabled; @@ -106,6 +109,14 @@ public void setBrokerHosts(final String brokerHosts) { this.brokerHosts = brokerHosts; } + public String getOptionParameters() { + return optionParameters; + } + + public void setOptionParameters(final String optionParameters) { + this.optionParameters = optionParameters; + } + public boolean isSslEnabled() { return isSslEnabled; } diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/repository/ClusterRepository.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/repository/ClusterRepository.java index 4be5a6b0..9e1473f0 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/repository/ClusterRepository.java +++ 
b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/repository/ClusterRepository.java @@ -25,9 +25,6 @@ package org.sourcelab.kafka.webview.ui.repository; import org.sourcelab.kafka.webview.ui.model.Cluster; -import org.sourcelab.kafka.webview.ui.model.View; -import org.springframework.data.domain.Page; -import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.JpaSpecificationExecutor; import org.springframework.data.repository.CrudRepository; import org.springframework.stereotype.Repository; diff --git a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/repository/UserRepository.java b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/repository/UserRepository.java index 90d8bd32..e098d6c2 100644 --- a/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/repository/UserRepository.java +++ b/kafka-webview-ui/src/main/java/org/sourcelab/kafka/webview/ui/repository/UserRepository.java @@ -25,7 +25,6 @@ package org.sourcelab.kafka.webview.ui.repository; import org.sourcelab.kafka.webview.ui.model.User; -import org.sourcelab.kafka.webview.ui.model.View; import org.springframework.data.jpa.repository.JpaSpecificationExecutor; import org.springframework.data.repository.CrudRepository; import org.springframework.stereotype.Repository; diff --git a/kafka-webview-ui/src/main/resources/schema/master_schema.sql b/kafka-webview-ui/src/main/resources/schema/master_schema.sql index 60aa7a9f..f0c8871b 100644 --- a/kafka-webview-ui/src/main/resources/schema/master_schema.sql +++ b/kafka-webview-ui/src/main/resources/schema/master_schema.sql @@ -15,6 +15,7 @@ CREATE TABLE IF NOT EXISTS `cluster` ( id INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, name VARCHAR(255) UNIQUE NOT NULL, broker_hosts TEXT NOT NULL, + option_parameters TEXT NOT NULL DEFAULT '{}', is_ssl_enabled BOOLEAN DEFAULT FALSE NOT NULL, trust_store_file TEXT DEFAULT NULL, trust_store_password TEXT DEFAULT NULL, diff --git 
a/kafka-webview-ui/src/main/resources/schema/migration/h2/V3__ClusterCustomOptions.sql b/kafka-webview-ui/src/main/resources/schema/migration/h2/V3__ClusterCustomOptions.sql new file mode 100644 index 00000000..d1217f5b --- /dev/null +++ b/kafka-webview-ui/src/main/resources/schema/migration/h2/V3__ClusterCustomOptions.sql @@ -0,0 +1,4 @@ +ALTER TABLE `cluster` ADD COLUMN IF NOT EXISTS option_parameters TEXT NOT NULL DEFAULT '{}' AFTER broker_hosts; +UPDATE `cluster` set option_parameters = '{}' where option_parameters IS NULL; + + diff --git a/kafka-webview-ui/src/main/resources/templates/cluster/index.html b/kafka-webview-ui/src/main/resources/templates/cluster/index.html index 6917a77c..20bf2831 100644 --- a/kafka-webview-ui/src/main/resources/templates/cluster/index.html +++ b/kafka-webview-ui/src/main/resources/templates/cluster/index.html @@ -2,7 +2,6 @@ diff --git a/kafka-webview-ui/src/main/resources/templates/configuration/cluster/config.html b/kafka-webview-ui/src/main/resources/templates/configuration/cluster/config.html new file mode 100644 index 00000000..21798676 --- /dev/null +++ b/kafka-webview-ui/src/main/resources/templates/configuration/cluster/config.html @@ -0,0 +1,58 @@ + + + + + Cluster Client Configuration + + + +
+
+
+
+
+
+ + kafka Consumer Configuration for [[${cluster.getName()}]] + +
+ + + +
+ +
+
+ + + + + + + + + + + + + + + + +
ConfigValue
+ No configuration found! +
+
+
+
+ +
+
+
+ + + \ No newline at end of file diff --git a/kafka-webview-ui/src/main/resources/templates/configuration/cluster/create.html b/kafka-webview-ui/src/main/resources/templates/configuration/cluster/create.html index 694134cb..0ac3dbbb 100644 --- a/kafka-webview-ui/src/main/resources/templates/configuration/cluster/create.html +++ b/kafka-webview-ui/src/main/resources/templates/configuration/cluster/create.html @@ -310,6 +310,98 @@
SASL Settings
+ +
+ Custom Client Properties +
+
+ + +
+ +
+ + + + + Use with care. Setting values here may overwrite values set by Kafka Webview and interfere with Kafka WebView's ability + to correctly communicate with your cluster. + +
+
+
+ + +
+ + +
+ +
+ +
+ + +
+
+ +
+
+ +
+ + +
+ +
+ + +
+
+
+ + +
+
+
+
+ + + + + + diff --git a/kafka-webview-ui/src/main/resources/templates/configuration/cluster/index.html b/kafka-webview-ui/src/main/resources/templates/configuration/cluster/index.html index b61a8b68..03dbd006 100644 --- a/kafka-webview-ui/src/main/resources/templates/configuration/cluster/index.html +++ b/kafka-webview-ui/src/main/resources/templates/configuration/cluster/index.html @@ -84,13 +84,17 @@ Actions
- +
@@ -112,7 +114,9 @@
placeholder="Enter property name"/>
- +
@@ -188,7 +192,9 @@
type="hidden" name="customOptionNames" value="{{optionName}}">
- +
diff --git a/kafka-webview-ui/src/main/resources/templates/layout.html b/kafka-webview-ui/src/main/resources/templates/layout.html index fe6aab51..ae38a88f 100644 --- a/kafka-webview-ui/src/main/resources/templates/layout.html +++ b/kafka-webview-ui/src/main/resources/templates/layout.html @@ -257,5 +257,11 @@ + + \ No newline at end of file diff --git a/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/ClusterConfigControllerTest.java b/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/ClusterConfigControllerTest.java index 972df768..5a713a8b 100644 --- a/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/ClusterConfigControllerTest.java +++ b/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/controller/configuration/cluster/ClusterConfigControllerTest.java @@ -28,6 +28,8 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.sourcelab.kafka.webview.ui.controller.AbstractMvcTest; import org.sourcelab.kafka.webview.ui.manager.sasl.SaslProperties; import org.sourcelab.kafka.webview.ui.manager.sasl.SaslUtility; @@ -48,6 +50,8 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.HashMap; +import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.junit.Assert.assertEquals; @@ -69,6 +73,7 @@ @SpringBootTest @AutoConfigureMockMvc public class ClusterConfigControllerTest extends AbstractMvcTest { + private static final Logger logger = LoggerFactory.getLogger(ClusterConfigControllerTest.class); @Autowired private ClusterTestTools clusterTestTools; @@ -96,10 +101,11 @@ public void setupUploadPath() { @Transactional public void test_withoutAdminRole() throws Exception { testUrlWithOutAdminRole("/configuration/cluster", false); - 
testUrlWithOutAdminRole("/configuration/filter/create", false); - testUrlWithOutAdminRole("/configuration/filter/edit/1", false); - testUrlWithOutAdminRole("/configuration/filter/update", true); - testUrlWithOutAdminRole("/configuration/filter/delete/1", true); + testUrlWithOutAdminRole("/configuration/cluster/create", false); + testUrlWithOutAdminRole("/configuration/cluster/edit/1", false); + testUrlWithOutAdminRole("/configuration/cluster/update", true); + testUrlWithOutAdminRole("/configuration/cluster/delete/1", true); + testUrlWithOutAdminRole("/configuration/cluster/config/1", false); } /** @@ -142,7 +148,7 @@ public void testGetCreate() throws Exception { } /** - * Test creating new non-ssl cluster. + * Test creating new non-ssl cluster with no custom client options. */ @Test @Transactional @@ -169,6 +175,52 @@ public void testPostUpdate_newCluster() throws Exception { assertFalse("Should not be ssl enabled", cluster.isSslEnabled()); assertFalse("Should not be valid by default", cluster.isValid()); validateNonSaslCluster(cluster); + validateNoCustomClientOptions(cluster); + } + + /** + * Test creating new non-ssl cluster with custom client options. + */ + @Test + @Transactional + public void testPostUpdate_newCluster_withCustomClientOptions() throws Exception { + final String expectedClusterName = "My New Cluster Name"; + final String expectedBrokerHosts = "localhost:9092"; + + // Define expected custom options + final Map expectedCustomOptions = new HashMap<>(); + expectedCustomOptions.put("test.config.param1", "value1"); + expectedCustomOptions.put("test.config.param2", "value2"); + expectedCustomOptions.put("test.config.param3", ""); + + // Hit Update end point. 
+ mockMvc + .perform(post("/configuration/cluster/update") + .with(user(adminUserDetails)) + .with(csrf()) + .param("name", expectedClusterName) + .param("brokerHosts", expectedBrokerHosts) + // Enable custom options + .param("customOptionsEnabled", "1") + .param("customOptionNames", "test.config.param1") + .param("customOptionValues", "value1") + .param("customOptionNames", "test.config.param2") + .param("customOptionValues", "value2") + .param("customOptionNames", "test.config.param3") + .param("customOptionValues", "")) + //.andDo(print()) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("/configuration/cluster")); + + // Lookup Cluster + final Cluster cluster = clusterRepository.findByName(expectedClusterName); + assertNotNull("Should have new cluster", cluster); + assertEquals("Has correct name", expectedClusterName, cluster.getName()); + assertEquals("Has correct brokerHosts", expectedBrokerHosts, cluster.getBrokerHosts()); + assertFalse("Should not be ssl enabled", cluster.isSslEnabled()); + assertFalse("Should not be valid by default", cluster.isValid()); + validateNonSaslCluster(cluster); + validateCustomClientOptions(cluster, expectedCustomOptions); } /** @@ -205,6 +257,7 @@ public void testPostUpdate_existingNonSslCluster() throws Exception { assertFalse("Should not be ssl enabled", cluster.isSslEnabled()); assertFalse("Should be set back to NOT valid", cluster.isValid()); validateNonSaslCluster(cluster); + validateNoCustomClientOptions(cluster); } /** @@ -1075,6 +1128,133 @@ public void testPostUpdate_newSaslSSL_Cluster_withNoUploadedTrustStore() throws ); } + /** + * Test updating existing cluster that already has custom client options, + * and updates them setting new values, removing one entry, and adding a new entry. 
+ */ + @Test + @Transactional + public void testPostUpdate_existingCluster_updateCustomOptions() throws Exception { + // Define expected custom options + final Map expectedCustomOptions = new HashMap<>(); + expectedCustomOptions.put("test.config.param1", "new value1"); + expectedCustomOptions.put("test.config.param3", "new value3"); + expectedCustomOptions.put("test.config.param4", ""); + + final String originalJson = "{\"test.config.param1\":\"value1\",\"test.config.param2\":\"value2\",\"test.config.param3\":\"\"}"; + final String originalName = "My New Cluster " + System.currentTimeMillis(); + final String originalBrokerHosts = "updatedHost:9092"; + + // Create an existing cluster + final Cluster originalCluster = clusterTestTools.createCluster(originalName); + originalCluster.setBrokerHosts(originalBrokerHosts); + originalCluster.setValid(true); + originalCluster.setSslEnabled(false); + originalCluster.setOptionParameters(originalJson); + clusterRepository.save(originalCluster); + + // Only update custom option fields. + // Hit create page. 
+ mockMvc + .perform(multipart("/configuration/cluster/update") + .with(user(adminUserDetails)) + .with(csrf()) + .param("id", String.valueOf(originalCluster.getId())) + .param("name", originalName) + .param("brokerHosts", originalBrokerHosts) + // Enable custom options + .param("customOptionsEnabled", "1") + .param("customOptionNames", "test.config.param1") + .param("customOptionValues", "new value1") + .param("customOptionNames", "test.config.param3") + .param("customOptionValues", "new value3") + .param("customOptionNames", "test.config.param4") + .param("customOptionValues", "") + ) + //.andDo(print()) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("/configuration/cluster")); + + // Lookup Cluster + final Cluster cluster = clusterRepository.findById(originalCluster.getId()).get(); + assertNotNull("Should have new cluster", cluster); + assertEquals("Has correct name", originalName, cluster.getName()); + assertEquals("Has correct brokerHosts", originalBrokerHosts, cluster.getBrokerHosts()); + assertFalse("Should be ssl disabled", cluster.isSslEnabled()); + assertFalse("Should not be valid by default", cluster.isValid()); + assertNull("Password should be null", cluster.getTrustStorePassword()); + assertNull("Password should be null", cluster.getKeyStorePassword()); + assertNull("File reference should be null", cluster.getTrustStoreFile()); + assertNull("File reference should be null", cluster.getKeyStorePassword()); + validateNonSaslCluster(cluster); + validateCustomClientOptions(cluster, expectedCustomOptions); + } + + /** + * Test updating existing cluster that already has custom client options, + * and removes them by unchecking the option (but still submits the old options). + * + * They should be removed. 
+ */ + @Test + @Transactional + public void testPostUpdate_existingCluster_removeCustomOptions() throws Exception { +// Define expected custom options + final Map expectedCustomOptions = new HashMap<>(); + expectedCustomOptions.put("test.config.param1", "new value1"); + expectedCustomOptions.put("test.config.param3", "new value3"); + expectedCustomOptions.put("test.config.param4", ""); + + final String originalJson = "{\"test.config.param1\":\"value1\",\"test.config.param2\":\"value2\",\"test.config.param3\":\"\"}"; + final String originalName = "My New Cluster " + System.currentTimeMillis(); + final String originalBrokerHosts = "updatedHost:9092"; + + // Create an existing cluster + final Cluster originalCluster = clusterTestTools.createCluster(originalName); + originalCluster.setBrokerHosts(originalBrokerHosts); + originalCluster.setValid(true); + originalCluster.setSslEnabled(false); + originalCluster.setOptionParameters(originalJson); + clusterRepository.save(originalCluster); + + // Only update custom option fields. + // Hit create page. + mockMvc + .perform(multipart("/configuration/cluster/update") + .with(user(adminUserDetails)) + .with(csrf()) + .param("id", String.valueOf(originalCluster.getId())) + .param("name", originalName) + .param("brokerHosts", originalBrokerHosts) + // Disable flag, but we still submit the custom options + // No values should be persisted tho. 
+ .param("customOptionsEnabled", "0") + .param("customOptionNames", "test.config.param1") + .param("customOptionValues", "new value1") + .param("customOptionNames", "test.config.param3") + .param("customOptionValues", "new value3") + .param("customOptionNames", "test.config.param4") + .param("customOptionValues", "") + ) + //.andDo(print()) + .andExpect(status().is3xxRedirection()) + .andExpect(redirectedUrl("/configuration/cluster")); + + // Lookup Cluster + final Cluster cluster = clusterRepository.findById(originalCluster.getId()).get(); + assertNotNull("Should have new cluster", cluster); + assertEquals("Has correct name", originalName, cluster.getName()); + assertEquals("Has correct brokerHosts", originalBrokerHosts, cluster.getBrokerHosts()); + assertFalse("Should be ssl disabled", cluster.isSslEnabled()); + assertFalse("Should not be valid by default", cluster.isValid()); + assertNull("Password should be null", cluster.getTrustStorePassword()); + assertNull("Password should be null", cluster.getKeyStorePassword()); + assertNull("File reference should be null", cluster.getTrustStoreFile()); + assertNull("File reference should be null", cluster.getKeyStorePassword()); + validateNonSaslCluster(cluster); + validateNoCustomClientOptions(cluster); + } + /** * Utility method for validating a cluster is not configured with SASL. * @param cluster cluster to validate. @@ -1084,4 +1264,34 @@ private void validateNonSaslCluster(final Cluster cluster) { assertEquals("Should have empty sasl mechanism", "", cluster.getSaslMechanism()); assertEquals("Should have empty sasl config", "", cluster.getSaslConfig()); } + + /** + * Utility method for validating a cluster has no special client options set. + * @param cluster cluster to validate. 
+ */ + private void validateNoCustomClientOptions(final Cluster cluster) { + assertEquals("Should have empty custom client properties", "{}", cluster.getOptionParameters()); + } + + /** + * Utility method for validating expected custom client options. + * @param cluster cluster to validate. + * @param expectedCustomOptions expected key/value pairs. + */ + private void validateCustomClientOptions(final Cluster cluster, final Map expectedCustomOptions) { + assertNotNull("Should have non-null client properties", cluster.getOptionParameters()); + assertNotEquals("Should have non-empty client properties", "", cluster.getOptionParameters()); + assertNotEquals("Should have non-empty custom client properties", "{}", cluster.getOptionParameters()); + + for (final Map.Entry expectedEntry : expectedCustomOptions.entrySet()) { + final String expectedValue = '"' + expectedEntry.getKey() + "\":\"" + expectedEntry.getValue() + '"'; + + // Debug log if failed test. + if (!cluster.getOptionParameters().contains(expectedValue)) { + logger.error("Failed to find entry {}", expectedEntry); + logger.error("Found values: {}", cluster.getOptionParameters()); + } + assertTrue("Should contain value: " + expectedValue, cluster.getOptionParameters().contains(expectedValue)); + } + } } \ No newline at end of file diff --git a/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/manager/SensitiveConfigScrubberTest.java b/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/manager/SensitiveConfigScrubberTest.java new file mode 100644 index 00000000..2c722b0d --- /dev/null +++ b/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/manager/SensitiveConfigScrubberTest.java @@ -0,0 +1,111 @@ +/** + * MIT License + * + * Copyright (c) 2017, 2018, 2019 SourceLab.org (https://github.com/SourceLabOrg/kafka-webview/) + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to 
deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.sourcelab.kafka.webview.ui.manager; + +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.config.SslConfigs; +import org.junit.Before; +import org.junit.Test; +import org.sourcelab.kafka.webview.ui.manager.encryption.SecretManager; +import org.sourcelab.kafka.webview.ui.manager.sasl.SaslProperties; +import org.sourcelab.kafka.webview.ui.manager.sasl.SaslUtility; +import org.sourcelab.kafka.webview.ui.model.Cluster; + +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotSame; + +public class SensitiveConfigScrubberTest { + + /** + * Instance under test. + */ + private SensitiveConfigScrubber scrubber; + + /** + * Dependency. + */ + private SaslUtility saslUtility; + + /** + * Setup. 
+ */ + @Before + public void setup() { + this.saslUtility = new SaslUtility(new SecretManager("NotARealSecret")); + this.scrubber = new SensitiveConfigScrubber(saslUtility); + } + + /** + * Smoke test filterSensitiveOptions method. + */ + @Test + public void smokeTest() { + final String secretPhrase = "secret"; + + final SaslProperties saslProperties = new SaslProperties("Username", secretPhrase, "Anything", "Something"); + + // Create cluster + final Cluster cluster = new Cluster(); + cluster.setName("My Test Cluster"); + cluster.setSaslConfig(saslUtility.encryptProperties(saslProperties)); + cluster.setSaslEnabled(true); + + // Create a mock config + final Map mockConfig = new HashMap<>(); + mockConfig.put("Key1", "Value1"); + mockConfig.put("Key2", "Value2"); + + // Add "sensitive" fields + mockConfig.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, secretPhrase); + mockConfig.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, secretPhrase); + mockConfig.put(SaslConfigs.SASL_JAAS_CONFIG, "Anything anything " + secretPhrase + " something"); + + // Call method under test + final Map scrubbed = scrubber.filterSensitiveOptions(mockConfig, cluster); + + // Verify we have a new instance. 
+ assertNotSame("Should be different instances", scrubbed, mockConfig); + + // Verify keys + assertEquals(scrubbed.get("Key1"), mockConfig.get("Key1")); + assertEquals(scrubbed.get("Key2"), mockConfig.get("Key2")); + + // Sensitive fields no longer match + assertNotEquals(scrubbed.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG), mockConfig.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)); + assertEquals("**HIDDEN**", scrubbed.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)); + assertNotEquals(scrubbed.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG), mockConfig.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)); + assertEquals("**HIDDEN**", scrubbed.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)); + + assertNotEquals(scrubbed.get(SaslConfigs.SASL_JAAS_CONFIG), mockConfig.get(SaslConfigs.SASL_JAAS_CONFIG)); + assertFalse( + ((String)scrubbed.get(SaslConfigs.SASL_JAAS_CONFIG)).contains(secretPhrase) + ); + } +} \ No newline at end of file diff --git a/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaClientConfigUtilTest.java b/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaClientConfigUtilTest.java index 48b5b70b..df782873 100644 --- a/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaClientConfigUtilTest.java +++ b/kafka-webview-ui/src/test/java/org/sourcelab/kafka/webview/ui/manager/kafka/KafkaClientConfigUtilTest.java @@ -31,6 +31,7 @@ import org.junit.Test; import org.sourcelab.kafka.webview.ui.manager.kafka.config.ClusterConfig; +import java.util.HashMap; import java.util.Map; import static org.junit.Assert.assertEquals; @@ -81,6 +82,43 @@ public void testApplyCommonSettings_noSsl_noSasl() { validateNoKey(config, CommonClientConfigs.SECURITY_PROTOCOL_CONFIG); } + /** + * Basic smoke test, without SSL or SASL options, but has cluster properties. 
+ */ + @Test + public void testApplyCommonSettings_noSsl_noSasl_withClientProperties() { + final Map customProperties = new HashMap<>(); + customProperties.put("key3", "value3"); + customProperties.put("key4", "value4"); + + final ClusterConfig clusterConfig = ClusterConfig.newBuilder() + .withBrokerHosts(expectedBrokerHosts) + .withUseSsl(false) + .withUseSasl(false) + // Use both setters + .withClusterClientConfig("key1", "value1") + .withClusterClientConfig("key2", "value2") + .withClusterClientConfig(customProperties) + // Build it. + .build(); + + final Map config = util.applyCommonSettings(clusterConfig, consumerId); + + // Validate + validateDefaultKeys(config); + validateNoSsl(config); + validateNoSasl(config); + + // Validate this is not set. + validateNoKey(config, CommonClientConfigs.SECURITY_PROTOCOL_CONFIG); + + // Validate custom properties applied + validateKey(config, "key1", "value1"); + validateKey(config, "key2", "value2"); + validateKey(config, "key3", "value3"); + validateKey(config, "key4", "value4"); + } + /** * Basic smoke test with SSL and TrustStore, without SASL options. */