From 1dbeda0f325960a4562f39a75b3e50d6b468edbf Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Tue, 5 Apr 2022 18:49:42 -0400 Subject: [PATCH 01/19] Update azure-storage-blob to 12.15.0 (#2774) Signed-off-by: Andriy Redko --- plugins/repository-azure/build.gradle | 2 +- .../licenses/azure-storage-blob-12.14.4.jar.sha1 | 1 - .../licenses/azure-storage-blob-12.15.0.jar.sha1 | 1 + .../repositories/azure/AzureBlobContainerRetriesTests.java | 7 ++++++- .../src/main/java/fixture/azure/AzureHttpHandler.java | 4 +++- 5 files changed, 11 insertions(+), 4 deletions(-) delete mode 100644 plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-storage-blob-12.15.0.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 648c045d97d01..040a29750b967 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -54,7 +54,7 @@ dependencies { api "io.netty:netty-resolver-dns:${versions.netty}" api "io.netty:netty-transport-native-unix-common:${versions.netty}" implementation project(':modules:transport-netty4') - api 'com.azure:azure-storage-blob:12.14.4' + api 'com.azure:azure-storage-blob:12.15.0' api 'org.reactivestreams:reactive-streams:1.0.3' api 'io.projectreactor:reactor-core:3.4.15' api 'io.projectreactor.netty:reactor-netty:1.0.17' diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 deleted file mode 100644 index 5333f8fa90ada..0000000000000 --- a/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2b92020693d09e4980b96d278e8038a1087afea0 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.15.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.15.0.jar.sha1 new file mode 100644 index 
0000000000000..513cb017f798d --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-blob-12.15.0.jar.sha1 @@ -0,0 +1 @@ +a53a6bdf7564f4e3a7b0b93cd96b7f5f95c03d36 \ No newline at end of file diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java index c9e6e299c7120..c973cb325b658 100644 --- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java +++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -231,6 +231,8 @@ public void testReadBlobWithRetries() throws Exception { exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.getResponseHeaders().add("Content-Length", String.valueOf(length)); exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); + exchange.getResponseHeaders() + .add("Content-Range", String.format("bytes %d-%d/%d", rangeStart, bytes.length, bytes.length)); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length); exchange.getResponseBody().write(bytes, rangeStart, length); return; @@ -247,7 +249,8 @@ public void testReadBlobWithRetries() throws Exception { final BlobContainer blobContainer = createBlobContainer(maxRetries); try (InputStream inputStream = blobContainer.readBlob("read_blob_max_retries")) { assertArrayEquals(bytes, BytesReference.toBytes(Streams.readFully(inputStream))); - assertThat(countDownHead.isCountedDown(), is(true)); + // No more getProperties() calls in BlobClientBase::openInputStream(), HEAD should not be invoked + assertThat(countDownHead.isCountedDown(), is(false)); assertThat(countDownGet.isCountedDown(), is(true)); } } @@ -278,6 +281,8 @@ public void testReadRangeBlobWithRetries() throws Exception { assertThat(length, lessThanOrEqualTo(bytes.length - rangeStart)); 
exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.getResponseHeaders().add("Content-Length", String.valueOf(length)); + exchange.getResponseHeaders() + .add("Content-Range", String.format("bytes %d-%d/%d", rangeStart, rangeEnd.get(), bytes.length)); exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length); exchange.getResponseBody().write(bytes, rangeStart, length); diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java index f12a4579a2d0c..8389bd839d165 100644 --- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java +++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java @@ -150,12 +150,14 @@ public void handle(final HttpExchange exchange) throws IOException { } final int start = Integer.parseInt(matcher.group(1)); - final int length = Integer.parseInt(matcher.group(2)) - start + 1; + final int end = Integer.parseInt(matcher.group(2)); + final int length = Math.min(end - start + 1, blob.length()); exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.getResponseHeaders().add("Content-Length", String.valueOf(length)); exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); exchange.getResponseHeaders().add("x-ms-request-server-encrypted", "false"); + exchange.getResponseHeaders().add("Content-Range", String.format("bytes %d-%d/%d", start, Math.min(end, length), length)); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length); exchange.getResponseBody().write(blob.toBytesRef().bytes, start, length); From ed040e9f1a36abc23b7605cc47b48bb57a569c04 Mon Sep 17 00:00:00 2001 From: Tianli Feng Date: Tue, 5 Apr 2022 17:48:39 -0700 Subject: [PATCH 02/19] Replace blacklist in Gradle build environment configuration (#2752) - Replace `blacklist` 
with `denylist` in all `tests.rest.blacklist` and `REST_TESTS_BLACKLIST` Signed-off-by: Tianli Feng --- TESTING.md | 2 +- plugins/repository-s3/build.gradle | 6 +++--- .../test/junit/listeners/ReproduceInfoPrinter.java | 2 +- .../rest/yaml/OpenSearchClientYamlSuiteTestCase.java | 12 ++++++------ 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/TESTING.md b/TESTING.md index 4a2a786469b67..d6f246dbd6dcc 100644 --- a/TESTING.md +++ b/TESTING.md @@ -245,7 +245,7 @@ The YAML REST tests support all the options provided by the randomized runner, p - `tests.rest.suite`: comma separated paths of the test suites to be run (by default loaded from /rest-api-spec/test). It is possible to run only a subset of the tests providing a sub-folder or even a single yaml file (the default /rest-api-spec/test prefix is optional when files are loaded from classpath) e.g. `-Dtests.rest.suite=index,get,create/10_with_id` -- `tests.rest.blacklist`: comma separated globs that identify tests that are denylisted and need to be skipped e.g. `-Dtests.rest.blacklist=index/**/Index document,get/10_basic/**` +- `tests.rest.denylist`: comma separated globs that identify tests that are denylisted and need to be skipped e.g. `-Dtests.rest.denylist=index/**/Index document,get/10_basic/**` Java REST tests can be run with the "javaRestTest" task. diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 072683e3bd5e5..33448b0039ce2 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -190,7 +190,7 @@ internalClusterTest { } yamlRestTest { - systemProperty 'tests.rest.blacklist', ( + systemProperty 'tests.rest.denylist', ( useFixture ? 
['repository_s3/50_repository_ecs_credentials/*'] : @@ -246,7 +246,7 @@ if (useFixture) { setClasspath(yamlRestTestSourceSet.getRuntimeClasspath()) // Minio only supports a single access key, see https://github.com/minio/minio/pull/5968 - systemProperty 'tests.rest.blacklist', [ + systemProperty 'tests.rest.denylist', [ 'repository_s3/30_repository_temporary_credentials/*', 'repository_s3/40_repository_ec2_credentials/*', 'repository_s3/50_repository_ecs_credentials/*' @@ -272,7 +272,7 @@ if (useFixture) { SourceSet yamlRestTestSourceSet = sourceSets.getByName(YamlRestTestPlugin.SOURCE_SET_NAME) setTestClassesDirs(yamlRestTestSourceSet.getOutput().getClassesDirs()) setClasspath(yamlRestTestSourceSet.getRuntimeClasspath()) - systemProperty 'tests.rest.blacklist', [ + systemProperty 'tests.rest.denylist', [ 'repository_s3/10_basic/*', 'repository_s3/20_repository_permanent_credentials/*', 'repository_s3/30_repository_temporary_credentials/*', diff --git a/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java index 668526d9d6d0d..3d5a906e50836 100644 --- a/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -200,7 +200,7 @@ private ReproduceErrorMessageBuilder appendESProperties() { public ReproduceErrorMessageBuilder appendClientYamlSuiteProperties() { return appendProperties( OpenSearchClientYamlSuiteTestCase.REST_TESTS_SUITE, - OpenSearchClientYamlSuiteTestCase.REST_TESTS_BLACKLIST + OpenSearchClientYamlSuiteTestCase.REST_TESTS_DENYLIST ); } diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java index 5a404ccd4b9fc..70e3adbefbfc3 100644 --- 
a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java @@ -89,14 +89,14 @@ public abstract class OpenSearchClientYamlSuiteTestCase extends OpenSearchRestTe public static final String REST_TESTS_SUITE = "tests.rest.suite"; /** * Property that allows to denylist some of the REST tests based on a comma separated list of globs - * e.g. "-Dtests.rest.blacklist=get/10_basic/*" + * e.g. "-Dtests.rest.denylist=get/10_basic/*" */ - public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist"; + public static final String REST_TESTS_DENYLIST = "tests.rest.denylist"; /** - * We use tests.rest.blacklist in build files to denylist tests; this property enables a user to add additional denylisted tests on + * We use tests.rest.denylist in build files to denylist tests; this property enables a user to add additional denylisted tests on * top of the tests denylisted in the build. */ - public static final String REST_TESTS_BLACKLIST_ADDITIONS = "tests.rest.blacklist_additions"; + public static final String REST_TESTS_DENYLIST_ADDITIONS = "tests.rest.denylist_additions"; /** * Property that allows to control whether spec validation is enabled or not (default true). 
*/ @@ -154,12 +154,12 @@ public void initAndResetContext() throws Exception { clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, minVersion, masterVersion); restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, randomizeContentType()); adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, false); - final String[] denylist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); + final String[] denylist = resolvePathsProperty(REST_TESTS_DENYLIST, null); denylistPathMatchers = new ArrayList<>(); for (final String entry : denylist) { denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); } - final String[] denylistAdditions = resolvePathsProperty(REST_TESTS_BLACKLIST_ADDITIONS, null); + final String[] denylistAdditions = resolvePathsProperty(REST_TESTS_DENYLIST_ADDITIONS, null); for (final String entry : denylistAdditions) { denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); } From ce5c55dbbc58f9309662e2919834ed54358d28cb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 10:33:37 -0400 Subject: [PATCH 03/19] Bump geoip2 from 2.16.1 to 3.0.1 in /modules/ingest-geoip (#2646) * Bump geoip2 from 2.16.1 to 3.0.1 in /modules/ingest-geoip Bumps [geoip2](https://github.com/maxmind/GeoIP2-java) from 2.16.1 to 3.0.1. - [Release notes](https://github.com/maxmind/GeoIP2-java/releases) - [Changelog](https://github.com/maxmind/GeoIP2-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/maxmind/GeoIP2-java/compare/v2.16.1...v3.0.1) --- updated-dependencies: - dependency-name: com.maxmind.geoip2:geoip2 dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Fix breaking change with geoip2 version 3.0.1. Signed-off-by: Marc Handalian * Fix precommit failures caused by ignoreMissingClasses check. 
Precommit is failing because forbiddenApis was configured to ignore missing classes that are present. Signed-off-by: Marc Handalian * Change asn in GeoIpProcessorTests from int to long. Signed-off-by: Marc Handalian Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] Co-authored-by: Marc Handalian --- modules/ingest-geoip/build.gradle | 20 +------------------ .../licenses/geoip2-2.16.1.jar.sha1 | 1 - .../licenses/geoip2-3.0.1.jar.sha1 | 1 + .../ingest/geoip/GeoIpProcessor.java | 2 +- .../ingest/geoip/GeoIpProcessorTests.java | 2 +- 5 files changed, 4 insertions(+), 22 deletions(-) delete mode 100644 modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 create mode 100644 modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1 diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index b1d5afbe68a17..f3be0fe61d4be 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -39,7 +39,7 @@ opensearchplugin { } dependencies { - api('com.maxmind.geoip2:geoip2:2.16.1') + api('com.maxmind.geoip2:geoip2:3.0.1') // geoip2 dependencies: api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}") @@ -67,24 +67,6 @@ tasks.named("bundlePlugin").configure { } } -tasks.named("thirdPartyAudit").configure { - ignoreMissingClasses( - // geoip WebServiceClient needs apache http client, but we're not using WebServiceClient: - 'org.apache.http.HttpEntity', - 'org.apache.http.HttpResponse', - 'org.apache.http.StatusLine', - 'org.apache.http.client.config.RequestConfig$Builder', - 'org.apache.http.client.config.RequestConfig', - 'org.apache.http.client.methods.CloseableHttpResponse', - 'org.apache.http.client.methods.HttpGet', - 'org.apache.http.client.utils.URIBuilder', - 'org.apache.http.impl.auth.BasicScheme', - 'org.apache.http.impl.client.CloseableHttpClient', - 
'org.apache.http.impl.client.HttpClientBuilder', - 'org.apache.http.util.EntityUtils' - ) -} - if (Os.isFamily(Os.FAMILY_WINDOWS)) { tasks.named("test").configure { // Windows cannot cleanup database files properly unless it loads everything on heap. diff --git a/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 deleted file mode 100644 index 0221476794d3a..0000000000000 --- a/modules/ingest-geoip/licenses/geoip2-2.16.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c92040bd6ef2cb59be71c6749d08c141ca546caf \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1 new file mode 100644 index 0000000000000..f1d5ac5aea546 --- /dev/null +++ b/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1 @@ -0,0 +1 @@ +8a814ae92a1d8c35f82d0ff76d86927c191b7916 \ No newline at end of file diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java index 384ae6f14dc4d..030f75bf48e18 100644 --- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java @@ -364,7 +364,7 @@ private Map retrieveAsnGeoData(InetAddress ipAddress) { }) ); - Integer asn = response.getAutonomousSystemNumber(); + Long asn = response.getAutonomousSystemNumber(); String organization_name = response.getAutonomousSystemOrganization(); Network network = response.getNetwork(); diff --git a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java index f06802af8b571..34c80fec520aa 100644 --- a/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java +++ 
b/modules/ingest-geoip/src/test/java/org/opensearch/ingest/geoip/GeoIpProcessorTests.java @@ -308,7 +308,7 @@ public void testAsn() throws Exception { Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData.size(), equalTo(4)); assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("asn"), equalTo(1136)); + assertThat(geoData.get("asn"), equalTo(1136L)); assertThat(geoData.get("organization_name"), equalTo("KPN B.V.")); assertThat(geoData.get("network"), equalTo("82.168.0.0/14")); } From dd24e17ea6ed557829e6094c0a2af9f05c1cdebd Mon Sep 17 00:00:00 2001 From: Tianli Feng Date: Wed, 6 Apr 2022 12:01:12 -0700 Subject: [PATCH 04/19] Fix issue that deprecated setting 'cluster.initial_master_nodes' is not identified in node bootstrap check (#2779) * Fix issue that deprecated setting 'cluster.initial_master_nodes' is not identified during node bootstrap Signed-off-by: Tianli Feng * Restore a variable name Signed-off-by: Tianli Feng --- .../coordination/ClusterBootstrapService.java | 13 +++++++------ .../opensearch/bootstrap/BootstrapChecksTests.java | 2 ++ .../coordination/ClusterBootstrapServiceTests.java | 11 ++++++++++- 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/opensearch/cluster/coordination/ClusterBootstrapService.java b/server/src/main/java/org/opensearch/cluster/coordination/ClusterBootstrapService.java index 8df561149eb3d..c7708a54f9031 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/ClusterBootstrapService.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/ClusterBootstrapService.java @@ -113,12 +113,12 @@ public ClusterBootstrapService( BooleanSupplier isBootstrappedSupplier, Consumer votingConfigurationConsumer ) { + // TODO: Remove variable 'initialClusterManagerSettingName' after removing MASTER_ROLE. + String initialClusterManagerSettingName = INITIAL_CLUSTER_MANAGER_NODES_SETTING.exists(settings) + ? 
INITIAL_CLUSTER_MANAGER_NODES_SETTING.getKey() + : INITIAL_MASTER_NODES_SETTING.getKey(); if (DiscoveryModule.isSingleNodeDiscovery(settings)) { if (INITIAL_CLUSTER_MANAGER_NODES_SETTING.existsOrFallbackExists(settings)) { - // TODO: Remove variable 'initialClusterManagerSettingName' after removing MASTER_ROLE. - String initialClusterManagerSettingName = INITIAL_CLUSTER_MANAGER_NODES_SETTING.exists(settings) - ? INITIAL_CLUSTER_MANAGER_NODES_SETTING.getKey() - : INITIAL_MASTER_NODES_SETTING.getKey(); throw new IllegalArgumentException( "setting [" + initialClusterManagerSettingName @@ -145,7 +145,7 @@ public ClusterBootstrapService( bootstrapRequirements = unmodifiableSet(new LinkedHashSet<>(initialMasterNodes)); if (bootstrapRequirements.size() != initialMasterNodes.size()) { throw new IllegalArgumentException( - "setting [" + INITIAL_CLUSTER_MANAGER_NODES_SETTING.getKey() + "] contains duplicates: " + initialMasterNodes + "setting [" + initialClusterManagerSettingName + "] contains duplicates: " + initialMasterNodes ); } unconfiguredBootstrapTimeout = discoveryIsConfigured(settings) ? 
null : UNCONFIGURED_BOOTSTRAP_TIMEOUT_SETTING.get(settings); @@ -163,7 +163,8 @@ public static boolean discoveryIsConfigured(Settings settings) { LEGACY_DISCOVERY_HOSTS_PROVIDER_SETTING, DISCOVERY_SEED_HOSTS_SETTING, LEGACY_DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, - INITIAL_CLUSTER_MANAGER_NODES_SETTING + INITIAL_CLUSTER_MANAGER_NODES_SETTING, + INITIAL_MASTER_NODES_SETTING ).anyMatch(s -> s.exists(settings)); } diff --git a/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java index d941c624509da..c59ca1dd60dc7 100644 --- a/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java +++ b/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java @@ -818,5 +818,7 @@ public void testDiscoveryConfiguredCheck() throws NodeValidationException { ensureChecksPass.accept(Settings.builder().putList(ClusterBootstrapService.INITIAL_CLUSTER_MANAGER_NODES_SETTING.getKey())); ensureChecksPass.accept(Settings.builder().putList(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey())); ensureChecksPass.accept(Settings.builder().putList(SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING.getKey())); + // Validate the deprecated setting is still valid during the node bootstrap. 
+ ensureChecksPass.accept(Settings.builder().putList(ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING.getKey())); } } diff --git a/server/src/test/java/org/opensearch/cluster/coordination/ClusterBootstrapServiceTests.java b/server/src/test/java/org/opensearch/cluster/coordination/ClusterBootstrapServiceTests.java index dd55d078fe2c6..3e4148cef61cd 100644 --- a/server/src/test/java/org/opensearch/cluster/coordination/ClusterBootstrapServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/coordination/ClusterBootstrapServiceTests.java @@ -166,10 +166,19 @@ public void testDoesNothingByDefaultIfSeedHostsConfigured() { testDoesNothingWithSettings(builder().putList(DISCOVERY_SEED_HOSTS_SETTING.getKey())); } - public void testDoesNothingByDefaultIfMasterNodesConfigured() { + public void testDoesNothingByDefaultIfClusterManagerNodesConfigured() { testDoesNothingWithSettings(builder().putList(INITIAL_CLUSTER_MANAGER_NODES_SETTING.getKey())); } + // Validate the deprecated setting is still valid during the cluster bootstrap. + public void testDoesNothingByDefaultIfMasterNodesConfigured() { + testDoesNothingWithSettings(builder().putList(INITIAL_MASTER_NODES_SETTING.getKey())); + assertWarnings( + "[cluster.initial_master_nodes] setting was deprecated in OpenSearch and will be removed in a future release! " + + "See the breaking changes documentation for the next major version." 
+ ); + } + public void testDoesNothingByDefaultOnMasterIneligibleNodes() { localNode = new DiscoveryNode( "local", From 0244b2a2df1816cd7bdcca85bd079c7e3e580155 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Wed, 6 Apr 2022 17:28:14 -0400 Subject: [PATCH 05/19] Update azure-storage-blob to 12.15.0: fix test flakiness (#2795) Signed-off-by: Andriy Redko --- .../repositories/azure/AzureBlobContainerRetriesTests.java | 5 ++--- .../src/main/java/fixture/azure/AzureHttpHandler.java | 4 +++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java index c973cb325b658..e8417f9ceaf2c 100644 --- a/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java +++ b/plugins/repository-azure/src/test/java/org/opensearch/repositories/azure/AzureBlobContainerRetriesTests.java @@ -231,8 +231,7 @@ public void testReadBlobWithRetries() throws Exception { exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.getResponseHeaders().add("Content-Length", String.valueOf(length)); exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); - exchange.getResponseHeaders() - .add("Content-Range", String.format("bytes %d-%d/%d", rangeStart, bytes.length, bytes.length)); + exchange.getResponseHeaders().add("Content-Range", "bytes " + rangeStart + "-" + bytes.length + "/" + bytes.length); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length); exchange.getResponseBody().write(bytes, rangeStart, length); return; @@ -282,7 +281,7 @@ public void testReadRangeBlobWithRetries() throws Exception { exchange.getResponseHeaders().add("Content-Type", "application/octet-stream"); exchange.getResponseHeaders().add("Content-Length", String.valueOf(length)); exchange.getResponseHeaders() - 
.add("Content-Range", String.format("bytes %d-%d/%d", rangeStart, rangeEnd.get(), bytes.length)); + .add("Content-Range", "bytes " + rangeStart + "-" + rangeEnd.get() + "/" + bytes.length); exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length); exchange.getResponseBody().write(bytes, rangeStart, length); diff --git a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java index 8389bd839d165..4879425b7bcd6 100644 --- a/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java +++ b/test/fixtures/azure-fixture/src/main/java/fixture/azure/AzureHttpHandler.java @@ -157,7 +157,9 @@ public void handle(final HttpExchange exchange) throws IOException { exchange.getResponseHeaders().add("Content-Length", String.valueOf(length)); exchange.getResponseHeaders().add("x-ms-blob-type", "blockblob"); exchange.getResponseHeaders().add("x-ms-request-server-encrypted", "false"); - exchange.getResponseHeaders().add("Content-Range", String.format("bytes %d-%d/%d", start, Math.min(end, length), length)); + exchange.getResponseHeaders() + .add("Content-Range", "bytes " + start + "-" + Math.min(end, length) + "/" + blob.length()); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), length); exchange.getResponseBody().write(blob.toBytesRef().bytes, start, length); From 566ebfa4fef4bba2f71cb6964c03ab5320be98e0 Mon Sep 17 00:00:00 2001 From: Kartik Date: Thu, 7 Apr 2022 14:35:40 -0700 Subject: [PATCH 06/19] Bugfix to guard against stack overflow errors caused by very large reg-ex input (#2810) * Bugfix to guard against stack overflow errors caused by very large reg-ex input This change fixes a code path that did not properly impose the index-level max_regex_length limit. 
Therefore, it was possible to provide an arbitrarily large string as the include/exclude reg-ex value under search aggregations. This exposed the underlying node to crashes from a StackOverflowError, due to how the Lucene RegExp class processes strings using stack frames. Signed-off-by: Kartik Ganesh * Adding integration tests for large string RegEx Signed-off-by: Kartik Ganesh * Spotless Signed-off-by: Kartik Ganesh --- .../AggregationsIntegrationIT.java | 60 ++++++++++++ .../bucket/terms/IncludeExclude.java | 94 ++++++++++--------- .../terms/RareTermsAggregatorFactory.java | 6 +- .../SignificantTermsAggregatorFactory.java | 13 ++- .../SignificantTextAggregatorFactory.java | 6 +- .../bucket/terms/TermsAggregatorFactory.java | 11 ++- .../aggregations/bucket/RareTermsTests.java | 7 +- .../bucket/SignificantTermsTests.java | 7 +- .../aggregations/bucket/TermsTests.java | 7 +- .../terms/BinaryTermsAggregatorTests.java | 3 +- .../terms/NumericTermsAggregatorTests.java | 3 +- .../support/IncludeExcludeTests.java | 12 +-- 12 files changed, 156 insertions(+), 73 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java index 6778765599fe9..b73b7722f9728 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java @@ -32,10 +32,18 @@ package org.opensearch.search.aggregations; +import org.opensearch.OpenSearchException; import org.opensearch.action.index.IndexRequestBuilder; +import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.unit.TimeValue; +import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; +import 
org.opensearch.search.aggregations.bucket.terms.RareTermsAggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.SignificantTermsAggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory; import org.opensearch.search.aggregations.bucket.terms.Terms; +import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.opensearch.test.OpenSearchIntegTestCase; import java.util.ArrayList; @@ -50,6 +58,11 @@ public class AggregationsIntegrationIT extends OpenSearchIntegTestCase { static int numDocs; + private static final String LARGE_STRING = "a".repeat(2000); + private static final String LARGE_STRING_EXCEPTION_MESSAGE = "The length of regex [" + + LARGE_STRING.length() + + "] used in the request has exceeded the allowed maximum"; + @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("index").setMapping("f", "type=keyword").get()); @@ -85,4 +98,51 @@ public void testScroll() { assertEquals(numDocs, total); } + public void testLargeRegExTermsAggregation() { + for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) { + TermsAggregationBuilder termsAggregation = terms("my_terms").field("f") + .includeExclude(getLargeStringInclude()) + .executionHint(executionMode.toString()); + runLargeStringAggregationTest(termsAggregation); + } + } + + public void testLargeRegExSignificantTermsAggregation() { + for (SignificantTermsAggregatorFactory.ExecutionMode executionMode : SignificantTermsAggregatorFactory.ExecutionMode.values()) { + SignificantTermsAggregationBuilder significantTerms = new SignificantTermsAggregationBuilder("my_terms").field("f") + .includeExclude(getLargeStringInclude()) + .executionHint(executionMode.toString()); + runLargeStringAggregationTest(significantTerms); + } + } + + public void 
testLargeRegExRareTermsAggregation() { + // currently this only supports "map" as an execution hint + RareTermsAggregationBuilder rareTerms = new RareTermsAggregationBuilder("my_terms").field("f") + .includeExclude(getLargeStringInclude()) + .maxDocCount(2); + runLargeStringAggregationTest(rareTerms); + } + + private IncludeExclude getLargeStringInclude() { + return new IncludeExclude(LARGE_STRING, null); + } + + private void runLargeStringAggregationTest(AggregationBuilder aggregation) { + boolean exceptionThrown = false; + IncludeExclude include = new IncludeExclude(LARGE_STRING, null); + try { + client().prepareSearch("index").addAggregation(aggregation).get(); + } catch (SearchPhaseExecutionException ex) { + exceptionThrown = true; + Throwable nestedException = ex.getCause(); + assertNotNull(nestedException); + assertTrue(nestedException instanceof OpenSearchException); + assertNotNull(nestedException.getCause()); + assertTrue(nestedException.getCause() instanceof IllegalArgumentException); + String actualExceptionMessage = nestedException.getCause().getMessage(); + assertTrue(actualExceptionMessage.startsWith(LARGE_STRING_EXCEPTION_MESSAGE)); + } + assertTrue("Exception should have been thrown", exceptionThrown); + } } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java index e632a13b95fb7..acb3a6629c734 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -48,6 +48,7 @@ import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.opensearch.OpenSearchParseException; +import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import 
org.opensearch.common.io.stream.StreamOutput; @@ -55,6 +56,7 @@ import org.opensearch.common.xcontent.ToXContentFragment; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.index.IndexSettings; import org.opensearch.search.DocValueFormat; import java.io.IOException; @@ -337,19 +339,16 @@ public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) thro } - private final RegExp include, exclude; + private final String include, exclude; private final SortedSet includeValues, excludeValues; private final int incZeroBasedPartition; private final int incNumPartitions; /** - * @param include The regular expression pattern for the terms to be included - * @param exclude The regular expression pattern for the terms to be excluded + * @param include The string or regular expression pattern for the terms to be included + * @param exclude The string or regular expression pattern for the terms to be excluded */ - public IncludeExclude(RegExp include, RegExp exclude) { - if (include == null && exclude == null) { - throw new IllegalArgumentException(); - } + public IncludeExclude(String include, String exclude) { this.include = include; this.exclude = exclude; this.includeValues = null; @@ -358,10 +357,6 @@ public IncludeExclude(RegExp include, RegExp exclude) { this.incNumPartitions = 0; } - public IncludeExclude(String include, String exclude) { - this(include == null ? null : new RegExp(include), exclude == null ? null : new RegExp(exclude)); - } - /** * @param includeValues The terms to be included * @param excludeValues The terms to be excluded @@ -412,10 +407,8 @@ public IncludeExclude(StreamInput in) throws IOException { excludeValues = null; incZeroBasedPartition = 0; incNumPartitions = 0; - String includeString = in.readOptionalString(); - include = includeString == null ? 
null : new RegExp(includeString); - String excludeString = in.readOptionalString(); - exclude = excludeString == null ? null : new RegExp(excludeString); + include = in.readOptionalString(); + exclude = in.readOptionalString(); return; } include = null; @@ -447,8 +440,8 @@ public void writeTo(StreamOutput out) throws IOException { boolean regexBased = isRegexBased(); out.writeBoolean(regexBased); if (regexBased) { - out.writeOptionalString(include == null ? null : include.getOriginalString()); - out.writeOptionalString(exclude == null ? null : exclude.getOriginalString()); + out.writeOptionalString(include); + out.writeOptionalString(exclude); } else { boolean hasIncludes = includeValues != null; out.writeBoolean(hasIncludes); @@ -584,26 +577,54 @@ public boolean isPartitionBased() { return incNumPartitions > 0; } - private Automaton toAutomaton() { - Automaton a = null; + private Automaton toAutomaton(@Nullable IndexSettings indexSettings) { + int maxRegexLength = indexSettings == null ? -1 : indexSettings.getMaxRegexLength(); + Automaton a; if (include != null) { - a = include.toAutomaton(); + if (include.length() > maxRegexLength) { + throw new IllegalArgumentException( + "The length of regex [" + + include.length() + + "] used in the request has exceeded " + + "the allowed maximum of [" + + maxRegexLength + + "]. " + + "This maximum can be set by changing the [" + + IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey() + + "] index level setting." + ); + } + a = new RegExp(include).toAutomaton(); } else if (includeValues != null) { a = Automata.makeStringUnion(includeValues); } else { a = Automata.makeAnyString(); } if (exclude != null) { - a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + if (exclude.length() > maxRegexLength) { + throw new IllegalArgumentException( + "The length of regex [" + + exclude.length() + + "] used in the request has exceeded " + + "the allowed maximum of [" + + maxRegexLength + + "]. 
" + + "This maximum can be set by changing the [" + + IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey() + + "] index level setting." + ); + } + Automaton excludeAutomaton = new RegExp(exclude).toAutomaton(); + a = Operations.minus(a, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } else if (excludeValues != null) { a = Operations.minus(a, Automata.makeStringUnion(excludeValues), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } return a; } - public StringFilter convertToStringFilter(DocValueFormat format) { + public StringFilter convertToStringFilter(DocValueFormat format, IndexSettings indexSettings) { if (isRegexBased()) { - return new AutomatonBackedStringFilter(toAutomaton()); + return new AutomatonBackedStringFilter(toAutomaton(indexSettings)); } if (isPartitionBased()) { return new PartitionedStringFilter(); @@ -624,10 +645,10 @@ private static SortedSet parseForDocValues(SortedSet endUser return result; } - public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) { + public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format, IndexSettings indexSettings) { if (isRegexBased()) { - return new AutomatonBackedOrdinalsFilter(toAutomaton()); + return new AutomatonBackedOrdinalsFilter(toAutomaton(indexSettings)); } if (isPartitionBased()) { return new PartitionedOrdinalsFilter(); @@ -684,7 +705,7 @@ public LongFilter convertToDoubleFilter() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (include != null) { - builder.field(INCLUDE_FIELD.getPreferredName(), include.getOriginalString()); + builder.field(INCLUDE_FIELD.getPreferredName(), include); } else if (includeValues != null) { builder.startArray(INCLUDE_FIELD.getPreferredName()); for (BytesRef value : includeValues) { @@ -698,7 +719,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); } if (exclude != null) { - builder.field(EXCLUDE_FIELD.getPreferredName(), 
exclude.getOriginalString()); + builder.field(EXCLUDE_FIELD.getPreferredName(), exclude); } else if (excludeValues != null) { builder.startArray(EXCLUDE_FIELD.getPreferredName()); for (BytesRef value : excludeValues) { @@ -711,14 +732,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { - return Objects.hash( - include == null ? null : include.getOriginalString(), - exclude == null ? null : exclude.getOriginalString(), - includeValues, - excludeValues, - incZeroBasedPartition, - incNumPartitions - ); + return Objects.hash(include, exclude, includeValues, excludeValues, incZeroBasedPartition, incNumPartitions); } @Override @@ -730,14 +744,8 @@ public boolean equals(Object obj) { return false; } IncludeExclude other = (IncludeExclude) obj; - return Objects.equals( - include == null ? null : include.getOriginalString(), - other.include == null ? null : other.include.getOriginalString() - ) - && Objects.equals( - exclude == null ? null : exclude.getOriginalString(), - other.exclude == null ? 
null : other.exclude.getOriginalString() - ) + return Objects.equals(include, other.include) + && Objects.equals(exclude, other.exclude) && Objects.equals(includeValues, other.includeValues) && Objects.equals(excludeValues, other.excludeValues) && Objects.equals(incZeroBasedPartition, other.incZeroBasedPartition) diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java index 0e03f87b070e1..c0a5c77a98170 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java @@ -34,6 +34,7 @@ import org.opensearch.common.ParseField; import org.opensearch.common.logging.DeprecationLogger; +import org.opensearch.index.IndexSettings; import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.Aggregator; @@ -250,7 +251,10 @@ Aggregator create( double precision, CardinalityUpperBound cardinality ) throws IOException { - final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format); + IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings(); + final IncludeExclude.StringFilter filter = includeExclude == null + ? 
null + : includeExclude.convertToStringFilter(format, indexSettings); return new StringRareTermsAggregator( name, factories, diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java index db6106d3ce9bc..4b93121ae06ef 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java @@ -34,6 +34,7 @@ import org.opensearch.common.ParseField; import org.opensearch.common.logging.DeprecationLogger; +import org.opensearch.index.IndexSettings; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.DocValueFormat; @@ -325,8 +326,10 @@ Aggregator create( CardinalityUpperBound cardinality, Map metadata ) throws IOException { - - final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format); + IndexSettings indexSettings = aggregationContext.getQueryShardContext().getIndexSettings(); + final IncludeExclude.StringFilter filter = includeExclude == null + ? null + : includeExclude.convertToStringFilter(format, indexSettings); return new MapStringTermsAggregator( name, factories, @@ -364,8 +367,10 @@ Aggregator create( CardinalityUpperBound cardinality, Map metadata ) throws IOException { - - final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format); + IndexSettings indexSettings = aggregationContext.getQueryShardContext().getIndexSettings(); + final IncludeExclude.OrdinalsFilter filter = includeExclude == null + ? 
null + : includeExclude.convertToOrdinalsFilter(format, indexSettings); boolean remapGlobalOrd = true; if (cardinality == CardinalityUpperBound.ONE && factories == AggregatorFactories.EMPTY && includeExclude == null) { /* diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java index 85b4282e4c55b..992035f1fbe97 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java @@ -44,6 +44,7 @@ import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.BytesRefHash; import org.opensearch.common.util.ObjectArray; +import org.opensearch.index.IndexSettings; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryShardContext; @@ -137,7 +138,10 @@ protected Aggregator createInternal( // TODO - need to check with mapping that this is indeed a text field.... - IncludeExclude.StringFilter incExcFilter = includeExclude == null ? null : includeExclude.convertToStringFilter(DocValueFormat.RAW); + IndexSettings indexSettings = searchContext.getQueryShardContext().getIndexSettings(); + IncludeExclude.StringFilter incExcFilter = includeExclude == null + ? 
null + : includeExclude.convertToStringFilter(DocValueFormat.RAW, indexSettings); MapStringTermsAggregator.CollectorSource collectorSource = new SignificantTextCollectorSource( queryShardContext.lookup().source(), diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index d2272d0a63042..17b412f87107c 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.IndexSearcher; import org.opensearch.common.ParseField; +import org.opensearch.index.IndexSettings; import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.AggregationExecutionException; @@ -380,7 +381,10 @@ Aggregator create( CardinalityUpperBound cardinality, Map metadata ) throws IOException { - final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format); + IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings(); + final IncludeExclude.StringFilter filter = includeExclude == null + ? null + : includeExclude.convertToStringFilter(format, indexSettings); return new MapStringTermsAggregator( name, factories, @@ -458,7 +462,10 @@ Aggregator create( ); } - final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format); + IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings(); + final IncludeExclude.OrdinalsFilter filter = includeExclude == null + ? 
null + : includeExclude.convertToOrdinalsFilter(format, indexSettings); boolean remapGlobalOrds; if (cardinality == CardinalityUpperBound.ONE && REMAP_GLOBAL_ORDS != null) { /* diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/RareTermsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/RareTermsTests.java index 799faecb5ab57..6b8655eccd74d 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/RareTermsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/RareTermsTests.java @@ -33,7 +33,6 @@ package org.opensearch.search.aggregations.bucket; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.RegExp; import org.opensearch.search.aggregations.BaseAggregationTestCase; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; import org.opensearch.search.aggregations.bucket.terms.RareTermsAggregationBuilder; @@ -59,13 +58,13 @@ protected RareTermsAggregationBuilder createTestAggregatorBuilder() { IncludeExclude incExc = null; switch (randomInt(6)) { case 0: - incExc = new IncludeExclude(new RegExp("foobar"), null); + incExc = new IncludeExclude("foobar", null); break; case 1: - incExc = new IncludeExclude(null, new RegExp("foobaz")); + incExc = new IncludeExclude(null, "foobaz"); break; case 2: - incExc = new IncludeExclude(new RegExp("foobar"), new RegExp("foobaz")); + incExc = new IncludeExclude("foobar", "foobaz"); break; case 3: SortedSet includeValues = new TreeSet<>(); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/SignificantTermsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/SignificantTermsTests.java index 3001f8ede7f4d..6312d6c175866 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/SignificantTermsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/SignificantTermsTests.java @@ -33,7 +33,6 @@ package 
org.opensearch.search.aggregations.bucket; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.RegExp; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.BaseAggregationTestCase; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; @@ -160,13 +159,13 @@ static IncludeExclude getIncludeExclude() { IncludeExclude incExc = null; switch (randomInt(5)) { case 0: - incExc = new IncludeExclude(new RegExp("foobar"), null); + incExc = new IncludeExclude("foobar", null); break; case 1: - incExc = new IncludeExclude(null, new RegExp("foobaz")); + incExc = new IncludeExclude(null, "foobaz"); break; case 2: - incExc = new IncludeExclude(new RegExp("foobar"), new RegExp("foobaz")); + incExc = new IncludeExclude("foobar", "foobaz"); break; case 3: SortedSet includeValues = new TreeSet<>(); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/TermsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/TermsTests.java index eb4f33c6f8e19..04e7fad2105ec 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/TermsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/TermsTests.java @@ -33,7 +33,6 @@ package org.opensearch.search.aggregations.bucket; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.RegExp; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.BaseAggregationTestCase; import org.opensearch.search.aggregations.BucketOrder; @@ -118,13 +117,13 @@ protected TermsAggregationBuilder createTestAggregatorBuilder() { IncludeExclude incExc = null; switch (randomInt(6)) { case 0: - incExc = new IncludeExclude(new RegExp("foobar"), null); + incExc = new IncludeExclude("foobar", null); break; case 1: - incExc = new IncludeExclude(null, new RegExp("foobaz")); + incExc = new IncludeExclude(null, "foobaz"); break; 
case 2: - incExc = new IncludeExclude(new RegExp("foobar"), new RegExp("foobaz")); + incExc = new IncludeExclude("foobar", "foobaz"); break; case 3: SortedSet includeValues = new TreeSet<>(); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java index 7703afa88d93c..34cc29d40a9fd 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java @@ -41,7 +41,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.RegExp; import org.opensearch.common.Numbers; import org.opensearch.index.mapper.BinaryFieldMapper; import org.opensearch.index.mapper.MappedFieldType; @@ -97,7 +96,7 @@ public void testMatchAllDocs() throws IOException { } public void testBadIncludeExclude() throws IOException { - IncludeExclude includeExclude = new IncludeExclude(new RegExp("foo"), null); + IncludeExclude includeExclude = new IncludeExclude("foo", null); // Make sure the include/exclude fails regardless of how the user tries to type hint the agg AggregationExecutionException e = expectThrows( diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java index 13e41d5a2e543..846f71b12dab0 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java @@ -42,7 +42,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; 
import org.apache.lucene.store.Directory; -import org.apache.lucene.util.automaton.RegExp; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.NumberFieldMapper; import org.opensearch.search.aggregations.AggregationExecutionException; @@ -116,7 +115,7 @@ public void testMatchAllDocs() throws IOException { } public void testBadIncludeExclude() throws IOException { - IncludeExclude includeExclude = new IncludeExclude(new RegExp("foo"), null); + IncludeExclude includeExclude = new IncludeExclude("foo", null); // Numerics don't support any regex include/exclude, so should fail no matter what we do diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java index d84812557ab18..9ebca90d84cab 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java @@ -55,12 +55,12 @@ public class IncludeExcludeTests extends OpenSearchTestCase { public void testEmptyTermsWithOrds() throws IOException { IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null); - OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); assertEquals(0, acceptedOrds.length()); inexcl = new IncludeExclude(null, new TreeSet<>(Collections.singleton(new BytesRef("foo")))); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); assertEquals(0, acceptedOrds.length()); } @@ -99,13 +99,13 @@ public long getValueCount() { }; 
IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null); - OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(ords); assertEquals(1, acceptedOrds.length()); assertTrue(acceptedOrds.get(0)); inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("bar"))), null); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); acceptedOrds = filter.acceptedGlobalOrdinals(ords); assertEquals(1, acceptedOrds.length()); assertFalse(acceptedOrds.get(0)); @@ -114,7 +114,7 @@ public long getValueCount() { new TreeSet<>(Collections.singleton(new BytesRef("foo"))), new TreeSet<>(Collections.singleton(new BytesRef("foo"))) ); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); acceptedOrds = filter.acceptedGlobalOrdinals(ords); assertEquals(1, acceptedOrds.length()); assertFalse(acceptedOrds.get(0)); @@ -123,7 +123,7 @@ public long getValueCount() { null, // means everything included new TreeSet<>(Collections.singleton(new BytesRef("foo"))) ); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); acceptedOrds = filter.acceptedGlobalOrdinals(ords); assertEquals(1, acceptedOrds.length()); assertFalse(acceptedOrds.get(0)); From 249155772b65681ba8e91d2356e3430f2236b6be Mon Sep 17 00:00:00 2001 From: Tianli Feng Date: Thu, 7 Apr 2022 15:31:31 -0700 Subject: [PATCH 07/19] Allow deprecation warning for API call GET _cat/master in ExceptionIT of mixed cluster BWC test (#2767) Signed-off-by: Tianli Feng --- .../test/java/org/opensearch/backwards/ExceptionIT.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff 
--git a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/ExceptionIT.java b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/ExceptionIT.java index e0246870181c0..f85a94cc9f556 100644 --- a/qa/mixed-cluster/src/test/java/org/opensearch/backwards/ExceptionIT.java +++ b/qa/mixed-cluster/src/test/java/org/opensearch/backwards/ExceptionIT.java @@ -47,8 +47,13 @@ public void testOpensearchException() throws Exception { private void logClusterNodes() throws IOException { ObjectPath objectPath = ObjectPath.createFromResponse(client().performRequest(new Request("GET", "_nodes"))); Map nodes = objectPath.evaluate("nodes"); - String master = EntityUtils.toString(client().performRequest(new Request("GET", "_cat/master?h=id")).getEntity()).trim(); - logger.info("cluster discovered: master id='{}'", master); + // As of 2.0, 'GET _cat/master' API is deprecated to promote inclusive language. + // Allow the deprecation warning for the node running an older version. + // TODO: Replace the API with 'GET _cat/cluster_manager' when dropping compatibility with 1.x versions. + Request catRequest = new Request("GET", "_cat/master?h=id"); + catRequest.setOptions(expectWarningsOnce("[GET /_cat/master] is deprecated! Use [GET /_cat/cluster_manager] instead.")); + String clusterManager = EntityUtils.toString(client().performRequest(catRequest).getEntity()).trim(); + logger.info("cluster discovered: cluster-manager id='{}'", clusterManager); for (String id : nodes.keySet()) { logger.info("{}: id='{}', name='{}', version={}", objectPath.evaluate("nodes." 
+ id + ".http.publish_address"), From 47a22bb08d09a1eada4cc4349a35f3eed45e6336 Mon Sep 17 00:00:00 2001 From: Tianli Feng Date: Thu, 7 Apr 2022 15:45:20 -0700 Subject: [PATCH 08/19] Replace remaining 'blacklist' with 'denylist' in internal class and method names (#2784) * Replace blacklist with denylist in BlacklistedPathPatternMatcher Signed-off-by: Tianli Feng * Replace blacklist with denylist in assumption message Signed-off-by: Tianli Feng * Replace all Blacklisted with Denylisted Signed-off-by: Tianli Feng * Replace all blacklist(key) with denylist(key) Signed-off-by: Tianli Feng * Adjust format by spotlessApply task Signed-off-by: Tianli Feng --- .../opensearch/common/inject/BindingProcessor.java | 2 +- .../opensearch/common/inject/InheritingState.java | 8 ++++---- .../org/opensearch/common/inject/InjectorImpl.java | 8 ++++---- .../java/org/opensearch/common/inject/State.java | 12 ++++++------ ...tcher.java => DenylistedPathPatternMatcher.java} | 4 ++-- .../yaml/OpenSearchClientYamlSuiteTestCase.java | 13 +++++-------- ....java => DenylistedPathPatternMatcherTests.java} | 6 +++--- 7 files changed, 25 insertions(+), 28 deletions(-) rename test/framework/src/main/java/org/opensearch/test/rest/yaml/{BlacklistedPathPatternMatcher.java => DenylistedPathPatternMatcher.java} (97%) rename test/framework/src/test/java/org/opensearch/test/rest/yaml/{BlacklistedPathPatternMatcherTests.java => DenylistedPathPatternMatcherTests.java} (93%) diff --git a/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java b/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java index 671123f2df767..2635ead8d7f51 100644 --- a/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java +++ b/server/src/main/java/org/opensearch/common/inject/BindingProcessor.java @@ -274,7 +274,7 @@ private void putBinding(BindingImpl binding) { } // prevent the parent from creating a JIT binding for this key - injector.state.parent().blacklist(key); + 
injector.state.parent().denylist(key); injector.state.putBinding(key, binding); } diff --git a/server/src/main/java/org/opensearch/common/inject/InheritingState.java b/server/src/main/java/org/opensearch/common/inject/InheritingState.java index 70a2fb335cca5..a25017ab9a2c0 100644 --- a/server/src/main/java/org/opensearch/common/inject/InheritingState.java +++ b/server/src/main/java/org/opensearch/common/inject/InheritingState.java @@ -143,18 +143,18 @@ public List getTypeListenerBindings() { } @Override - public void blacklist(Key key) { - parent.blacklist(key); + public void denylist(Key key) { + parent.denylist(key); denylistedKeys.add(key); } @Override - public boolean isBlacklisted(Key key) { + public boolean isDenylisted(Key key) { return denylistedKeys.contains(key); } @Override - public void clearBlacklisted() { + public void clearDenylisted() { denylistedKeys = new WeakKeySet(); } diff --git a/server/src/main/java/org/opensearch/common/inject/InjectorImpl.java b/server/src/main/java/org/opensearch/common/inject/InjectorImpl.java index 3c888cd92b226..439ce8fbae33b 100644 --- a/server/src/main/java/org/opensearch/common/inject/InjectorImpl.java +++ b/server/src/main/java/org/opensearch/common/inject/InjectorImpl.java @@ -530,12 +530,12 @@ public T get(Errors errors, InternalContext context, Dependency dependency) t * other ancestor injectors until this injector is tried. */ private BindingImpl createJustInTimeBindingRecursive(Key key, Errors errors) throws ErrorsException { - if (state.isBlacklisted(key)) { + if (state.isDenylisted(key)) { throw errors.childBindingAlreadySet(key).toException(); } BindingImpl binding = createJustInTimeBinding(key, errors); - state.parent().blacklist(key); + state.parent().denylist(key); jitBindings.put(key, binding); return binding; } @@ -555,7 +555,7 @@ private BindingImpl createJustInTimeBindingRecursive(Key key, Errors e * if the binding cannot be created. 
*/ BindingImpl createJustInTimeBinding(Key key, Errors errors) throws ErrorsException { - if (state.isBlacklisted(key)) { + if (state.isDenylisted(key)) { throw errors.childBindingAlreadySet(key).toException(); } @@ -805,7 +805,7 @@ public String toString() { // ES_GUICE: clear caches public void clearCache() { - state.clearBlacklisted(); + state.clearDenylisted(); constructors = new ConstructorInjectorStore(this); membersInjectorStore = new MembersInjectorStore(this, state.getTypeListenerBindings()); jitBindings = new HashMap<>(); diff --git a/server/src/main/java/org/opensearch/common/inject/State.java b/server/src/main/java/org/opensearch/common/inject/State.java index 6a69e9547d707..560824c065793 100644 --- a/server/src/main/java/org/opensearch/common/inject/State.java +++ b/server/src/main/java/org/opensearch/common/inject/State.java @@ -106,15 +106,15 @@ public List getTypeListenerBindings() { } @Override - public void blacklist(Key key) {} + public void denylist(Key key) {} @Override - public boolean isBlacklisted(Key key) { + public boolean isDenylisted(Key key) { return true; } @Override - public void clearBlacklisted() {} + public void clearDenylisted() {} @Override public void makeAllBindingsToEagerSingletons(Injector injector) {} @@ -167,13 +167,13 @@ public Object lock() { * denylist their bound keys on their parent injectors to prevent just-in-time bindings on the * parent injector that would conflict. */ - void blacklist(Key key); + void denylist(Key key); /** * Returns true if {@code key} is forbidden from being bound in this injector. This indicates that * one of this injector's descendent's has bound the key. */ - boolean isBlacklisted(Key key); + boolean isDenylisted(Key key); /** * Returns the shared lock for all injector data. 
This is a low-granularity, high-contention lock @@ -182,7 +182,7 @@ public Object lock() { Object lock(); // ES_GUICE: clean denylist keys - void clearBlacklisted(); + void clearDenylisted(); void makeAllBindingsToEagerSingletons(Injector injector); } diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcher.java similarity index 97% rename from test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java rename to test/framework/src/main/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcher.java index 15510e368b1f5..eeaa76b6ca1b3 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcher.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcher.java @@ -47,7 +47,7 @@ * * Each denylist pattern is a suffix match on the path. Empty patterns are not allowed. */ -final class BlacklistedPathPatternMatcher { +final class DenylistedPathPatternMatcher { private final Pattern pattern; /** @@ -55,7 +55,7 @@ final class BlacklistedPathPatternMatcher { * * @param p The suffix pattern. Must be a non-empty string. 
*/ - BlacklistedPathPatternMatcher(String p) { + DenylistedPathPatternMatcher(String p) { // guard against accidentally matching everything as an empty string lead to the pattern ".*" which matches everything if (p == null || p.trim().isEmpty()) { throw new IllegalArgumentException("Empty denylist patterns are not supported"); diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java index 70e3adbefbfc3..1b19f03f46174 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java @@ -116,7 +116,7 @@ public abstract class OpenSearchClientYamlSuiteTestCase extends OpenSearchRestTe */ private static final String PATHS_SEPARATOR = "(? denylistPathMatchers; + private static List denylistPathMatchers; private static ClientYamlTestExecutionContext restTestExecutionContext; private static ClientYamlTestExecutionContext adminExecutionContext; private static ClientYamlTestClient clientYamlTestClient; @@ -157,11 +157,11 @@ public void initAndResetContext() throws Exception { final String[] denylist = resolvePathsProperty(REST_TESTS_DENYLIST, null); denylistPathMatchers = new ArrayList<>(); for (final String entry : denylist) { - denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + denylistPathMatchers.add(new DenylistedPathPatternMatcher(entry)); } final String[] denylistAdditions = resolvePathsProperty(REST_TESTS_DENYLIST_ADDITIONS, null); for (final String entry : denylistAdditions) { - denylistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + denylistPathMatchers.add(new DenylistedPathPatternMatcher(entry)); } } assert restTestExecutionContext != null; @@ -368,12 +368,9 @@ protected RequestOptions getCatNodesVersionMasterRequestOptions() { 
public void test() throws IOException { // skip test if it matches one of the denylist globs - for (BlacklistedPathPatternMatcher denylistedPathMatcher : denylistPathMatchers) { + for (DenylistedPathPatternMatcher denylistedPathMatcher : denylistPathMatchers) { String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); - assumeFalse( - "[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", - denylistedPathMatcher.isSuffixMatch(testPath) - ); + assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: denylisted", denylistedPathMatcher.isSuffixMatch(testPath)); } // skip test if the whole suite (yaml file) is disabled diff --git a/test/framework/src/test/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java b/test/framework/src/test/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcherTests.java similarity index 93% rename from test/framework/src/test/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java rename to test/framework/src/test/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcherTests.java index 05cdec242e565..3d62f399fe271 100644 --- a/test/framework/src/test/java/org/opensearch/test/rest/yaml/BlacklistedPathPatternMatcherTests.java +++ b/test/framework/src/test/java/org/opensearch/test/rest/yaml/DenylistedPathPatternMatcherTests.java @@ -33,7 +33,7 @@ import org.opensearch.test.OpenSearchTestCase; -public class BlacklistedPathPatternMatcherTests extends OpenSearchTestCase { +public class DenylistedPathPatternMatcherTests extends OpenSearchTestCase { public void testMatchesExact() { // suffix match @@ -71,12 +71,12 @@ public void testMatchesMixedPatterns() { } private void assertMatch(String pattern, String path) { - BlacklistedPathPatternMatcher matcher = new BlacklistedPathPatternMatcher(pattern); + DenylistedPathPatternMatcher matcher = new DenylistedPathPatternMatcher(pattern); assertTrue("Pattern [" + pattern + "] should have 
matched path [" + path + "]", matcher.isSuffixMatch(path)); } private void assertNoMatch(String pattern, String path) { - BlacklistedPathPatternMatcher matcher = new BlacklistedPathPatternMatcher(pattern); + DenylistedPathPatternMatcher matcher = new DenylistedPathPatternMatcher(pattern); assertFalse("Pattern [" + pattern + "] should not have matched path [" + path + "]", matcher.isSuffixMatch(path)); } } From 2d89bc7c61021b81dc884118300f64772a319276 Mon Sep 17 00:00:00 2001 From: Kartik Date: Thu, 7 Apr 2022 18:12:33 -0700 Subject: [PATCH 09/19] Updates to the large string reg-ex check (#2814) * Updates to the large string reg-ex check Removed the null-case for IndexSettings since this only occurs in tests. The tests now use a dummy Index Setting. This change also fixes a bug with the base case handling of max regex length in the check. Signed-off-by: Kartik Ganesh --- .../bucket/terms/IncludeExclude.java | 49 ++++++++----------- .../support/IncludeExcludeTests.java | 26 +++++++--- 2 files changed, 40 insertions(+), 35 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java index acb3a6629c734..71320909ca5d2 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -48,7 +48,6 @@ import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.opensearch.OpenSearchParseException; -import org.opensearch.common.Nullable; import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -577,23 +576,10 @@ public boolean isPartitionBased() { return incNumPartitions > 0; } - private Automaton toAutomaton(@Nullable IndexSettings indexSettings) { - int 
maxRegexLength = indexSettings == null ? -1 : indexSettings.getMaxRegexLength(); + private Automaton toAutomaton(IndexSettings indexSettings) { Automaton a; if (include != null) { - if (include.length() > maxRegexLength) { - throw new IllegalArgumentException( - "The length of regex [" - + include.length() - + "] used in the request has exceeded " - + "the allowed maximum of [" - + maxRegexLength - + "]. " - + "This maximum can be set by changing the [" - + IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey() - + "] index level setting." - ); - } + validateRegExpStringLength(include, indexSettings); a = new RegExp(include).toAutomaton(); } else if (includeValues != null) { a = Automata.makeStringUnion(includeValues); @@ -601,19 +587,7 @@ private Automaton toAutomaton(@Nullable IndexSettings indexSettings) { a = Automata.makeAnyString(); } if (exclude != null) { - if (exclude.length() > maxRegexLength) { - throw new IllegalArgumentException( - "The length of regex [" - + exclude.length() - + "] used in the request has exceeded " - + "the allowed maximum of [" - + maxRegexLength - + "]. " - + "This maximum can be set by changing the [" - + IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey() - + "] index level setting." - ); - } + validateRegExpStringLength(exclude, indexSettings); Automaton excludeAutomaton = new RegExp(exclude).toAutomaton(); a = Operations.minus(a, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } else if (excludeValues != null) { @@ -622,6 +596,23 @@ private Automaton toAutomaton(@Nullable IndexSettings indexSettings) { return a; } + private static void validateRegExpStringLength(String source, IndexSettings indexSettings) { + int maxRegexLength = indexSettings.getMaxRegexLength(); + if (maxRegexLength > 0 && source.length() > maxRegexLength) { + throw new IllegalArgumentException( + "The length of regex [" + + source.length() + + "] used in the request has exceeded " + + "the allowed maximum of [" + + maxRegexLength + + "]. 
" + + "This maximum can be set by changing the [" + + IndexSettings.MAX_REGEX_LENGTH_SETTING.getKey() + + "] index level setting." + ); + } + } + public StringFilter convertToStringFilter(DocValueFormat format, IndexSettings indexSettings) { if (isRegexBased()) { return new AutomatonBackedStringFilter(toAutomaton(indexSettings)); diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java index 9ebca90d84cab..d104fc6783dc5 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java @@ -36,12 +36,16 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongBitSet; +import org.opensearch.Version; +import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.ParseField; +import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.IndexSettings; import org.opensearch.index.fielddata.AbstractSortedSetDocValues; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; @@ -53,14 +57,24 @@ import java.util.TreeSet; public class IncludeExcludeTests extends OpenSearchTestCase { + + private final IndexSettings dummyIndexSettings = new IndexSettings( + IndexMetadata.builder("index") + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(), + Settings.EMPTY + ); + public void 
testEmptyTermsWithOrds() throws IOException { IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null); - OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); + OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); assertEquals(0, acceptedOrds.length()); inexcl = new IncludeExclude(null, new TreeSet<>(Collections.singleton(new BytesRef("foo")))); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); assertEquals(0, acceptedOrds.length()); } @@ -99,13 +113,13 @@ public long getValueCount() { }; IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null); - OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); + OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(ords); assertEquals(1, acceptedOrds.length()); assertTrue(acceptedOrds.get(0)); inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("bar"))), null); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); acceptedOrds = filter.acceptedGlobalOrdinals(ords); assertEquals(1, acceptedOrds.length()); assertFalse(acceptedOrds.get(0)); @@ -114,7 +128,7 @@ public long getValueCount() { new TreeSet<>(Collections.singleton(new BytesRef("foo"))), new TreeSet<>(Collections.singleton(new BytesRef("foo"))) ); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); 
acceptedOrds = filter.acceptedGlobalOrdinals(ords); assertEquals(1, acceptedOrds.length()); assertFalse(acceptedOrds.get(0)); @@ -123,7 +137,7 @@ public long getValueCount() { null, // means everything included new TreeSet<>(Collections.singleton(new BytesRef("foo"))) ); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, null); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); acceptedOrds = filter.acceptedGlobalOrdinals(ords); assertEquals(1, acceptedOrds.length()); assertFalse(acceptedOrds.get(0)); From b5d5616d44a08c609fb96c7467732480d7333285 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yoann=20Rodi=C3=A8re?= Date: Fri, 8 Apr 2022 22:43:51 +0200 Subject: [PATCH 10/19] Update commons-logging to 1.2 (#2806) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Upgrade to Apache Commons Logging 1.2 Signed-off-by: Yoann Rodière * Clarify that Apache HTTP/commons-* dependencies are not just for tests Signed-off-by: Yoann Rodière --- buildSrc/version.properties | 12 +++++++----- client/rest/build.gradle | 1 - client/rest/licenses/commons-logging-1.1.3.jar.sha1 | 1 - client/rest/licenses/commons-logging-1.2.jar.sha1 | 1 + client/sniffer/build.gradle | 1 - .../sniffer/licenses/commons-logging-1.1.3.jar.sha1 | 1 - client/sniffer/licenses/commons-logging-1.2.jar.sha1 | 1 + .../licenses/commons-logging-1.2.jar.sha1 | 1 + .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/commons-logging-1.2.jar.sha1 | 1 + .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/commons-logging-1.2.jar.sha1 | 1 + .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/commons-logging-1.2.jar.sha1 | 1 + .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/commons-logging-1.2.jar.sha1 | 1 + .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/commons-logging-1.2.jar.sha1 | 1 + .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/commons-logging-1.2.jar.sha1 | 1 
+ .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/commons-logging-1.2.jar.sha1 | 1 + test/framework/build.gradle | 1 - .../core/licenses/commons-logging-1.2.jar.sha1 | 1 + 24 files changed, 18 insertions(+), 17 deletions(-) delete mode 100644 client/rest/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 client/rest/licenses/commons-logging-1.2.jar.sha1 delete mode 100644 client/sniffer/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 client/sniffer/licenses/commons-logging-1.2.jar.sha1 create mode 100644 modules/repository-s3/licenses/commons-logging-1.2.jar.sha1 delete mode 100644 plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/discovery-azure-classic/licenses/commons-logging-1.2.jar.sha1 delete mode 100644 plugins/discovery-ec2/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/discovery-ec2/licenses/commons-logging-1.2.jar.sha1 delete mode 100644 plugins/discovery-gce/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/commons-logging-1.2.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/commons-logging-1.2.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/commons-logging-1.2.jar.sha1 delete mode 100644 plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/commons-logging-1.2.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/repository-s3/licenses/commons-logging-1.2.jar.sha1 create mode 100644 x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 84ee06cafba2d..7ae3bfaa19b5a 100644 --- a/buildSrc/version.properties +++ 
b/buildSrc/version.properties @@ -23,6 +23,13 @@ jna = 5.5.0 netty = 4.1.73.Final joda = 2.10.12 +# client dependencies +httpclient = 4.5.13 +httpcore = 4.4.12 +httpasyncclient = 4.1.4 +commonslogging = 1.2 +commonscodec = 1.13 + # when updating this version, you need to ensure compatibility with: # - plugins/ingest-attachment (transitive dependency, check the upstream POM) # - distribution/tools/plugin-cli @@ -30,11 +37,6 @@ bouncycastle=1.70 # test dependencies randomizedrunner = 2.7.1 junit = 4.13.2 -httpclient = 4.5.13 -httpcore = 4.4.12 -httpasyncclient = 4.1.4 -commonslogging = 1.1.3 -commonscodec = 1.13 hamcrest = 2.1 mockito = 4.3.1 objenesis = 3.2 diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 5c1252061443a..01c186ed83fc2 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -89,7 +89,6 @@ thirdPartyAudit.ignoreMissingClasses( 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', - 'org.apache.log4j.Category', 'org.apache.log4j.Level', 'org.apache.log4j.Logger', 'org.apache.log4j.Priority', diff --git a/client/rest/licenses/commons-logging-1.1.3.jar.sha1 b/client/rest/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index 5b8f029e58293..0000000000000 --- a/client/rest/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/client/rest/licenses/commons-logging-1.2.jar.sha1 b/client/rest/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/client/rest/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index bc4be1dd153e8..b7cb0d87c02d9 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -88,7 +88,6 @@ thirdPartyAudit.ignoreMissingClasses( 
'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', - 'org.apache.log4j.Category', 'org.apache.log4j.Level', 'org.apache.log4j.Logger', 'org.apache.log4j.Priority', diff --git a/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1 b/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index 5b8f029e58293..0000000000000 --- a/client/sniffer/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/client/sniffer/licenses/commons-logging-1.2.jar.sha1 b/client/sniffer/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/client/sniffer/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/modules/repository-s3/licenses/commons-logging-1.2.jar.sha1 b/modules/repository-s3/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/modules/repository-s3/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index c8756c438320f..0000000000000 --- a/plugins/discovery-azure-classic/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f diff --git a/plugins/discovery-azure-classic/licenses/commons-logging-1.2.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file 
diff --git a/plugins/discovery-ec2/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/discovery-ec2/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index c8756c438320f..0000000000000 --- a/plugins/discovery-ec2/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f diff --git a/plugins/discovery-ec2/licenses/commons-logging-1.2.jar.sha1 b/plugins/discovery-ec2/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/plugins/discovery-ec2/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/discovery-gce/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index c8756c438320f..0000000000000 --- a/plugins/discovery-gce/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f diff --git a/plugins/discovery-gce/licenses/commons-logging-1.2.jar.sha1 b/plugins/discovery-gce/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/plugins/discovery-gce/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/ingest-attachment/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index 5b8f029e58293..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-logging-1.2.jar.sha1 b/plugins/ingest-attachment/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ 
b/plugins/ingest-attachment/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index 5b8f029e58293..0000000000000 --- a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-logging-1.2.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index 5b8f029e58293..0000000000000 --- a/plugins/repository-hdfs/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-logging-1.2.jar.sha1 b/plugins/repository-hdfs/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-s3/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index c8756c438320f..0000000000000 --- a/plugins/repository-s3/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f diff --git 
a/plugins/repository-s3/licenses/commons-logging-1.2.jar.sha1 b/plugins/repository-s3/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/plugins/repository-s3/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 42197bf5e2980..096e8c1e58243 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -71,7 +71,6 @@ thirdPartyAudit.ignoreMissingClasses( 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', - 'org.apache.log4j.Category', 'org.apache.log4j.Level', 'org.apache.log4j.Logger', 'org.apache.log4j.Priority', diff --git a/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1 b/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1 new file mode 100644 index 0000000000000..f40f0242448e8 --- /dev/null +++ b/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1 @@ -0,0 +1 @@ +4bfc12adfe4842bf07b657f0369c4cb522955686 \ No newline at end of file From c876f55294b5636e558e5735cf2f0c629bcda080 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:41:20 -0400 Subject: [PATCH 11/19] Bump com.diffplug.spotless from 6.4.1 to 6.4.2 (#2827) Bumps com.diffplug.spotless from 6.4.1 to 6.4.2. --- updated-dependencies: - dependency-name: com.diffplug.spotless dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 487f20c7f6ccd..a41ad61de39a6 100644 --- a/build.gradle +++ b/build.gradle @@ -48,7 +48,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.4.1" apply false + id "com.diffplug.spotless" version "6.4.2" apply false id "org.gradle.test-retry" version "1.3.1" apply false } From 0a17faca613f15b46d6a0ffc02eafec2414481c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:41:48 -0400 Subject: [PATCH 12/19] Bump cdi-api from 1.2 to 2.0 in /qa/wildfly (#2835) Bumps [cdi-api](https://github.com/cdi-spec/cdi) from 1.2 to 2.0. - [Release notes](https://github.com/cdi-spec/cdi/releases) - [Commits](https://github.com/cdi-spec/cdi/compare/1.2...2.0) --- updated-dependencies: - dependency-name: javax.enterprise:cdi-api dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- qa/wildfly/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle index 9abaa2a83033f..0cf2098c24b91 100644 --- a/qa/wildfly/build.gradle +++ b/qa/wildfly/build.gradle @@ -39,7 +39,7 @@ apply plugin: 'opensearch.internal-distribution-download' testFixtures.useFixture() dependencies { - providedCompile 'javax.enterprise:cdi-api:1.2' + providedCompile 'javax.enterprise:cdi-api:2.0' providedCompile 'org.jboss.spec.javax.annotation:jboss-annotations-api_1.2_spec:1.0.2.Final' providedCompile 'org.jboss.spec.javax.ws.rs:jboss-jaxrs-api_2.0_spec:1.0.1.Final' api('org.jboss.resteasy:resteasy-jackson2-provider:3.0.19.Final') { From f68c8f452704f46ece805958f8d3e67d077684d8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:42:16 -0400 Subject: [PATCH 13/19] Bump gradle-info-plugin from 7.1.3 to 11.3.3 in /buildSrc (#2831) Bumps [gradle-info-plugin](https://github.com/nebula-plugins/gradle-info-plugin) from 7.1.3 to 11.3.3. - [Release notes](https://github.com/nebula-plugins/gradle-info-plugin/releases) - [Changelog](https://github.com/nebula-plugins/gradle-info-plugin/blob/main/CHANGELOG.md) - [Commits](https://github.com/nebula-plugins/gradle-info-plugin/compare/v7.1.3...v11.3.3) --- updated-dependencies: - dependency-name: com.netflix.nebula:gradle-info-plugin dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- buildSrc/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 18f8738bbba71..d478a1fd45e80 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -107,7 +107,7 @@ dependencies { api 'org.apache.ant:ant:1.10.12' api 'com.netflix.nebula:gradle-extra-configurations-plugin:7.0.0' api 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' - api 'com.netflix.nebula:gradle-info-plugin:7.1.3' + api 'com.netflix.nebula:gradle-info-plugin:11.3.3' api 'org.apache.rat:apache-rat:0.13' api 'commons-io:commons-io:2.7' api "net.java.dev.jna:jna:5.10.0" From c69ad3deca810f0470757e9a56180cbc574808dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:47:19 -0400 Subject: [PATCH 14/19] Bump azure-core from 1.26.0 to 1.27.0 in /plugins/repository-azure (#2837) * Bump azure-core from 1.26.0 to 1.27.0 in /plugins/repository-azure Bumps [azure-core](https://github.com/Azure/azure-sdk-for-java) from 1.26.0 to 1.27.0. - [Release notes](https://github.com/Azure/azure-sdk-for-java/releases) - [Commits](https://github.com/Azure/azure-sdk-for-java/compare/azure-core_1.26.0...azure-core_1.27.0) --- updated-dependencies: - dependency-name: com.azure:azure-core dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-azure/build.gradle | 2 +- plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 | 1 - plugins/repository-azure/licenses/azure-core-1.27.0.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-core-1.27.0.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 040a29750b967..da644d77eb488 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -44,7 +44,7 @@ opensearchplugin { } dependencies { - api 'com.azure:azure-core:1.26.0' + api 'com.azure:azure-core:1.27.0' api 'com.azure:azure-storage-common:12.15.0' api 'com.azure:azure-core-http-netty:1.11.9' api "io.netty:netty-codec-dns:${versions.netty}" diff --git a/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 deleted file mode 100644 index 693c6a721959c..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -461b89dcf8948a0c4a97d4f1d876f778d0cac7aa \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-1.27.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.27.0.jar.sha1 new file mode 100644 index 0000000000000..9206b697ca648 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-1.27.0.jar.sha1 @@ -0,0 +1 @@ +75a2db538d218e2bd3c2cbdf04c955b8f6db6626 \ No newline at end of file From 81e2455df1b9e4428a3379aecc7dfe877202060f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:47:50 -0400 Subject: [PATCH 15/19] 
Bump asm-analysis from 9.2 to 9.3 in /test/logger-usage (#2829) Bumps asm-analysis from 9.2 to 9.3. --- updated-dependencies: - dependency-name: org.ow2.asm:asm-analysis dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- test/logger-usage/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index ecfccc9338410..3154e556a87cf 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -33,7 +33,7 @@ apply plugin: 'opensearch.java' dependencies { api 'org.ow2.asm:asm:9.2' api 'org.ow2.asm:asm-tree:9.2' - api 'org.ow2.asm:asm-analysis:9.2' + api 'org.ow2.asm:asm-analysis:9.3' api "org.apache.logging.log4j:log4j-api:${versions.log4j}" testImplementation project(":test:framework") } From 00ae764752b6b705ed109c4cfef259dd5b6adc4e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:48:21 -0400 Subject: [PATCH 16/19] Bump protobuf-java from 3.19.3 to 3.20.0 in /plugins/repository-hdfs (#2836) * Bump protobuf-java from 3.19.3 to 3.20.0 in /plugins/repository-hdfs Bumps [protobuf-java](https://github.com/protocolbuffers/protobuf) from 3.19.3 to 3.20.0. - [Release notes](https://github.com/protocolbuffers/protobuf/releases) - [Changelog](https://github.com/protocolbuffers/protobuf/blob/main/generate_changelog.py) - [Commits](https://github.com/protocolbuffers/protobuf/compare/v3.19.3...v3.20.0) --- updated-dependencies: - dependency-name: com.google.protobuf:protobuf-java dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1 | 1 - plugins/repository-hdfs/licenses/protobuf-java-3.20.0.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/protobuf-java-3.20.0.jar.sha1 diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 1787a380b933b..d8811ded8d092 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -67,7 +67,7 @@ dependencies { api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}" api 'com.google.code.gson:gson:2.9.0' runtimeOnly 'com.google.guava:guava:30.1.1-jre' - api 'com.google.protobuf:protobuf-java:3.19.3' + api 'com.google.protobuf:protobuf-java:3.20.0' api "commons-logging:commons-logging:${versions.commonslogging}" api 'commons-cli:commons-cli:1.2' api "commons-codec:commons-codec:${versions.commonscodec}" diff --git a/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1 b/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1 deleted file mode 100644 index 655ecd1f1c1c9..0000000000000 --- a/plugins/repository-hdfs/licenses/protobuf-java-3.19.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4b57f1b1b9e281231c3fcfc039ce3021e29ff570 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/protobuf-java-3.20.0.jar.sha1 b/plugins/repository-hdfs/licenses/protobuf-java-3.20.0.jar.sha1 new file mode 100644 index 0000000000000..c5b0169ce0dba --- /dev/null +++ b/plugins/repository-hdfs/licenses/protobuf-java-3.20.0.jar.sha1 @@ -0,0 +1 @@ +3c72ddaaab7ffafe789e4f732c1fd614eb798bf4 \ No newline at end of file 
From 725b5ca004bd075102f0d9a6a2c37cdc6c1fcab7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:48:56 -0400 Subject: [PATCH 17/19] Bump joni from 2.1.41 to 2.1.43 in /libs/grok (#2832) * Bump joni from 2.1.41 to 2.1.43 in /libs/grok Bumps [joni](https://github.com/jruby/joni) from 2.1.41 to 2.1.43. - [Release notes](https://github.com/jruby/joni/releases) - [Commits](https://github.com/jruby/joni/compare/joni-2.1.41...joni-2.1.43) --- updated-dependencies: - dependency-name: org.jruby.joni:joni dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- libs/grok/build.gradle | 2 +- libs/grok/licenses/joni-2.1.41.jar.sha1 | 1 - libs/grok/licenses/joni-2.1.43.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 libs/grok/licenses/joni-2.1.41.jar.sha1 create mode 100644 libs/grok/licenses/joni-2.1.43.jar.sha1 diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle index e406a80ee1c91..86414d18108a1 100644 --- a/libs/grok/build.gradle +++ b/libs/grok/build.gradle @@ -29,7 +29,7 @@ */ dependencies { - api 'org.jruby.joni:joni:2.1.41' + api 'org.jruby.joni:joni:2.1.43' // joni dependencies: api 'org.jruby.jcodings:jcodings:1.0.57' diff --git a/libs/grok/licenses/joni-2.1.41.jar.sha1 b/libs/grok/licenses/joni-2.1.41.jar.sha1 deleted file mode 100644 index 4f0a0a8393dd0..0000000000000 --- a/libs/grok/licenses/joni-2.1.41.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4a35f4eaef792073bc081b756b1f4949879cd41e \ No newline at end of file diff --git a/libs/grok/licenses/joni-2.1.43.jar.sha1 b/libs/grok/licenses/joni-2.1.43.jar.sha1 new file mode 100644 index 0000000000000..ef5dfabb2b391 --- /dev/null +++ b/libs/grok/licenses/joni-2.1.43.jar.sha1 @@ -0,0 +1 @@ 
+9a3bf154469d5ff1d1107755904279081a5fb618 \ No newline at end of file From d308795bf0e62d39c45419c4c3e3a9f98dc4eaec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:49:32 -0400 Subject: [PATCH 18/19] Bump google-oauth-client from 1.33.1 to 1.33.2 in /plugins/discovery-gce (#2828) * Bump google-oauth-client from 1.33.1 to 1.33.2 in /plugins/discovery-gce Bumps [google-oauth-client](https://github.com/googleapis/google-oauth-java-client) from 1.33.1 to 1.33.2. - [Release notes](https://github.com/googleapis/google-oauth-java-client/releases) - [Changelog](https://github.com/googleapis/google-oauth-java-client/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/google-oauth-java-client/compare/v1.33.1...v1.33.2) --- updated-dependencies: - dependency-name: com.google.oauth-client:google-oauth-client dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/discovery-gce/build.gradle | 2 +- .../discovery-gce/licenses/google-oauth-client-1.33.1.jar.sha1 | 1 - .../discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/discovery-gce/licenses/google-oauth-client-1.33.1.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 2396b228d77a0..eb695f84b2bd0 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -24,7 +24,7 @@ versions << [ dependencies { api "com.google.apis:google-api-services-compute:v1-rev160-${versions.google}" api "com.google.api-client:google-api-client:${versions.google}" - api 
"com.google.oauth-client:google-oauth-client:1.33.1" + api "com.google.oauth-client:google-oauth-client:1.33.2" api "com.google.http-client:google-http-client:${versions.google}" api "com.google.http-client:google-http-client-jackson2:${versions.google}" api 'com.google.code.findbugs:jsr305:3.0.2' diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.33.1.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.33.1.jar.sha1 deleted file mode 100644 index 3897a85310ec6..0000000000000 --- a/plugins/discovery-gce/licenses/google-oauth-client-1.33.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0a431f1a677c5f89507591ab47a7ccdb0b18b6f7 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 new file mode 100644 index 0000000000000..289e8e8261fd3 --- /dev/null +++ b/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 @@ -0,0 +1 @@ +2810fb515fe110295dc6867fc9f70c401b66daf3 \ No newline at end of file From 7dd171d48ae0d416a745f92ce8fe732ef0cbcc02 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Apr 2022 11:50:03 -0400 Subject: [PATCH 19/19] Bump protobuf-java-util from 3.19.3 to 3.20.0 in /plugins/repository-gcs (#2834) * Bump protobuf-java-util from 3.19.3 to 3.20.0 in /plugins/repository-gcs Bumps [protobuf-java-util](https://github.com/protocolbuffers/protobuf) from 3.19.3 to 3.20.0. - [Release notes](https://github.com/protocolbuffers/protobuf/releases) - [Changelog](https://github.com/protocolbuffers/protobuf/blob/main/generate_changelog.py) - [Commits](https://github.com/protocolbuffers/protobuf/compare/v3.19.3...v3.20.0) --- updated-dependencies: - dependency-name: com.google.protobuf:protobuf-java-util dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-gcs/build.gradle | 2 +- .../repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1 | 1 - .../repository-gcs/licenses/protobuf-java-util-3.20.0.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/protobuf-java-util-3.20.0.jar.sha1 diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 6e577d058ff67..0e1ed06879f91 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -61,7 +61,7 @@ dependencies { api 'com.google.api:api-common:1.8.1' api 'com.google.api:gax:1.54.0' api 'org.threeten:threetenbp:1.4.4' - api 'com.google.protobuf:protobuf-java-util:3.19.3' + api 'com.google.protobuf:protobuf-java-util:3.20.0' api 'com.google.protobuf:protobuf-java:3.19.3' api 'com.google.code.gson:gson:2.9.0' api 'com.google.api.grpc:proto-google-common-protos:2.8.0' diff --git a/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1 deleted file mode 100644 index 9ba36d444c541..0000000000000 --- a/plugins/repository-gcs/licenses/protobuf-java-util-3.19.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3e6812cbbb7e6faffa7b56438740dec510e1fc1a \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/protobuf-java-util-3.20.0.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-util-3.20.0.jar.sha1 new file mode 100644 index 0000000000000..1e9d00d8d5c03 --- /dev/null +++ b/plugins/repository-gcs/licenses/protobuf-java-util-3.20.0.jar.sha1 @@ -0,0 +1 @@ +ee4496b296418283cbe7ae784984347fc4717a9a \ No newline at end of file