From 3e9031f283626b41da7429188823e798095533a3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 11:47:37 -0500 Subject: [PATCH 01/46] Bump gradle-extra-configurations-plugin from 3.0.3 to 7.0.0 in /buildSrc (#2386) Bumps [gradle-extra-configurations-plugin](https://github.com/nebula-plugins/gradle-extra-configurations-plugin) from 3.0.3 to 7.0.0. - [Release notes](https://github.com/nebula-plugins/gradle-extra-configurations-plugin/releases) - [Changelog](https://github.com/nebula-plugins/gradle-extra-configurations-plugin/blob/main/CHANGELOG.md) - [Commits](https://github.com/nebula-plugins/gradle-extra-configurations-plugin/compare/v3.0.3...v7.0.0) --- updated-dependencies: - dependency-name: com.netflix.nebula:gradle-extra-configurations-plugin dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- buildSrc/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 3d999454e262e..37bfc5e764dda 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -105,7 +105,7 @@ dependencies { api 'commons-codec:commons-codec:1.15' api 'org.apache.commons:commons-compress:1.21' api 'org.apache.ant:ant:1.10.12' - api 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' + api 'com.netflix.nebula:gradle-extra-configurations-plugin:7.0.0' api 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' api 'com.netflix.nebula:gradle-info-plugin:7.1.3' api 'org.apache.rat:apache-rat:0.13' From 75e837d05679922b201132f4b1153fd3dc372ea1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 11:48:00 -0500 Subject: [PATCH 02/46] Bump guava from 31.0.1-jre to 31.1-jre in /distribution/tools/plugin-cli (#2387) Bumps 
[guava](https://github.com/google/guava) from 31.0.1-jre to 31.1-jre. - [Release notes](https://github.com/google/guava/releases) - [Commits](https://github.com/google/guava/commits) --- updated-dependencies: - dependency-name: com.google.guava:guava dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- distribution/tools/plugin-cli/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index 714080f051186..b2e81491da6bd 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -39,7 +39,7 @@ dependencies { api "org.bouncycastle:bc-fips:1.0.2.3" testImplementation project(":test:framework") testImplementation 'com.google.jimfs:jimfs:1.2' - testRuntimeOnly 'com.google.guava:guava:31.0.1-jre' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } tasks.named("dependencyLicenses").configure { From 72c5d81187bb07e5e912e111e67c5627188fdf45 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 11:48:18 -0500 Subject: [PATCH 03/46] Bump guava in /distribution/tools/keystore-cli (#2384) Bumps [guava](https://github.com/google/guava) from 31.0.1-jre to 31.1-jre. - [Release notes](https://github.com/google/guava/releases) - [Commits](https://github.com/google/guava/commits) --- updated-dependencies: - dependency-name: com.google.guava:guava dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- distribution/tools/keystore-cli/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/tools/keystore-cli/build.gradle b/distribution/tools/keystore-cli/build.gradle index a6468a381148b..1e7473f787ca0 100644 --- a/distribution/tools/keystore-cli/build.gradle +++ b/distribution/tools/keystore-cli/build.gradle @@ -35,5 +35,5 @@ dependencies { compileOnly project(":libs:opensearch-cli") testImplementation project(":test:framework") testImplementation 'com.google.jimfs:jimfs:1.2' - testRuntimeOnly 'com.google.guava:guava:31.0.1-jre' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } From 4395ed560fc9d95faf0161139d289e793b3f56ef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 11:48:38 -0500 Subject: [PATCH 04/46] Bump guava in /distribution/tools/upgrade-cli (#2383) Bumps [guava](https://github.com/google/guava) from 31.0.1-jre to 31.1-jre. - [Release notes](https://github.com/google/guava/releases) - [Commits](https://github.com/google/guava/commits) --- updated-dependencies: - dependency-name: com.google.guava:guava dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- distribution/tools/upgrade-cli/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/tools/upgrade-cli/build.gradle b/distribution/tools/upgrade-cli/build.gradle index 29d06b89395c6..0e1996f3d68fa 100644 --- a/distribution/tools/upgrade-cli/build.gradle +++ b/distribution/tools/upgrade-cli/build.gradle @@ -19,7 +19,7 @@ dependencies { implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" testImplementation project(":test:framework") testImplementation 'com.google.jimfs:jimfs:1.2' - testRuntimeOnly 'com.google.guava:guava:31.0.1-jre' + testRuntimeOnly 'com.google.guava:guava:31.1-jre' } tasks.named("dependencyLicenses").configure { From e1fd4b75b4f888d8d486baceeb9fd6fe7df44416 Mon Sep 17 00:00:00 2001 From: Subhobrata Dey Date: Mon, 7 Mar 2022 08:51:49 -0800 Subject: [PATCH 05/46] Add valuesField in PercentilesAggregationBuilder streamInput constructor (#2308) Signed-off-by: Subhobrata Dey --- ...AbstractPercentilesAggregationBuilder.java | 3 +- .../PercentileRanksAggregationBuilder.java | 2 +- .../PercentilesAggregationBuilder.java | 2 +- .../builder/SearchSourceBuilderTests.java | 40 +++++++++++++++++++ 4 files changed, 44 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java index 094c706053703..ac0baf18dfb55 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java @@ -163,7 +163,7 @@ public static > ConstructingO this.valuesField = clone.valuesField; } - AbstractPercentilesAggregationBuilder(StreamInput 
in) throws IOException { + AbstractPercentilesAggregationBuilder(StreamInput in, ParseField valuesField) throws IOException { super(in); values = in.readDoubleArray(); keyed = in.readBoolean(); @@ -175,6 +175,7 @@ public static > ConstructingO PercentilesMethod method = PercentilesMethod.readFromStream(in); percentilesConfig = PercentilesConfig.fromLegacy(method, compression, numberOfSignificantValueDigits); } + this.valuesField = valuesField; } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java index d8f3c80f36ff3..037830c63ecdf 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java @@ -82,7 +82,7 @@ private PercentileRanksAggregationBuilder(String name, double[] values, Percenti } public PercentileRanksAggregationBuilder(StreamInput in) throws IOException { - super(in); + super(in, VALUES_FIELD); } private PercentileRanksAggregationBuilder( diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java index 85e8d37d3fdba..323723bb6deb1 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/PercentilesAggregationBuilder.java @@ -80,7 +80,7 @@ public static void registerAggregators(ValuesSourceRegistry.Builder builder) { } public PercentilesAggregationBuilder(StreamInput in) throws IOException { - super(in); + super(in, PERCENTS_FIELD); } public static AggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { diff 
--git a/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java index 59519226685e2..66b18bc42ad50 100644 --- a/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/builder/SearchSourceBuilderTests.java @@ -118,6 +118,46 @@ public void testSerialization() throws IOException { } } + public void testSerializationWithPercentilesQueryObject() throws IOException { + String restContent = "{\n" + + " \"aggregations\": {" + + " \"percentiles_duration\": {\n" + + " \"percentiles\" : {\n" + + " \"field\": \"duration\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}\n"; + String expectedContent = "{\"aggregations\":{" + + "\"percentiles_duration\":{" + + "\"percentiles\":{" + + "\"field\":\"duration\"," + + "\"percents\":[1.0,5.0,25.0,50.0,75.0,95.0,99.0]," + + "\"keyed\":true," + + "\"tdigest\":{" + + "\"compression\":100.0" + + "}" + + "}" + + "}" + + "}}"; + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, restContent)) { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); + + try (BytesStreamOutput output = new BytesStreamOutput()) { + searchSourceBuilder.writeTo(output); + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { + SearchSourceBuilder deserializedBuilder = new SearchSourceBuilder(in); + String actualContent = deserializedBuilder.toString(); + + assertEquals(expectedContent, actualContent); + assertEquals(searchSourceBuilder.hashCode(), deserializedBuilder.hashCode()); + assertNotSame(searchSourceBuilder, deserializedBuilder); + } + } + } + } + public void testShallowCopy() { for (int i = 0; i < 10; i++) { SearchSourceBuilder original = createSearchSourceBuilder(); From 09e16e3139ff629cc15ba47c111a9f17b6c13ce2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 13:08:07 -0500 Subject: [PATCH 06/46] Bump guava from 30.1.1-jre to 31.1-jre in /plugins/repository-azure (#2382) * Bump guava from 30.1.1-jre to 31.1-jre in /plugins/repository-azure Bumps [guava](https://github.com/google/guava) from 30.1.1-jre to 31.1-jre. - [Release notes](https://github.com/google/guava/releases) - [Commits](https://github.com/google/guava/commits) --- updated-dependencies: - dependency-name: com.google.guava:guava dependency-type: direct:production ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-azure/build.gradle | 2 +- plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 | 1 - plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 create mode 100644 plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 9f42c0675d3e1..1f923b8f36bbd 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -68,7 +68,7 @@ dependencies { api "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}" api 'org.codehaus.woodstox:stax2-api:4.2.1' implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1' - runtimeOnly 'com.google.guava:guava:30.1.1-jre' + runtimeOnly 'com.google.guava:guava:31.1-jre' api 'org.apache.commons:commons-lang3:3.4' testImplementation project(':test:fixtures:azure-fixture') } diff --git a/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 b/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 deleted file mode 100644 index 39e641fc7834f..0000000000000 --- 
a/plugins/repository-azure/licenses/guava-30.1.1-jre.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -87e0fd1df874ea3cbe577702fe6f17068b790fd8 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 b/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 new file mode 100644 index 0000000000000..e57390ebe1299 --- /dev/null +++ b/plugins/repository-azure/licenses/guava-31.1-jre.jar.sha1 @@ -0,0 +1 @@ +60458f877d055d0c9114d9e1a2efb737b4bc282c \ No newline at end of file From 1f0361a929ef63d648ace98c70a00b43804a0124 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 10:28:17 -0800 Subject: [PATCH 07/46] Bump asm-commons from 5.0.4 to 9.2 in /modules/lang-expression (#2385) * Bump asm-commons from 5.0.4 to 9.2 in /modules/lang-expression Bumps asm-commons from 5.0.4 to 9.2. --- updated-dependencies: - dependency-name: org.ow2.asm:asm-commons dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- modules/lang-expression/build.gradle | 2 +- modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 | 1 - modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 create mode 100644 modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index 0383f6573b16b..e3feacd71f060 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -39,7 +39,7 @@ dependencies { api "org.apache.lucene:lucene-expressions:${versions.lucene}" api 'org.antlr:antlr4-runtime:4.9.3' api 'org.ow2.asm:asm:9.2' - api 'org.ow2.asm:asm-commons:5.0.4' + api 'org.ow2.asm:asm-commons:9.2' api 'org.ow2.asm:asm-tree:9.2' } restResources { diff --git a/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 b/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 deleted file mode 100644 index 94fe0cd92c9c9..0000000000000 --- a/modules/lang-expression/licenses/asm-commons-5.0.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5a556786086c23cd689a0328f8519db93821c04c diff --git a/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 new file mode 100644 index 0000000000000..7beb3d29afe86 --- /dev/null +++ b/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 @@ -0,0 +1 @@ +f4d7f0fc9054386f2893b602454d48e07d4fbead \ No newline at end of file From 919d18036e421f3d48ce0c3278d698fb8651cb22 Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Mon, 7 Mar 2022 12:43:05 -0800 Subject: [PATCH 08/46] Remove type end-points from count action (#2379) 
Signed-off-by: Suraj Singh --- .../org/opensearch/rest/action/search/RestCountAction.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java index 04ee5fdd5b621..6cb00633de441 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java @@ -66,10 +66,7 @@ public List routes() { new Route(GET, "/_count"), new Route(POST, "/_count"), new Route(GET, "/{index}/_count"), - new Route(POST, "/{index}/_count"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_count"), - new Route(POST, "/{index}/{type}/_count") + new Route(POST, "/{index}/_count") ) ); } From 65debde4363fc3f9ec1cbc2f0f15d6e7a4c46305 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Tue, 8 Mar 2022 11:30:48 -0500 Subject: [PATCH 09/46] Update the BWC versions (post 1.x backport) (#2390) Signed-off-by: Andriy Redko --- .../index/query/functionscore/ScoreFunctionBuilder.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilder.java index 48b4f29ea8dcc..4987f260bc9b4 100644 --- a/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/ScoreFunctionBuilder.java @@ -60,7 +60,7 @@ public ScoreFunctionBuilder() {} */ public ScoreFunctionBuilder(StreamInput in) throws IOException { weight = checkWeight(in.readOptionalFloat()); - if (in.getVersion().onOrAfter(Version.V_2_0_0)) { + if (in.getVersion().onOrAfter(Version.V_1_3_0)) { functionName = in.readOptionalString(); } } @@ -68,7 +68,7 @@ public ScoreFunctionBuilder(StreamInput in) 
throws IOException { @Override public final void writeTo(StreamOutput out) throws IOException { out.writeOptionalFloat(weight); - if (out.getVersion().onOrAfter(Version.V_2_0_0)) { + if (out.getVersion().onOrAfter(Version.V_1_3_0)) { out.writeOptionalString(functionName); } doWriteTo(out); From 63c75d1b1d188a4bc3917169001aa8711ad702ef Mon Sep 17 00:00:00 2001 From: Tianli Feng Date: Tue, 8 Mar 2022 08:35:36 -0800 Subject: [PATCH 10/46] Deprecate setting 'reindex.remote.whitelist' and introduce the alternative setting 'reindex.remote.allowlist' (#2221) * Add setting reindex.remote.allowlist, and deprecate setting reindex.remote.whitelist Signed-off-by: Tianli Feng * Add unit test for renaming the setting reindex.remote.allowlist Signed-off-by: Tianli Feng * Remove system.out.println() Signed-off-by: Tianli Feng * Adjust format by spotlessApply task Signed-off-by: Tianli Feng * Replace REMOTE_CLUSTER_WHITELIST with REMOTE_CLUSTER_ALLOWLIST Signed-off-by: Tianli Feng * Add a unit test to test final setting value when both settings have got a value Signed-off-by: Tianli Feng * Rename the unit test class name Signed-off-by: Tianli Feng * Remove the Access modifiers public from the constant REMOTE_CLUSTER_WHITELIST Signed-off-by: Tianli Feng * Initialize ReindexPlugin without using the @Before method Signed-off-by: Tianli Feng * Rename 'unwhitelisted' to 'unallowlisted' in a yml file used for REST api testing. 
Signed-off-by: Tianli Feng --- client/rest-high-level/build.gradle | 2 +- modules/reindex/build.gradle | 2 +- .../index/reindex/ReindexPlugin.java | 1 + .../index/reindex/ReindexValidator.java | 4 +- .../index/reindex/TransportReindexAction.java | 11 ++- .../ReindexFromRemoteWhitelistTests.java | 2 +- .../ReindexFromRemoteWithAuthTests.java | 2 +- .../reindex/ReindexRenamedSettingTests.java | 83 +++++++++++++++++++ .../opensearch/index/reindex/RetryTests.java | 2 +- .../test/reindex/20_validation.yml | 4 +- 10 files changed, 103 insertions(+), 10 deletions(-) create mode 100644 modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 4144186ba5f70..07147ce81b72e 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -92,7 +92,7 @@ check.dependsOn(asyncIntegTest) testClusters.all { testDistribution = 'ARCHIVE' systemProperty 'opensearch.scripting.update.ctx_in_params', 'false' - setting 'reindex.remote.whitelist', '[ "[::1]:*", "127.0.0.1:*" ]' + setting 'reindex.remote.allowlist', '[ "[::1]:*", "127.0.0.1:*" ]' extraConfigFile 'roles.yml', file('roles.yml') user username: System.getProperty('tests.rest.cluster.username', 'test_user'), diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 935fe468fdbd0..37526a924da73 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -50,7 +50,7 @@ testClusters.all { module ':modules:parent-join' module ':modules:lang-painless' // Allowlist reindexing from the local node so we can test reindex-from-remote. 
- setting 'reindex.remote.whitelist', '127.0.0.1:*' + setting 'reindex.remote.allowlist', '127.0.0.1:*' } test { diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java index 04619efb43c6c..865ae26f6f54d 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexPlugin.java @@ -133,6 +133,7 @@ public Collection createComponents( public List> getSettings() { final List> settings = new ArrayList<>(); settings.add(TransportReindexAction.REMOTE_CLUSTER_WHITELIST); + settings.add(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST); settings.addAll(ReindexSslConfig.getSettings()); return settings; } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java index d4a2ba08409e6..71c3aad8713e1 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexValidator.java @@ -70,7 +70,7 @@ class ReindexValidator { IndexNameExpressionResolver resolver, AutoCreateIndex autoCreateIndex ) { - this.remoteAllowlist = buildRemoteAllowlist(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings)); + this.remoteAllowlist = buildRemoteAllowlist(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings)); this.clusterService = clusterService; this.resolver = resolver; this.autoCreateIndex = autoCreateIndex; @@ -101,7 +101,7 @@ static void checkRemoteAllowlist(CharacterRunAutomaton allowlist, RemoteInfo rem if (allowlist.run(check)) { return; } - String allowListKey = TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(); + String allowListKey = TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(); throw new IllegalArgumentException('[' + 
check + "] not allowlisted in " + allowListKey); } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java index a24c2b002b759..c84d103a2ef6f 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/TransportReindexAction.java @@ -56,10 +56,19 @@ import static java.util.Collections.emptyList; public class TransportReindexAction extends HandledTransportAction { - public static final Setting> REMOTE_CLUSTER_WHITELIST = Setting.listSetting( + static final Setting> REMOTE_CLUSTER_WHITELIST = Setting.listSetting( "reindex.remote.whitelist", emptyList(), Function.identity(), + Property.NodeScope, + Property.Deprecated + ); + // The setting below is going to replace the above. + // To keep backwards compatibility, the old usage is remained, and it's also used as the fallback for the new usage. 
+ public static final Setting> REMOTE_CLUSTER_ALLOWLIST = Setting.listSetting( + "reindex.remote.allowlist", + REMOTE_CLUSTER_WHITELIST, + Function.identity(), Property.NodeScope ); public static Optional remoteExtension = Optional.empty(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java index df2d9894e64bb..8012b67253cb6 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWhitelistTests.java @@ -131,7 +131,7 @@ public void testUnwhitelistedRemote() { IllegalArgumentException.class, () -> checkRemoteAllowlist(buildRemoteAllowlist(allowlist), newRemoteInfo("not in list", port)) ); - assertEquals("[not in list:" + port + "] not allowlisted in reindex.remote.whitelist", e.getMessage()); + assertEquals("[not in list:" + port + "] not allowlisted in reindex.remote.allowlist", e.getMessage()); } public void testRejectMatchAll() { diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java index e78715d904574..8ce850a936557 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -99,7 +99,7 @@ protected boolean addMockHttpTransport() { protected Settings nodeSettings() { Settings.Builder settings = Settings.builder().put(super.nodeSettings()); // Allowlist reindexing from the http host we're going to use - settings.put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "127.0.0.1:*"); + settings.put(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(), "127.0.0.1:*"); 
settings.put(NetworkModule.HTTP_TYPE_KEY, Netty4Plugin.NETTY_HTTP_TRANSPORT_NAME); return settings.build(); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java new file mode 100644 index 0000000000000..8ff84223d371e --- /dev/null +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRenamedSettingTests.java @@ -0,0 +1,83 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.reindex; + +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.Arrays; +import java.util.List; + +/** + * A unit test to validate the former name of the setting 'reindex.remote.allowlist' still take effect, + * after it is deprecated, so that the backwards compatibility is maintained. + * The test can be removed along with removing support of the deprecated setting. + */ +public class ReindexRenamedSettingTests extends OpenSearchTestCase { + private final ReindexPlugin plugin = new ReindexPlugin(); + + /** + * Validate the both settings are known and supported. + */ + public void testReindexSettingsExist() { + List> settings = plugin.getSettings(); + assertTrue( + "Both 'reindex.remote.allowlist' and its predecessor should be supported settings of Reindex plugin", + settings.containsAll( + Arrays.asList(TransportReindexAction.REMOTE_CLUSTER_WHITELIST, TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST) + ) + ); + } + + /** + * Validate the default value of the both settings is the same. 
+ */ + public void testSettingFallback() { + assertEquals( + TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(Settings.EMPTY), + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(Settings.EMPTY) + ); + } + + /** + * Validate the new setting can be configured correctly, and it doesn't impact the old setting. + */ + public void testSettingGetValue() { + Settings settings = Settings.builder().put("reindex.remote.allowlist", "127.0.0.1:*").build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertEquals( + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings), + TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getDefault(Settings.EMPTY) + ); + } + + /** + * Validate the value of the old setting will be applied to the new setting, if the new setting is not configured. + */ + public void testSettingGetValueWithFallback() { + Settings settings = Settings.builder().put("reindex.remote.whitelist", "127.0.0.1:*").build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertSettingDeprecationsAndWarnings(new Setting[] { TransportReindexAction.REMOTE_CLUSTER_WHITELIST }); + } + + /** + * Validate the value of the old setting will be ignored, if the new setting is configured. 
+ */ + public void testSettingGetValueWhenBothAreConfigured() { + Settings settings = Settings.builder() + .put("reindex.remote.allowlist", "127.0.0.1:*") + .put("reindex.remote.whitelist", "[::1]:*, 127.0.0.1:*") + .build(); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.get(settings), Arrays.asList("127.0.0.1:*")); + assertEquals(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.get(settings), Arrays.asList("[::1]:*", "127.0.0.1:*")); + assertSettingDeprecationsAndWarnings(new Setting[] { TransportReindexAction.REMOTE_CLUSTER_WHITELIST }); + } + +} diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java index 96b1b5d3d2e65..124670dba9510 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RetryTests.java @@ -103,7 +103,7 @@ protected boolean addMockHttpTransport() { final Settings nodeSettings() { return Settings.builder() // allowlist reindexing from the HTTP host we're going to use - .put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "127.0.0.1:*") + .put(TransportReindexAction.REMOTE_CLUSTER_ALLOWLIST.getKey(), "127.0.0.1:*") .build(); } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml index 876d100e0bc3c..15e2397099b65 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml @@ -306,9 +306,9 @@ index: dest --- -"unwhitelisted remote host fails": +"unallowlisted remote host fails": - do: - catch: /\[badremote:9200\] not allowlisted in reindex.remote.whitelist/ + catch: /\[badremote:9200\] not allowlisted in reindex.remote.allowlist/ reindex: body: 
source: From c3712a51b33b97bda886b0f7a0cdc3f7a85ecae8 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 8 Mar 2022 11:30:27 -0600 Subject: [PATCH 11/46] [Remove] include_type_name from HLRC (#2397) Removes include_type_name from the high level reset client along with relevant deprecated methods in IndicesClient. All tests are updated to remove the parameter from the rest requests along with various toXContent methods that are no longer required. Signed-off-by: Nicholas Walter Knize --- .../org/opensearch/client/IndicesClient.java | 311 ------------------ .../client/IndicesRequestConverters.java | 142 -------- .../opensearch/client/IndicesClientIT.java | 305 ----------------- .../client/IndicesRequestConvertersTests.java | 247 +------------- .../rollover/RolloverResponseTests.java | 7 +- .../test/11_basic_with_types.yml | 96 ------ .../test/mixed_cluster/10_basic.yml | 1 - .../test/field_caps/10_basic.yml | 1 - .../test/indices.create/10_basic.yml | 17 - .../11_basic_with_types.yml | 48 --- .../test/indices.put_template/10_basic.yml | 17 - .../11_basic_with_types.yml | 74 ----- .../test/indices.rollover/40_mapping.yml | 25 -- .../search.aggregation/280_geohash_grid.yml | 1 - .../search.aggregation/290_geotile_grid.yml | 1 - .../indices/create/CreateIndexRequest.java | 33 +- .../mapping/get/GetFieldMappingsResponse.java | 46 +-- .../indices/rollover/RolloverRequest.java | 48 +-- .../get/GetIndexTemplatesResponse.java | 38 +-- .../org/opensearch/rest/BaseRestHandler.java | 7 - .../admin/indices/RestCreateIndexAction.java | 23 +- .../indices/RestGetFieldMappingAction.java | 10 - .../indices/RestGetIndexTemplateAction.java | 15 +- .../admin/indices/RestGetIndicesAction.java | 19 +- .../admin/indices/RestGetMappingAction.java | 7 - .../indices/RestPutIndexTemplateAction.java | 11 +- .../admin/indices/RestPutMappingAction.java | 12 +- .../indices/RestRolloverIndexAction.java | 5 +- .../create/CreateIndexRequestTests.java | 55 ---- 
.../get/GetFieldMappingsResponseTests.java | 74 +---- .../rollover/RolloverRequestTests.java | 39 +-- .../get/GetIndexTemplatesResponseTests.java | 32 +- .../indices/RestCreateIndexActionTests.java | 70 +--- .../indices/RestGetIndicesActionTests.java | 83 ----- .../yaml/ClientYamlTestExecutionContext.java | 63 ---- 35 files changed, 65 insertions(+), 1918 deletions(-) delete mode 100644 modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml delete mode 100644 server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java index 8889b717ab896..9b4586ec6bf89 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesClient.java @@ -361,60 +361,6 @@ public Cancellable dataStreamsStatsAsync( ); } - /** - * Creates an index using the Create Index API. - * - * @param createIndexRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The - * method {@link #create(CreateIndexRequest, RequestOptions)} should be used instead, which accepts a new - * request object. 
- */ - @Deprecated - public org.opensearch.action.admin.indices.create.CreateIndexResponse create( - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - createIndexRequest, - IndicesRequestConverters::createIndex, - options, - org.opensearch.action.admin.indices.create.CreateIndexResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously creates an index using the Create Index API. - * - * @param createIndexRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The - * method {@link #createAsync(CreateIndexRequest, RequestOptions, ActionListener)} should be used instead, - * which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable createAsync( - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - createIndexRequest, - IndicesRequestConverters::createIndex, - options, - org.opensearch.action.admin.indices.create.CreateIndexResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Updates the mappings on an index using the Put Mapping API. * @@ -497,60 +443,6 @@ public Cancellable getMappingAsync( ); } - /** - * Retrieves the field mappings on an index or indices using the Get Field Mapping API. - * - * @param getFieldMappingsRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated feature. - * The method {@link #getFieldMapping(GetFieldMappingsRequest, RequestOptions)} should be used instead, which - * accepts a new request object. - */ - @Deprecated - public org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse getFieldMapping( - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getFieldMappingsRequest, - IndicesRequestConverters::getFieldMapping, - options, - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously retrieves the field mappings on an index on indices using the Get Field Mapping API. - * - * @param getFieldMappingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses old request and response objects which still refer to types, a deprecated feature. - * The method {@link #getFieldMappingAsync(GetFieldMappingsRequest, RequestOptions, ActionListener)} should be - * used instead, which accepts a new request object. 
- * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getFieldMappingAsync( - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getFieldMappingsRequest, - IndicesRequestConverters::getFieldMapping, - options, - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Retrieves the field mappings on an index or indices using the Get Field Mapping API. * @@ -1053,53 +945,6 @@ public Cancellable existsAsync(GetIndexRequest request, RequestOptions options, ); } - /** - * Checks if the index (indices) exists or not. - * - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #exists(GetIndexRequest, RequestOptions)} should be used instead, which accepts a new request object. - */ - @Deprecated - public boolean exists(org.opensearch.action.admin.indices.get.GetIndexRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - IndicesRequestConverters::indicesExist, - options, - RestHighLevelClient::convertExistsResponse, - Collections.emptySet() - ); - } - - /** - * Asynchronously checks if the index (indices) exists or not. - * - * @param request the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This method uses an old request object which still refers to types, a deprecated feature. The method - * {@link #existsAsync(GetIndexRequest, RequestOptions, ActionListener)} should be used instead, which accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable existsAsync( - org.opensearch.action.admin.indices.get.GetIndexRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsync( - request, - IndicesRequestConverters::indicesExist, - options, - RestHighLevelClient::convertExistsResponse, - listener, - Collections.emptySet() - ); - } - /** * Shrinks an index using the Shrink Index API. * @@ -1392,59 +1237,6 @@ public Cancellable rolloverAsync(RolloverRequest rolloverRequest, RequestOptions ); } - /** - * Rolls over an index using the Rollover Index API. - * - * @param rolloverRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - * @deprecated This method uses deprecated request and response objects. - * The method {@link #rollover(RolloverRequest, RequestOptions)} should be used instead, which accepts a new request object. 
- */ - @Deprecated - public org.opensearch.action.admin.indices.rollover.RolloverResponse rollover( - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - rolloverRequest, - IndicesRequestConverters::rollover, - options, - org.opensearch.action.admin.indices.rollover.RolloverResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously rolls over an index using the Rollover Index API. - * - * @param rolloverRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * - * @deprecated This method uses deprecated request and response objects. - * The method {@link #rolloverAsync(RolloverRequest, RequestOptions, ActionListener)} should be used instead, which - * accepts a new request object. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable rolloverAsync( - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - rolloverRequest, - IndicesRequestConverters::rollover, - options, - org.opensearch.action.admin.indices.rollover.RolloverResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Gets one or more aliases using the Get Index Aliases API. * @@ -1527,57 +1319,6 @@ public Cancellable putSettingsAsync( ); } - /** - * Puts an index template using the Index Templates API. - * - * @param putIndexTemplateRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * @deprecated This old form of request allows types in mappings. Use {@link #putTemplate(PutIndexTemplateRequest, RequestOptions)} - * instead which introduces a new request object without types. - */ - @Deprecated - public AcknowledgedResponse putTemplate( - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - putIndexTemplateRequest, - IndicesRequestConverters::putTemplate, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously puts an index template using the Index Templates API. - * - * @param putIndexTemplateRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This old form of request allows types in mappings. - * Use {@link #putTemplateAsync(PutIndexTemplateRequest, RequestOptions, ActionListener)} - * instead which introduces a new request object without types. - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable putTemplateAsync( - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - putIndexTemplateRequest, - IndicesRequestConverters::putTemplate, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Puts an index template using the Index Templates API. 
* @@ -1749,31 +1490,6 @@ public Cancellable validateQueryAsync( ); } - /** - * Gets index templates using the Index Templates API. The mappings will be returned in a legacy deprecated format, where the - * mapping definition is nested under the type name. - * - * @param getIndexTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * @deprecated This method uses an old response object which still refers to types, a deprecated feature. Use - * {@link #getIndexTemplate(GetIndexTemplatesRequest, RequestOptions)} instead which returns a new response object - */ - @Deprecated - public org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate( - GetIndexTemplatesRequest getIndexTemplatesRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getIndexTemplatesRequest, - IndicesRequestConverters::getTemplatesWithDocumentTypes, - options, - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse::fromXContent, - emptySet() - ); - } - /** * Gets index templates using the Index Templates API * @@ -1837,33 +1553,6 @@ public GetIndexTemplatesResponse getIndexTemplate(GetIndexTemplatesRequest getIn ); } - /** - * Asynchronously gets index templates using the Index Templates API. The mappings will be returned in a legacy deprecated format, - * where the mapping definition is nested under the type name. - * - * @param getIndexTemplatesRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @deprecated This method uses an old response object which still refers to types, a deprecated feature. Use - * {@link #getIndexTemplateAsync(GetIndexTemplatesRequest, RequestOptions, ActionListener)} instead which returns a new response object - * @return cancellable that may be used to cancel the request - */ - @Deprecated - public Cancellable getTemplateAsync( - GetIndexTemplatesRequest getIndexTemplatesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getIndexTemplatesRequest, - IndicesRequestConverters::getTemplatesWithDocumentTypes, - options, - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse::fromXContent, - listener, - emptySet() - ); - } - /** * Asynchronously gets index templates using the Index Templates API * diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java index a3c9b2a99c058..c50ea58982e4e 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java @@ -78,8 +78,6 @@ import java.io.IOException; import java.util.Locale; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - final class IndicesRequestConverters { private IndicesRequestConverters() {} @@ -165,20 +163,6 @@ static Request createIndex(CreateIndexRequest createIndexRequest) throws IOExcep return request; } - static Request createIndex(org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest) throws IOException { - String endpoint = RequestConverters.endpoint(createIndexRequest.indices()); - Request request = 
new Request(HttpPut.METHOD_NAME, endpoint); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withTimeout(createIndexRequest.timeout()); - parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); - parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException { Request request = new Request(HttpPost.METHOD_NAME, "/_aliases"); @@ -234,30 +218,6 @@ static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) return request; } - @Deprecated - static Request getFieldMapping(org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest) { - String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); - String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types(); - String[] fields = getFieldMappingsRequest.fields() == null ? 
Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); - - String endpoint = new RequestConverters.EndpointBuilder().addCommaSeparatedPathParts(indices) - .addPathPartAsIs("_mapping") - .addCommaSeparatedPathParts(types) - .addPathPartAsIs("field") - .addCommaSeparatedPathParts(fields) - .build(); - - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); - parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); - parameters.withLocal(getFieldMappingsRequest.local()); - parameters.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(parameters.asMap()); - return request; - } - static Request refresh(RefreshRequest refreshRequest) { String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh")); @@ -415,27 +375,6 @@ static Request rollover(RolloverRequest rolloverRequest) throws IOException { return request; } - @Deprecated - static Request rollover(org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest) throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getRolloverTarget()) - .addPathPartAsIs("_rollover") - .addPathPart(rolloverRequest.getNewIndexName()) - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withTimeout(rolloverRequest.timeout()); - params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); - params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); - if (rolloverRequest.isDryRun()) { - params.putParam("dry_run", Boolean.TRUE.toString()); - } - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - 
request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - request.addParameters(params.asMap()); - return request; - } - static Request getSettings(GetSettingsRequest getSettingsRequest) { String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices(); String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names(); @@ -452,28 +391,6 @@ static Request getSettings(GetSettingsRequest getSettingsRequest) { return request; } - /** - * converter for the legacy server-side {@link org.opensearch.action.admin.indices.get.GetIndexRequest} that - * still supports types - */ - @Deprecated - static Request getIndex(org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest) { - String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices(); - - String endpoint = RequestConverters.endpoint(indices); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withIndicesOptions(getIndexRequest.indicesOptions()); - params.withLocal(getIndexRequest.local()); - params.withIncludeDefaults(getIndexRequest.includeDefaults()); - params.withHuman(getIndexRequest.humanReadable()); - params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - return request; - } - static Request getIndex(GetIndexRequest getIndexRequest) { String[] indices = getIndexRequest.indices() == null ? 
Strings.EMPTY_ARRAY : getIndexRequest.indices(); @@ -490,28 +407,6 @@ static Request getIndex(GetIndexRequest getIndexRequest) { return request; } - /** - * converter for the legacy server-side {@link org.opensearch.action.admin.indices.get.GetIndexRequest} that - * still supports types - */ - @Deprecated - static Request indicesExist(org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest) { - if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { - throw new IllegalArgumentException("indices are mandatory"); - } - String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), ""); - Request request = new Request(HttpHead.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - params.withLocal(getIndexRequest.local()); - params.withHuman(getIndexRequest.humanReadable()); - params.withIndicesOptions(getIndexRequest.indicesOptions()); - params.withIncludeDefaults(getIndexRequest.includeDefaults()); - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - return request; - } - static Request indicesExist(GetIndexRequest getIndexRequest) { if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { throw new IllegalArgumentException("indices are mandatory"); @@ -542,31 +437,6 @@ static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) thr return request; } - /** - * @deprecated This uses the old form of PutIndexTemplateRequest which uses types. 
- * Use (@link {@link #putTemplate(PutIndexTemplateRequest)} instead - */ - @Deprecated - static Request putTemplate(org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putIndexTemplateRequest) - throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") - .addPathPart(putIndexTemplateRequest.name()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); - if (putIndexTemplateRequest.create()) { - params.putParam("create", Boolean.TRUE.toString()); - } - if (Strings.hasText(putIndexTemplateRequest.cause())) { - params.putParam("cause", putIndexTemplateRequest.cause()); - } - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - request.addParameters(params.asMap()); - request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") .addPathPart(putIndexTemplateRequest.name()) @@ -652,16 +522,7 @@ static Request getAlias(GetAliasesRequest getAliasesRequest) { return request; } - @Deprecated - static Request getTemplatesWithDocumentTypes(GetIndexTemplatesRequest getIndexTemplatesRequest) { - return getTemplates(getIndexTemplatesRequest, true); - } - static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) { - return getTemplates(getIndexTemplatesRequest, false); - } - - private static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest, boolean includeTypeName) { final String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") .addCommaSeparatedPathParts(getIndexTemplatesRequest.names()) .build(); @@ -669,9 
+530,6 @@ private static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRe final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(getIndexTemplatesRequest.isLocal()); params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); - if (includeTypeName) { - params.putParam(INCLUDE_TYPE_NAME_PARAMETER, "true"); - } request.addParameters(params.asMap()); return request; } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java index 1a87557530860..f9c8851f8839e 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java @@ -122,14 +122,9 @@ import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.index.IndexSettings; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestStatus; -import org.opensearch.rest.action.admin.indices.RestCreateIndexAction; -import org.opensearch.rest.action.admin.indices.RestGetIndexTemplateAction; -import org.opensearch.rest.action.admin.indices.RestPutIndexTemplateAction; -import org.opensearch.rest.action.admin.indices.RestRolloverIndexAction; import java.io.IOException; import java.util.Arrays; @@ -197,18 +192,6 @@ public void testIndicesExists() throws IOException { } } - public void testIndicesExistsWithTypes() throws IOException { - // Index present - String indexName = "test_index_exists_index_present"; - createIndex(indexName, Settings.EMPTY); - - org.opensearch.action.admin.indices.get.GetIndexRequest request = new org.opensearch.action.admin.indices.get.GetIndexRequest(); - request.indices(indexName); - - boolean response = 
execute(request, highLevelClient().indices()::exists, highLevelClient().indices()::existsAsync); - assertTrue(response); - } - @SuppressWarnings({ "unchecked", "rawtypes" }) public void testCreateIndex() throws IOException { { @@ -273,74 +256,6 @@ public void testCreateIndex() throws IOException { } } - @SuppressWarnings({ "unchecked", "rawtypes" }) - public void testCreateIndexWithTypes() throws IOException { - { - // Create index - String indexName = "plain_index"; - assertFalse(indexExists(indexName)); - - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = - new org.opensearch.action.admin.indices.create.CreateIndexRequest(indexName); - - org.opensearch.action.admin.indices.create.CreateIndexResponse createIndexResponse = execute( - createIndexRequest, - highLevelClient().indices()::create, - highLevelClient().indices()::createAsync, - expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(createIndexResponse.isAcknowledged()); - - assertTrue(indexExists(indexName)); - } - { - // Create index with mappings, aliases and settings - String indexName = "rich_index"; - assertFalse(indexExists(indexName)); - - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = - new org.opensearch.action.admin.indices.create.CreateIndexRequest(indexName); - - Alias alias = new Alias("alias_name"); - alias.filter("{\"term\":{\"year\":2016}}"); - alias.routing("1"); - createIndexRequest.alias(alias); - - Settings.Builder settings = Settings.builder(); - settings.put(SETTING_NUMBER_OF_REPLICAS, 2); - createIndexRequest.settings(settings); - - XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); - mappingBuilder.startObject().startObject("properties").startObject("field"); - mappingBuilder.field("type", "text"); - mappingBuilder.endObject().endObject().endObject(); - createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappingBuilder); - - 
org.opensearch.action.admin.indices.create.CreateIndexResponse createIndexResponse = execute( - createIndexRequest, - highLevelClient().indices()::create, - highLevelClient().indices()::createAsync, - expectWarningsOnce(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(createIndexResponse.isAcknowledged()); - - Map getIndexResponse = getAsMap(indexName); - assertEquals("2", XContentMapValues.extractValue(indexName + ".settings.index.number_of_replicas", getIndexResponse)); - - Map aliasData = (Map) XContentMapValues.extractValue( - indexName + ".aliases.alias_name", - getIndexResponse - ); - assertNotNull(aliasData); - assertEquals("1", aliasData.get("index_routing")); - Map filter = (Map) aliasData.get("filter"); - Map term = (Map) filter.get("term"); - assertEquals(2016, term.get("year")); - - assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings.properties.field.type", getIndexResponse)); - } - } - public void testGetSettings() throws IOException { String indexName = "get_settings_index"; Settings basicSettings = Settings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build(); @@ -1179,33 +1094,6 @@ public void testRollover() throws IOException { } } - public void testRolloverWithTypes() throws IOException { - highLevelClient().indices().create(new CreateIndexRequest("test").alias(new Alias("alias")), RequestOptions.DEFAULT); - highLevelClient().index(new IndexRequest("test").id("1").source("field", "value"), RequestOptions.DEFAULT); - highLevelClient().index( - new IndexRequest("test").id("2").source("field", "value").setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL), - RequestOptions.DEFAULT - ); - - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest = - new org.opensearch.action.admin.indices.rollover.RolloverRequest("alias", "test_new"); - rolloverRequest.addMaxIndexDocsCondition(1); - rolloverRequest.getCreateIndexRequest().mapping("_doc", "field2", "type=keyword"); - - 
org.opensearch.action.admin.indices.rollover.RolloverResponse rolloverResponse = execute( - rolloverRequest, - highLevelClient().indices()::rollover, - highLevelClient().indices()::rolloverAsync, - expectWarningsOnce(RestRolloverIndexAction.TYPES_DEPRECATION_MESSAGE) - ); - assertTrue(rolloverResponse.isRolledOver()); - assertFalse(rolloverResponse.isDryRun()); - Map conditionStatus = rolloverResponse.getConditionStatus(); - assertTrue(conditionStatus.get("[max_docs: 1]")); - assertEquals("test", rolloverResponse.getOldIndex()); - assertEquals("test_new", rolloverResponse.getNewIndex()); - } - public void testGetAlias() throws IOException { { createIndex("index1", Settings.EMPTY); @@ -1686,48 +1574,6 @@ public void testPutTemplateWithTypesUsingUntypedAPI() throws Exception { ); } - @SuppressWarnings("unchecked") - public void testPutTemplateWithNoTypesUsingTypedApi() throws Exception { - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplateRequest = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("my-template") - .patterns(Arrays.asList("pattern-1", "name-*")) - .order(10) - .create(randomBoolean()) - .settings(Settings.builder().put("number_of_shards", "3").put("number_of_replicas", "0")) - .mapping( - "my_doc_type", - // Note that the declared type is missing from the mapping - "{ " - + "\"properties\":{" - + "\"host_name\": {\"type\":\"keyword\"}," - + "\"description\": {\"type\":\"text\"}" - + "}" - + "}", - XContentType.JSON - ) - .alias(new Alias("alias-1").indexRouting("abc")) - .alias(new Alias("{index}-write").searchRouting("xyz")); - - AcknowledgedResponse putTemplateResponse = execute( - putTemplateRequest, - highLevelClient().indices()::putTemplate, - highLevelClient().indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(putTemplateResponse.isAcknowledged(), equalTo(true)); - - Map templates = 
getAsMap("/_template/my-template"); - assertThat(templates.keySet(), hasSize(1)); - assertThat(extractValue("my-template.order", templates), equalTo(10)); - assertThat(extractRawValues("my-template.index_patterns", templates), contains("pattern-1", "name-*")); - assertThat(extractValue("my-template.settings.index.number_of_shards", templates), equalTo("3")); - assertThat(extractValue("my-template.settings.index.number_of_replicas", templates), equalTo("0")); - assertThat(extractValue("my-template.mappings.properties.host_name.type", templates), equalTo("keyword")); - assertThat(extractValue("my-template.mappings.properties.description.type", templates), equalTo("text")); - assertThat((Map) extractValue("my-template.aliases.alias-1", templates), hasEntry("index_routing", "abc")); - assertThat((Map) extractValue("my-template.aliases.{index}-write", templates), hasEntry("search_routing", "xyz")); - } - public void testPutTemplateBadRequests() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1809,157 +1655,6 @@ public void testInvalidValidateQuery() throws IOException { assertFalse(response.isValid()); } - // Tests the deprecated form of the API that returns templates with doc types (using the server-side's GetIndexTemplateResponse) - public void testCRUDIndexTemplateWithTypes() throws Exception { - RestHighLevelClient client = highLevelClient(); - - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplate1 = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("template-1") - .patterns(Arrays.asList("pattern-1", "name-1")) - .alias(new Alias("alias-1")); - assertThat( - execute( - putTemplate1, - client.indices()::putTemplate, - client.indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).isAcknowledged(), - equalTo(true) - ); - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplate2 = - new 
org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name("template-2") - .patterns(Arrays.asList("pattern-2", "name-2")) - .mapping("custom_doc_type", "name", "type=text") - .settings(Settings.builder().put("number_of_shards", "2").put("number_of_replicas", "0")); - assertThat( - execute( - putTemplate2, - client.indices()::putTemplate, - client.indices()::putTemplateAsync, - expectWarningsOnce(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).isAcknowledged(), - equalTo(true) - ); - - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate1 = execute( - new GetIndexTemplatesRequest("template-1"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getTemplate1.getIndexTemplates(), hasSize(1)); - org.opensearch.cluster.metadata.IndexTemplateMetadata template1 = getTemplate1.getIndexTemplates().get(0); - assertThat(template1.name(), equalTo("template-1")); - assertThat(template1.patterns(), contains("pattern-1", "name-1")); - assertTrue(template1.aliases().containsKey("alias-1")); - - // Check the typed version of the call - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getTemplate2 = execute( - new GetIndexTemplatesRequest("template-2"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getTemplate2.getIndexTemplates(), hasSize(1)); - org.opensearch.cluster.metadata.IndexTemplateMetadata template2 = getTemplate2.getIndexTemplates().get(0); - assertThat(template2.name(), equalTo("template-2")); - assertThat(template2.patterns(), contains("pattern-2", "name-2")); - assertTrue(template2.aliases().isEmpty()); - assertThat(template2.settings().get("index.number_of_shards"), equalTo("2")); - assertThat(template2.settings().get("index.number_of_replicas"), 
equalTo("0")); - // Ugly deprecated form of API requires use of doc type to get at mapping object which is CompressedXContent - assertTrue(template2.mappings().containsKey("custom_doc_type")); - - List names = randomBoolean() ? Arrays.asList("*plate-1", "template-2") : Arrays.asList("template-*"); - GetIndexTemplatesRequest getBothRequest = new GetIndexTemplatesRequest(names); - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getBoth = execute( - getBothRequest, - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getBoth.getIndexTemplates(), hasSize(2)); - assertThat( - getBoth.getIndexTemplates().stream().map(org.opensearch.cluster.metadata.IndexTemplateMetadata::getName).toArray(), - arrayContainingInAnyOrder("template-1", "template-2") - ); - - GetIndexTemplatesRequest getAllRequest = new GetIndexTemplatesRequest(); - org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse getAll = execute( - getAllRequest, - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ); - assertThat(getAll.getIndexTemplates().size(), greaterThanOrEqualTo(2)); - assertThat( - getAll.getIndexTemplates() - .stream() - .map(org.opensearch.cluster.metadata.IndexTemplateMetadata::getName) - .collect(Collectors.toList()), - hasItems("template-1", "template-2") - ); - - assertTrue( - execute(new DeleteIndexTemplateRequest("template-1"), client.indices()::deleteTemplate, client.indices()::deleteTemplateAsync) - .isAcknowledged() - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute(new GetIndexTemplatesRequest("template-1"), client.indices()::getTemplate, client.indices()::getTemplateAsync) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute( - new 
DeleteIndexTemplateRequest("template-1"), - client.indices()::deleteTemplate, - client.indices()::deleteTemplateAsync - ) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - - assertThat( - execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).getIndexTemplates(), - hasSize(1) - ); - assertThat( - execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ).getIndexTemplates().get(0).name(), - equalTo("template-2") - ); - - assertTrue( - execute(new DeleteIndexTemplateRequest("template-*"), client.indices()::deleteTemplate, client.indices()::deleteTemplateAsync) - .isAcknowledged() - ); - assertThat( - expectThrows( - OpenSearchException.class, - () -> execute( - new GetIndexTemplatesRequest("template-*"), - client.indices()::getTemplate, - client.indices()::getTemplateAsync, - expectWarningsOnce(RestGetIndexTemplateAction.TYPES_DEPRECATION_MESSAGE) - ) - ).status(), - equalTo(RestStatus.NOT_FOUND) - ); - } - public void testCRUDIndexTemplate() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java index b464a5dd3619c..7276cbb44b030 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java @@ -96,7 +96,6 @@ import static org.opensearch.index.RandomCreateIndexGenerator.randomAlias; import static org.opensearch.index.RandomCreateIndexGenerator.randomIndexSettings; import static 
org.opensearch.index.alias.RandomAliasActionsGenerator.randomAliasAction; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.nullValue; @@ -140,40 +139,6 @@ public void testIndicesExistEmptyIndices() { ); } - public void testIndicesExistEmptyIndicesWithTypes() { - LuceneTestCase.expectThrows( - IllegalArgumentException.class, - () -> IndicesRequestConverters.indicesExist(new org.opensearch.action.admin.indices.get.GetIndexRequest()) - ); - LuceneTestCase.expectThrows( - IllegalArgumentException.class, - () -> IndicesRequestConverters.indicesExist( - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices((String[]) null) - ) - ); - } - - public void testIndicesExistWithTypes() { - String[] indices = RequestConvertersTests.randomIndicesNames(1, 10); - - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest = - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices(indices); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); - RequestConvertersTests.setRandomLocal(getIndexRequest::local, expectedParams); - RequestConvertersTests.setRandomHumanReadable(getIndexRequest::humanReadable, expectedParams); - RequestConvertersTests.setRandomIncludeDefaults(getIndexRequest::includeDefaults, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - final Request request = IndicesRequestConverters.indicesExist(getIndexRequest); - - Assert.assertEquals(HttpHead.METHOD_NAME, request.getMethod()); - Assert.assertEquals("/" + String.join(",", indices), request.getEndpoint()); - Assert.assertThat(expectedParams, equalTo(request.getParameters())); - Assert.assertNull(request.getEntity()); - } - public void testCreateIndex() throws IOException { CreateIndexRequest createIndexRequest = 
RandomCreateIndexGenerator.randomCreateIndexRequest(); @@ -189,23 +154,6 @@ public void testCreateIndex() throws IOException { RequestConvertersTests.assertToXContentBody(createIndexRequest, request.getEntity()); } - public void testCreateIndexWithTypes() throws IOException { - org.opensearch.action.admin.indices.create.CreateIndexRequest createIndexRequest = org.opensearch.index.RandomCreateIndexGenerator - .randomCreateIndexRequest(); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomTimeout(createIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(createIndexRequest, expectedParams); - RequestConvertersTests.setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.createIndex(createIndexRequest); - Assert.assertEquals("/" + createIndexRequest.index(), request.getEndpoint()); - Assert.assertEquals(expectedParams, request.getParameters()); - Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - RequestConvertersTests.assertToXContentBody(createIndexRequest, request.getEntity()); - } - public void testCreateIndexNullIndex() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new CreateIndexRequest(null)); assertEquals(e.getMessage(), "The index name cannot be null."); @@ -332,67 +280,6 @@ public void testGetFieldMapping() { Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); } - public void testGetFieldMappingWithTypes() { - org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest getFieldMappingsRequest = - new org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsRequest(); - - String[] indices = Strings.EMPTY_ARRAY; - if (randomBoolean()) { - indices = RequestConvertersTests.randomIndicesNames(0, 5); - getFieldMappingsRequest.indices(indices); - 
} else if (randomBoolean()) { - getFieldMappingsRequest.indices((String[]) null); - } - - String type = null; - if (randomBoolean()) { - type = randomAlphaOfLengthBetween(3, 10); - getFieldMappingsRequest.types(type); - } else if (randomBoolean()) { - getFieldMappingsRequest.types((String[]) null); - } - - String[] fields = null; - if (randomBoolean()) { - fields = new String[randomIntBetween(1, 5)]; - for (int i = 0; i < fields.length; i++) { - fields[i] = randomAlphaOfLengthBetween(3, 10); - } - getFieldMappingsRequest.fields(fields); - } else if (randomBoolean()) { - getFieldMappingsRequest.fields((String[]) null); - } - - Map expectedParams = new HashMap<>(); - - RequestConvertersTests.setRandomIndicesOptions( - getFieldMappingsRequest::indicesOptions, - getFieldMappingsRequest::indicesOptions, - expectedParams - ); - RequestConvertersTests.setRandomLocal(getFieldMappingsRequest::local, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - Request request = IndicesRequestConverters.getFieldMapping(getFieldMappingsRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - if (type != null) { - endpoint.add(type); - } - endpoint.add("field"); - if (fields != null) { - endpoint.add(String.join(",", fields)); - } - Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - - Assert.assertThat(expectedParams, equalTo(request.getParameters())); - Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - } - public void testPutDataStream() { String name = randomAlphaOfLength(10); CreateDataStreamRequest createDataStreamRequest = new CreateDataStreamRequest(name); @@ -525,41 +412,6 @@ public void testGetIndex() throws IOException { Assert.assertThat(request.getEntity(), nullValue()); } - public void testGetIndexWithTypes() throws IOException { - String[] indicesUnderTest = 
OpenSearchTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5); - - org.opensearch.action.admin.indices.get.GetIndexRequest getIndexRequest = - new org.opensearch.action.admin.indices.get.GetIndexRequest().indices(indicesUnderTest); - - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(getIndexRequest, expectedParams); - RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); - RequestConvertersTests.setRandomLocal(getIndexRequest::local, expectedParams); - RequestConvertersTests.setRandomHumanReadable(getIndexRequest::humanReadable, expectedParams); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - - if (OpenSearchTestCase.randomBoolean()) { - // the request object will not have include_defaults present unless it is set to - // true - getIndexRequest.includeDefaults(OpenSearchTestCase.randomBoolean()); - if (getIndexRequest.includeDefaults()) { - expectedParams.put("include_defaults", Boolean.toString(true)); - } - } - - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indicesUnderTest != null && indicesUnderTest.length > 0) { - endpoint.add(String.join(",", indicesUnderTest)); - } - - Request request = IndicesRequestConverters.getIndex(getIndexRequest); - - Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - Assert.assertThat(request.getParameters(), equalTo(expectedParams)); - Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); - Assert.assertThat(request.getEntity(), nullValue()); - } - public void testDeleteIndexEmptyIndices() { String[] indices = OpenSearchTestCase.randomBoolean() ? 
null : Strings.EMPTY_ARRAY; ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate(); @@ -906,51 +758,6 @@ public void testRollover() throws IOException { Assert.assertEquals(expectedParams, request.getParameters()); } - public void testRolloverWithTypes() throws IOException { - org.opensearch.action.admin.indices.rollover.RolloverRequest rolloverRequest = - new org.opensearch.action.admin.indices.rollover.RolloverRequest( - OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10), - OpenSearchTestCase.randomBoolean() ? null : OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10) - ); - Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomTimeout(rolloverRequest::timeout, rolloverRequest.timeout(), expectedParams); - RequestConvertersTests.setRandomMasterTimeout(rolloverRequest, expectedParams); - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.dryRun(OpenSearchTestCase.randomBoolean()); - if (rolloverRequest.isDryRun()) { - expectedParams.put("dry_run", "true"); - } - } - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.addMaxIndexAgeCondition(new TimeValue(OpenSearchTestCase.randomNonNegativeLong())); - } - if (OpenSearchTestCase.randomBoolean()) { - String type = OpenSearchTestCase.randomAlphaOfLengthBetween(3, 10); - rolloverRequest.getCreateIndexRequest().mapping(type, org.opensearch.index.RandomCreateIndexGenerator.randomMapping(type)); - } - if (OpenSearchTestCase.randomBoolean()) { - org.opensearch.index.RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest()); - } - if (OpenSearchTestCase.randomBoolean()) { - rolloverRequest.getCreateIndexRequest().settings(org.opensearch.index.RandomCreateIndexGenerator.randomIndexSettings()); - } - RequestConvertersTests.setRandomWaitForActiveShards(rolloverRequest.getCreateIndexRequest()::waitForActiveShards, expectedParams); - - Request request = 
IndicesRequestConverters.rollover(rolloverRequest); - if (rolloverRequest.getNewIndexName() == null) { - Assert.assertEquals("/" + rolloverRequest.getRolloverTarget() + "/_rollover", request.getEndpoint()); - } else { - Assert.assertEquals( - "/" + rolloverRequest.getRolloverTarget() + "/_rollover/" + rolloverRequest.getNewIndexName(), - request.getEndpoint() - ); - } - Assert.assertEquals(HttpPost.METHOD_NAME, request.getMethod()); - RequestConvertersTests.assertToXContentBody(rolloverRequest, request.getEntity()); - Assert.assertEquals(expectedParams, request.getParameters()); - } - public void testGetAlias() { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); @@ -1015,57 +822,6 @@ public void testIndexPutSettings() throws IOException { Assert.assertEquals(expectedParams, request.getParameters()); } - public void testPutTemplateRequestWithTypes() throws Exception { - Map names = new HashMap<>(); - names.put("log", "log"); - names.put("template#1", "template%231"); - names.put("-#template", "-%23template"); - names.put("foo^bar", "foo%5Ebar"); - - org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest putTemplateRequest = - new org.opensearch.action.admin.indices.template.put.PutIndexTemplateRequest().name( - OpenSearchTestCase.randomFrom(names.keySet()) - ).patterns(Arrays.asList(OpenSearchTestCase.generateRandomStringArray(20, 100, false, false))); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.order(OpenSearchTestCase.randomInt()); - } - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.version(OpenSearchTestCase.randomInt()); - } - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.settings( - Settings.builder().put("setting-" + OpenSearchTestCase.randomInt(), OpenSearchTestCase.randomTimeValue()) - ); - } - Map expectedParams = new HashMap<>(); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.mapping( - "doc-" + OpenSearchTestCase.randomInt(), - "field-" + 
OpenSearchTestCase.randomInt(), - "type=" + OpenSearchTestCase.randomFrom("text", "keyword") - ); - } - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); - if (OpenSearchTestCase.randomBoolean()) { - putTemplateRequest.alias(new Alias("alias-" + OpenSearchTestCase.randomInt())); - } - if (OpenSearchTestCase.randomBoolean()) { - expectedParams.put("create", Boolean.TRUE.toString()); - putTemplateRequest.create(true); - } - if (OpenSearchTestCase.randomBoolean()) { - String cause = OpenSearchTestCase.randomUnicodeOfCodepointLengthBetween(1, 50); - putTemplateRequest.cause(cause); - expectedParams.put("cause", cause); - } - RequestConvertersTests.setRandomMasterTimeout(putTemplateRequest, expectedParams); - - Request request = IndicesRequestConverters.putTemplate(putTemplateRequest); - Assert.assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name()))); - Assert.assertThat(request.getParameters(), equalTo(expectedParams)); - RequestConvertersTests.assertToXContentBody(putTemplateRequest, request.getEntity()); - } - public void testPutTemplateRequest() throws Exception { Map names = new HashMap<>(); names.put("log", "log"); @@ -1164,8 +920,7 @@ public void testGetTemplateRequest() throws Exception { RequestConvertersTests.setRandomMasterTimeout(getTemplatesRequest::setMasterNodeTimeout, expectedParams); RequestConvertersTests.setRandomLocal(getTemplatesRequest::setLocal, expectedParams); - Request request = IndicesRequestConverters.getTemplatesWithDocumentTypes(getTemplatesRequest); - expectedParams.put(INCLUDE_TYPE_NAME_PARAMETER, "true"); + Request request = IndicesRequestConverters.getTemplates(getTemplatesRequest); Assert.assertThat( request.getEndpoint(), equalTo("/_template/" + names.stream().map(encodes::get).collect(Collectors.joining(","))) diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java 
b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java index 7577aa66bfcde..0c924bc06046c 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/rollover/RolloverResponseTests.java @@ -38,11 +38,8 @@ import org.opensearch.action.admin.indices.rollover.MaxSizeCondition; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.rest.BaseRestHandler; -import org.opensearch.common.xcontent.ToXContent.Params; import java.io.IOException; import java.util.ArrayList; @@ -51,7 +48,6 @@ import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.Collections; import static org.opensearch.test.AbstractXContentTestCase.xContentTester; @@ -94,7 +90,6 @@ private Predicate getRandomFieldsExcludeFilter() { } private static void toXContent(RolloverResponse response, XContentBuilder builder) throws IOException { - Params params = new ToXContent.MapParams(Collections.singletonMap(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, "false")); org.opensearch.action.admin.indices.rollover.RolloverResponse serverResponse = new org.opensearch.action.admin.indices.rollover.RolloverResponse( response.getOldIndex(), @@ -105,6 +100,6 @@ private static void toXContent(RolloverResponse response, XContentBuilder builde response.isAcknowledged(), response.isShardsAcknowledged() ); - serverResponse.toXContent(builder, params); + serverResponse.toXContent(builder, null); } } diff --git a/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml b/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml deleted file mode 
100644 index 896d2d514bcb9..0000000000000 --- a/modules/percolator/src/test/resources/rest-api-spec/test/11_basic_with_types.yml +++ /dev/null @@ -1,96 +0,0 @@ ---- -"Test percolator basics via rest": - - - do: - indices.create: - include_type_name: true - index: queries_index - body: - mappings: - queries_type: - properties: - query: - type: percolator - foo: - type: keyword - - - do: - indices.create: - include_type_name: true - index: documents_index - body: - mappings: - documents_type: - properties: - foo: - type: keyword - - - do: - index: - index: queries_index - type: queries_type - id: test_percolator - body: - query: - match_all: {} - - - do: - index: - index: documents_index - type: documents_type - id: some_id - body: - foo: bar - - - do: - indices.refresh: {} - - - do: - search: - rest_total_hits_as_int: true - body: - - query: - percolate: - field: query - document: - document_type: queries_type - foo: bar - - match: { hits.total: 1 } - - - do: - msearch: - rest_total_hits_as_int: true - body: - - index: queries_index - - query: - percolate: - field: query - document_type: queries_type - document: - foo: bar - - match: { responses.0.hits.total: 1 } - - - do: - search: - rest_total_hits_as_int: true - body: - - query: - percolate: - field: query - index: documents_index - type: documents_type - id: some_id - - match: { hits.total: 1 } - - - do: - msearch: - rest_total_hits_as_int: true - body: - - index: queries_index - - query: - percolate: - field: query - index: documents_index - type: documents_type - id: some_id - - match: { responses.0.hits.total: 1 } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml index 1956cd56e6850..f83c098e05741 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml +++ 
b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml @@ -78,5 +78,4 @@ - do: indices.get: index: queries - include_type_name: false - match: { queries.mappings.properties.id.type: "keyword" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml index f1ae5c89e52a5..45e9a969c5982 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml @@ -151,7 +151,6 @@ setup: "Get date_nanos field caps": - do: indices.create: - include_type_name: false index: test_nanos body: mappings: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml index ce8a6604069ed..0f8c7a7a68f07 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -112,20 +112,3 @@ properties: "": type: keyword - ---- -"Create index with explicit _doc type": - - do: - catch: bad_request - indices.create: - index: test_index - body: - mappings: - _doc: - properties: - field: - type: keyword - - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." 
} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml deleted file mode 100644 index 0ecf304b1ce70..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/11_basic_with_types.yml +++ /dev/null @@ -1,48 +0,0 @@ -setup: - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: keyword - ---- -"Get template": - - - do: - indices.get_template: - include_type_name: true - name: test - - - match: {test.index_patterns: ["test-*"]} - - match: {test.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} - - match: {test.mappings: {_doc: {properties: {field: {type: keyword}}}}} - ---- -"Get template with no mappings": - - - do: - indices.put_template: - name: test_no_mappings - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - - - do: - indices.get_template: - include_type_name: true - name: test_no_mappings - - - match: {test_no_mappings.index_patterns: ["test-*"]} - - match: {test_no_mappings.settings: {index: {number_of_shards: '1', number_of_replicas: '0'}}} - - match: {test_no_mappings.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml index 0b4e34d2740b5..5b40ad0771c70 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/10_basic.yml @@ -226,20 +226,3 @@ indices.put_template: name: test body: {} - ---- -"Put template with explicit _doc type": - - do: - catch: bad_request - indices.put_template: - 
name: test - body: - index_patterns: test-* - mappings: - _doc: - properties: - field: - type: keyword - - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml deleted file mode 100644 index fde28db3c691d..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_template/11_basic_with_types.yml +++ /dev/null @@ -1,74 +0,0 @@ ---- -"Put template": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: keyword - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.index_patterns: ["test-*"]} - - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} - - match: {test.mappings: {_doc: {properties: {field: {type: keyword}}}}} - ---- -"Put multiple template": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: [test-*, test2-*] - settings: - number_of_shards: 1 - number_of_replicas: 0 - mappings: - _doc: - properties: - field: - type: text - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.index_patterns: ["test-*", "test2-*"]} - - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}} - - match: {test.mappings: {_doc: {properties: {field: {type: text}}}}} - ---- -"Put template with empty mappings": - - do: - indices.put_template: - include_type_name: true - name: test - body: - index_patterns: test-* - settings: - 
number_of_shards: 1 - number_of_replicas: 0 - mappings: {} - - - do: - indices.get_template: - include_type_name: true - name: test - flat_settings: true - - - match: {test.mappings: {}} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml index 040ffd534c0ab..b669c2ab75176 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/40_mapping.yml @@ -37,28 +37,3 @@ - match: { conditions: { "[max_docs: 2]": true } } - match: { rolled_over: true } - ---- -"Mappings with explicit _doc type": - - do: - indices.create: - index: logs-1 - body: - aliases: - logs_search: {} - - - do: - catch: bad_request - indices.rollover: - alias: "logs_search" - body: - conditions: - max_docs: 2 - mappings: - _doc: - properties: - field: - type: keyword - - - match: { error.caused_by.type: "illegal_argument_exception" } - - match: { error.caused_by.reason: "The mapping definition cannot be nested under a type [_doc] unless include_type_name is set to true." 
} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml index 534e552fc0ea2..1368c87a77d7e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml @@ -1,7 +1,6 @@ setup: - do: indices.create: - include_type_name: false index: test_1 body: settings: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml index 2db498a0cacf0..dfd5b6c5f2583 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml @@ -4,7 +4,6 @@ setup: reason: "added in 7.0.0" - do: indices.create: - include_type_name: false index: test_1 body: settings: diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index b4cab8cea2554..b8a3b284273ae 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -53,7 +53,6 @@ import org.opensearch.common.xcontent.DeprecationHandler; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; -import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; @@ -61,7 +60,6 @@ import 
org.opensearch.common.xcontent.XContentType; import java.io.IOException; -import java.io.InputStream; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -82,7 +80,7 @@ * @see org.opensearch.client.Requests#createIndexRequest(String) * @see CreateIndexResponse */ -public class CreateIndexRequest extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { +public class CreateIndexRequest extends AcknowledgedRequest implements IndicesRequest { public static final ParseField MAPPINGS = new ParseField("mappings"); public static final ParseField SETTINGS = new ParseField("settings"); @@ -483,33 +481,4 @@ public void writeTo(StreamOutput out) throws IOException { } waitForActiveShards.writeTo(out); } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - innerToXContent(builder, params); - builder.endObject(); - return builder; - } - - public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(SETTINGS.getPreferredName()); - settings.toXContent(builder, params); - builder.endObject(); - - builder.startObject(MAPPINGS.getPreferredName()); - for (Map.Entry entry : mappings.entrySet()) { - try (InputStream stream = new BytesArray(entry.getValue()).streamInput()) { - builder.rawField(entry.getKey(), stream, XContentType.JSON); - } - } - builder.endObject(); - - builder.startObject(ALIASES.getPreferredName()); - for (Alias alias : aliases) { - alias.toXContent(builder, params); - } - builder.endObject(); - return builder; - } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index d486a102d1a21..713c842e07dad 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -47,7 +47,6 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.Mapper; -import org.opensearch.rest.BaseRestHandler; import java.io.IOException; import java.io.InputStream; @@ -58,8 +57,6 @@ import static java.util.Collections.unmodifiableMap; import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; /** * Response object for {@link GetFieldMappingsRequest} API @@ -100,6 +97,7 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte }, MAPPINGS, ObjectParser.ValueType.OBJECT); } + // todo remove middle `type` level private final Map>> mappings; GetFieldMappingsResponse(Map>> mappings) { @@ -154,28 +152,18 @@ public FieldMappingMetadata fieldMappings(String index, String type, String fiel @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeTypeName = params.paramAsBoolean(BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - builder.startObject(); for (Map.Entry>> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); builder.startObject(MAPPINGS.getPreferredName()); - if (includeTypeName == false) { - Map mappings = null; - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - assert mappings == null; - mappings = typeEntry.getValue(); - } - if (mappings != null) { - addFieldMappingsToBuilder(builder, params, mappings); - } - } else { - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - 
builder.startObject(typeEntry.getKey()); - addFieldMappingsToBuilder(builder, params, typeEntry.getValue()); - builder.endObject(); - } + Map mappings = null; + for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { + assert mappings == null; + mappings = typeEntry.getValue(); + } + if (mappings != null) { + addFieldMappingsToBuilder(builder, params, mappings); } builder.endObject(); @@ -194,24 +182,6 @@ private void addFieldMappingsToBuilder(XContentBuilder builder, Params params, M } } - public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - - final Map>> mappings = new HashMap<>(); - if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - while (parser.currentToken() == XContentParser.Token.FIELD_NAME) { - final String index = parser.currentName(); - - final Map> typeMappings = PARSER.parse(parser, index); - mappings.put(index, typeMappings); - - parser.nextToken(); - } - } - - return new GetFieldMappingsResponse(mappings); - } - public static class FieldMappingMetadata implements ToXContentFragment { public static final FieldMappingMetadata NULL = new FieldMappingMetadata("", BytesArray.EMPTY); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java index 94028f315a704..f06cb599a60df 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java @@ -43,8 +43,6 @@ import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.ObjectParser; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; import 
org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.mapper.MapperService; @@ -60,9 +58,9 @@ * Note: there is a new class with the same name for the Java HLRC that uses a typeless format. * Any changes done to this class should also go to that client class. */ -public class RolloverRequest extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { +public class RolloverRequest extends AcknowledgedRequest implements IndicesRequest { - private static final ObjectParser PARSER = new ObjectParser<>("rollover"); + private static final ObjectParser PARSER = new ObjectParser<>("rollover"); private static final ObjectParser>, Void> CONDITION_PARSER = new ObjectParser<>("conditions"); private static final ParseField CONDITIONS = new ParseField("conditions"); @@ -97,24 +95,13 @@ public class RolloverRequest extends AcknowledgedRequest implem CreateIndexRequest.SETTINGS, ObjectParser.ValueType.OBJECT ); - PARSER.declareField((parser, request, includeTypeName) -> { - if (includeTypeName) { - for (Map.Entry mappingsEntry : parser.map().entrySet()) { - request.createIndexRequest.mapping(mappingsEntry.getKey(), (Map) mappingsEntry.getValue()); - } - } else { - // a type is not included, add a dummy _doc type - Map mappings = parser.map(); - if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { - throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." 
- ); - } - request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappings); + PARSER.declareField((parser, request, context) -> { + // a type is not included, add a dummy _doc type + Map mappings = parser.map(); + if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { + throw new IllegalArgumentException("The mapping definition cannot be nested under a type"); } + request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappings); }, CreateIndexRequest.MAPPINGS, ObjectParser.ValueType.OBJECT); PARSER.declareField( (parser, request, context) -> request.createIndexRequest.aliases(parser.map()), @@ -273,23 +260,8 @@ public CreateIndexRequest getCreateIndexRequest() { return createIndexRequest; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - createIndexRequest.innerToXContent(builder, params); - - builder.startObject(CONDITIONS.getPreferredName()); - for (Condition condition : conditions.values()) { - condition.toXContent(builder, params); - } - builder.endObject(); - - builder.endObject(); - return builder; - } - // param isTypeIncluded decides how mappings should be parsed from XContent - public void fromXContent(boolean isTypeIncluded, XContentParser parser) throws IOException { - PARSER.parse(parser, this, isTypeIncluded); + public void fromXContent(XContentParser parser) throws IOException { + PARSER.parse(parser, this, null); } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java index 5a596b090133f..e6d487e0a40b3 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponse.java @@ -38,15 +38,13 @@ 
import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Objects; import static java.util.Collections.singletonMap; -import static org.opensearch.rest.BaseRestHandler.DEFAULT_INCLUDE_TYPE_NAME_POLICY; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; public class GetIndexTemplatesResponse extends ActionResponse implements ToXContentObject { @@ -57,7 +55,7 @@ public GetIndexTemplatesResponse(StreamInput in) throws IOException { int size = in.readVInt(); indexTemplates = new ArrayList<>(); for (int i = 0; i < size; i++) { - indexTemplates.add(0, IndexTemplateMetadata.readFrom(in)); + indexTemplates.add(IndexTemplateMetadata.readFrom(in)); } } @@ -77,32 +75,28 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetIndexTemplatesResponse that = (GetIndexTemplatesResponse) o; + return Objects.equals(indexTemplates, that.indexTemplates); + } + + @Override + public int hashCode() { + return Objects.hash(indexTemplates); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { params = new ToXContent.DelegatingMapParams(singletonMap("reduce_mappings", "true"), params); - boolean includeTypeName = params.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - builder.startObject(); for (IndexTemplateMetadata indexTemplateMetadata : getIndexTemplates()) { - if (includeTypeName) { - IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, params); - } else { - IndexTemplateMetadata.Builder.toXContent(indexTemplateMetadata, builder, params); 
- } + IndexTemplateMetadata.Builder.toXContent(indexTemplateMetadata, builder, params); } builder.endObject(); return builder; } - - public static GetIndexTemplatesResponse fromXContent(XContentParser parser) throws IOException { - final List templates = new ArrayList<>(); - for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { - if (token == XContentParser.Token.FIELD_NAME) { - final IndexTemplateMetadata templateMetadata = IndexTemplateMetadata.Builder.fromXContent(parser, parser.currentName()); - templates.add(templateMetadata); - } - } - return new GetIndexTemplatesResponse(templates); - } } diff --git a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java index f2e345314ee10..4ee209111bdcb 100644 --- a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java @@ -80,13 +80,6 @@ public abstract class BaseRestHandler implements RestHandler { @Deprecated protected Logger logger = LogManager.getLogger(getClass()); - /** - * Parameter that controls whether certain REST apis should include type names in their requests or responses. - * Note: Support for this parameter will be removed after the transition period to typeless APIs. 
- */ - public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; - public static final boolean DEFAULT_INCLUDE_TYPE_NAME_POLICY = false; - public final long getUsageCount() { return usageCount.sum(); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java index 6cc72388758a2..5b628bc094c41 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -35,7 +35,6 @@ import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.support.ActiveShardCount; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.index.mapper.MapperService; @@ -53,9 +52,6 @@ import static org.opensearch.rest.RestRequest.Method.PUT; public class RestCreateIndexAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCreateIndexAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in create " - + "index requests is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { @@ -69,17 +65,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("create_index_with_types", TYPES_DEPRECATION_MESSAGE); - } - CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index")); if (request.hasContent()) { Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - sourceAsMap = prepareMappings(sourceAsMap, includeTypeName); + sourceAsMap = prepareMappings(sourceAsMap); createIndexRequest.source(sourceAsMap, LoggingDeprecationHandler.INSTANCE); } @@ -89,8 +79,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.admin().indices().create(createIndexRequest, new RestToXContentListener<>(channel)); } - static Map prepareMappings(Map source, boolean includeTypeName) { - if (includeTypeName || source.containsKey("mappings") == false || (source.get("mappings") instanceof Map) == false) { + static Map prepareMappings(Map source) { + if (source.containsKey("mappings") == false || (source.get("mappings") instanceof Map) == false) { return source; } @@ -99,12 +89,7 @@ static Map prepareMappings(Map source, boolean i @SuppressWarnings("unchecked") Map mappings = (Map) source.get("mappings"); if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { - throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." 
- ); + throw new IllegalArgumentException("The mapping definition cannot be nested under a type"); } newSource.put("mappings", singletonMap(MapperService.SINGLE_MAPPING_NAME, mappings)); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 8b04e0b66dfae..292ed5e560848 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -63,8 +63,6 @@ public class RestGetFieldMappingAction extends BaseRestHandler { private static final Logger logger = LogManager.getLogger(RestGetFieldMappingAction.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " - + "field mapping requests is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { @@ -85,14 +83,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC getMappingsRequest.indices(indices).fields(fields).includeDefaults(request.paramAsBoolean("include_defaults", false)); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (includeTypeName) { - throw new IllegalArgumentException(INCLUDE_TYPE_NAME_PARAMETER + " no longer supports the value [true]."); - } - deprecationLogger.deprecate("get_field_mapping_with_types", TYPES_DEPRECATION_MESSAGE); - } - if (request.hasParam("local")) { deprecationLogger.deprecate( "get_field_mapping_local", diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java index de985cb9c372b..71e7ed098cf8d 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndexTemplateAction.java @@ -36,16 +36,13 @@ import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.set.Sets; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestStatus; import org.opensearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; @@ -61,13 +58,6 @@ 
*/ public class RestGetIndexTemplateAction extends BaseRestHandler { - private static final Set RESPONSE_PARAMETERS = Collections.unmodifiableSet( - Sets.union(Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER), Settings.FORMAT_PARAMS) - ); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndexTemplateAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying include_type_name in get index template requests is deprecated."; - @Override public List routes() { return unmodifiableList( @@ -85,9 +75,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final String[] names = Strings.splitStringByCommaToArray(request.param("name")); final GetIndexTemplatesRequest getIndexTemplatesRequest = new GetIndexTemplatesRequest(names); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("get_index_template_include_type_name", TYPES_DEPRECATION_MESSAGE); - } getIndexTemplatesRequest.local(request.paramAsBoolean("local", getIndexTemplatesRequest.local())); getIndexTemplatesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getIndexTemplatesRequest.masterNodeTimeout())); @@ -106,7 +93,7 @@ protected RestStatus getStatus(final GetIndexTemplatesResponse response) { @Override protected Set responseParams() { - return RESPONSE_PARAMETERS; + return Settings.FORMAT_PARAMS; } } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java index 0647221c8b6a0..37c8162c6d31b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -36,18 +36,14 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import 
org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; import static java.util.Arrays.asList; import static java.util.Collections.unmodifiableList; @@ -59,15 +55,6 @@ */ public class RestGetIndicesAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndicesAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using `include_type_name` in get indices requests" - + " is deprecated. The parameter will be removed in the next major version."; - - private static final Set allowedResponseParameters = Collections.unmodifiableSet( - Stream.concat(Collections.singleton(INCLUDE_TYPE_NAME_PARAMETER).stream(), Settings.FORMAT_PARAMS.stream()) - .collect(Collectors.toSet()) - ); - @Override public List routes() { return unmodifiableList(asList(new Route(GET, "/{index}"), new Route(HEAD, "/{index}"))); @@ -81,10 +68,6 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - // starting with 7.0 we don't include types by default in the response to GET requests - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER) && request.method().equals(GET)) { - deprecationLogger.deprecate("get_indices_with_types", TYPES_DEPRECATION_MESSAGE); - } final GetIndexRequest getIndexRequest = new GetIndexRequest(); getIndexRequest.indices(indices); getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, 
getIndexRequest.indicesOptions())); @@ -101,6 +84,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC */ @Override protected Set responseParams() { - return allowedResponseParameters; + return Settings.FORMAT_PARAMS; } } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java index f196eb4e41d6d..f4f33905408e7 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetMappingAction.java @@ -32,8 +32,6 @@ package org.opensearch.rest.action.admin.indices; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.opensearch.OpenSearchTimeoutException; import org.opensearch.action.ActionRunnable; import org.opensearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -41,7 +39,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.rest.BaseRestHandler; @@ -61,10 +58,6 @@ import static org.opensearch.rest.RestRequest.Method.GET; public class RestGetMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetMappingAction.class); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get" - + " mapping requests is deprecated. 
The parameter will be removed in the next major version."; private final ThreadPool threadPool; diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index da89691c60c9d..f17ac495b494b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -53,11 +53,7 @@ import static org.opensearch.rest.RestRequest.Method.PUT; public class RestPutIndexTemplateAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutIndexTemplateAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" - + " Specifying include_type_name in put index template requests is deprecated." - + " The parameter will be removed in the next major version."; @Override public List routes() { @@ -71,12 +67,7 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name")); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("put_index_template_with_types", TYPES_DEPRECATION_MESSAGE); - } if (request.hasParam("template")) { deprecationLogger.deprecate( "put_index_template_deprecated_parameter", @@ -92,7 +83,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC putRequest.cause(request.param("cause", "")); Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - sourceAsMap = 
RestCreateIndexAction.prepareMappings(sourceAsMap, includeTypeName); + sourceAsMap = RestCreateIndexAction.prepareMappings(sourceAsMap); putRequest.source(sourceAsMap); return channel -> client.admin().indices().putTemplate(putRequest, new RestToXContentListener<>(channel)); diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java index 19043db9aa186..f65dea1ebe3d2 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java @@ -36,7 +36,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.BaseRestHandler; @@ -54,9 +53,6 @@ import static org.opensearch.rest.RestRequest.Method.PUT; public class RestPutMappingAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutMappingAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in put " - + "mapping requests is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { @@ -79,15 +75,9 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); - - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { - deprecationLogger.deprecate("put_mapping_with_types", TYPES_DEPRECATION_MESSAGE); - } - Map sourceAsMap = XContentHelper.convertToMap(request.requiredContent(), false, request.getXContentType()).v2(); - if (includeTypeName == false && MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap)) { + if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, sourceAsMap)) { throw new IllegalArgumentException("Types cannot be provided in put mapping requests"); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java index bfa34b1bea763..08b84cc6fe6cc 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -65,12 +65,11 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final boolean includeTypeName = request.paramAsBoolean(INCLUDE_TYPE_NAME_PARAMETER, DEFAULT_INCLUDE_TYPE_NAME_POLICY); - if (request.hasParam(INCLUDE_TYPE_NAME_PARAMETER)) { + if (request.hasParam("include_type_name")) { deprecationLogger.deprecate("index_rollover_with_types", TYPES_DEPRECATION_MESSAGE); } RolloverRequest 
rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index")); - request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(includeTypeName, parser)); + request.applyContentParser(parser -> rolloverIndexRequest.fromXContent(parser)); rolloverIndexRequest.dryRun(request.paramAsBoolean("dry_run", false)); rolloverIndexRequest.timeout(request.paramAsTime("timeout", rolloverIndexRequest.timeout())); rolloverIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", rolloverIndexRequest.masterNodeTimeout())); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java index cfe3e9779314f..de69be636c327 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -35,11 +35,9 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.common.Strings; -import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.XContentBuilder; @@ -47,16 +45,12 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.RandomCreateIndexGenerator; import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; import 
java.util.Map; import java.util.Set; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.CoreMatchers.equalTo; public class CreateIndexRequestTests extends OpenSearchTestCase { @@ -101,36 +95,6 @@ public void testTopLevelKeys() { assertEquals("unknown key [FOO_SHOULD_BE_ILLEGAL_HERE] for create index", e.getMessage()); } - public void testToXContent() throws IOException { - CreateIndexRequest request = new CreateIndexRequest("foo"); - - String mapping; - if (randomBoolean()) { - mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("my_type").endObject().endObject()); - } else { - mapping = Strings.toString(JsonXContent.contentBuilder().startObject().endObject()); - } - request.mapping("my_type", mapping, XContentType.JSON); - - Alias alias = new Alias("test_alias"); - alias.routing("1"); - alias.filter("{\"term\":{\"year\":2016}}"); - alias.writeIndex(true); - request.alias(alias); - - Settings.Builder settings = Settings.builder(); - settings.put(SETTING_NUMBER_OF_SHARDS, 10); - request.settings(settings); - - String actualRequestBody = Strings.toString(request); - - String expectedRequestBody = "{\"settings\":{\"index\":{\"number_of_shards\":\"10\"}}," - + "\"mappings\":{\"my_type\":{\"my_type\":{}}}," - + "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\",\"is_write_index\":true}}}"; - - assertEquals(expectedRequestBody, actualRequestBody); - } - public void testMappingKeyedByType() throws IOException { CreateIndexRequest request1 = new CreateIndexRequest("foo"); CreateIndexRequest request2 = new CreateIndexRequest("bar"); @@ -196,25 +160,6 @@ public void testMappingKeyedByType() throws IOException { } } - public void testToAndFromXContent() throws IOException { - - final CreateIndexRequest createIndexRequest = RandomCreateIndexGenerator.randomCreateIndexRequest(); - 
- boolean humanReadable = randomBoolean(); - final XContentType xContentType = randomFrom(XContentType.values()); - BytesReference originalBytes = toShuffledXContent(createIndexRequest, xContentType, EMPTY_PARAMS, humanReadable); - - CreateIndexRequest parsedCreateIndexRequest = new CreateIndexRequest(); - parsedCreateIndexRequest.source(originalBytes, xContentType); - - assertMappingsEqual(createIndexRequest.mappings(), parsedCreateIndexRequest.mappings()); - assertAliasesEqual(createIndexRequest.aliases(), parsedCreateIndexRequest.aliases()); - assertEquals(createIndexRequest.settings(), parsedCreateIndexRequest.settings()); - - BytesReference finalBytes = toShuffledXContent(parsedCreateIndexRequest, xContentType, EMPTY_PARAMS, humanReadable); - OpenSearchAssertions.assertToXContentEquivalent(originalBytes, finalBytes, xContentType); - } - public void testSettingsType() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject().startArray("settings").endArray().endObject(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 2c1ed98bbaeac..99e4b5a2cca89 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -37,22 +37,14 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; -import 
org.opensearch.test.AbstractSerializingTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import java.util.function.Predicate; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.hamcrest.CoreMatchers.equalTo; - -public class GetFieldMappingsResponseTests extends AbstractSerializingTestCase { +public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCase { public void testManualSerialization() throws IOException { Map>> mappings = new HashMap<>(); @@ -71,51 +63,6 @@ public void testManualSerialization() throws IOException { } } - public void testManualJunkedJson() throws Exception { - // in fact random fields could be evaluated as proper mapping, while proper junk in this case is arrays and values - final String json = "{\"index1\":{\"mappings\":" - + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," - // junk here - + "\"junk1\": [\"field1\", {\"field2\":{}}]," - + "\"junk2\": [{\"field3\":{}}]," - + "\"junk3\": 42," - + "\"junk4\": \"Q\"," - + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}," - + "\"index0\":{\"mappings\":" - + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," - + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," - + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}}"; - - final XContentParser parser = XContentType.JSON.xContent() - .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, 
json.getBytes("UTF-8")); - - final GetFieldMappingsResponse response = GetFieldMappingsResponse.fromXContent(parser); - - FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{\"type\":\"keyword\"}")); - Map fieldMapping = new HashMap<>(); - fieldMapping.put("field0", fieldMappingMetadata); - fieldMapping.put("field1", fieldMappingMetadata); - - Map> typeMapping = new HashMap<>(); - typeMapping.put("doctype0", fieldMapping); - typeMapping.put("doctype1", fieldMapping); - - Map>> mappings = new HashMap<>(); - mappings.put("index0", typeMapping); - mappings.put("index1", typeMapping); - - final Map>> responseMappings = response.mappings(); - assertThat(responseMappings, equalTo(mappings)); - } - - @Override - protected GetFieldMappingsResponse doParseInstance(XContentParser parser) throws IOException { - return GetFieldMappingsResponse.fromXContent(parser); - } - @Override protected GetFieldMappingsResponse createTestInstance() { return new GetFieldMappingsResponse(randomMapping()); @@ -126,23 +73,6 @@ protected Writeable.Reader instanceReader() { return GetFieldMappingsResponse::new; } - @Override - protected Predicate getRandomFieldsExcludeFilter() { - // allow random fields at the level of `index` and `index.mappings.doctype.field` - // otherwise random field could be evaluated as index name or type name - return s -> false == (s.matches("(?[^.]+)") - || s.matches("(?[^.]+)\\.mappings\\.(?[^.]+)\\.(?[^.]+)")); - } - - /** - * For xContent roundtrip testing we force the xContent output to still contain types because the parser - * still expects them. The new typeless parsing is implemented in the client side GetFieldMappingsResponse. 
- */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(INCLUDE_TYPE_NAME_PARAMETER, "true")); - } - private Map>> randomMapping() { Map>> mappings = new HashMap<>(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java index 8e5c3d9f59a86..1e8dc2f031058 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -34,7 +34,6 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.admin.indices.create.CreateIndexRequest; -import org.opensearch.action.admin.indices.create.CreateIndexRequestTests; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; @@ -54,7 +53,6 @@ import org.opensearch.indices.IndicesModule; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.XContentTestUtils; -import org.opensearch.test.hamcrest.OpenSearchAssertions; import java.io.IOException; import org.junit.Before; @@ -64,7 +62,6 @@ import java.util.Map; import java.util.function.Consumer; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.equalTo; public class RolloverRequestTests extends OpenSearchTestCase { @@ -87,7 +84,7 @@ public void testConditionsParsing() throws Exception { .field("max_size", "45gb") .endObject() .endObject(); - request.fromXContent(false, createParser(builder)); + request.fromXContent(createParser(builder)); Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(3)); MaxAgeCondition maxAgeCondition = (MaxAgeCondition) conditions.get(MaxAgeCondition.NAME); @@ -107,7 
+104,6 @@ public void testParsingWithIndexSettings() throws Exception { .field("max_docs", 100) .endObject() .startObject("mappings") - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "string") @@ -115,7 +111,6 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject() .endObject() - .endObject() .startObject("settings") .field("number_of_shards", 10) .endObject() @@ -124,7 +119,7 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject() .endObject(); - request.fromXContent(true, createParser(builder)); + request.fromXContent(createParser(builder)); Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(2)); assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1)); @@ -145,8 +140,7 @@ public void testTypelessMappingParsing() throws Exception { .endObject() .endObject(); - boolean includeTypeName = false; - request.fromXContent(includeTypeName, createParser(builder)); + request.fromXContent(createParser(builder)); CreateIndexRequest createIndexRequest = request.getCreateIndexRequest(); String mapping = createIndexRequest.mappings().get(MapperService.SINGLE_MAPPING_NAME); @@ -182,27 +176,6 @@ public void testSerialize() throws Exception { } } - public void testToAndFromXContent() throws IOException { - RolloverRequest rolloverRequest = createTestItem(); - - final XContentType xContentType = randomFrom(XContentType.values()); - boolean humanReadable = randomBoolean(); - BytesReference originalBytes = toShuffledXContent(rolloverRequest, xContentType, EMPTY_PARAMS, humanReadable); - - RolloverRequest parsedRolloverRequest = new RolloverRequest(); - parsedRolloverRequest.fromXContent(true, createParser(xContentType.xContent(), originalBytes)); - - CreateIndexRequest createIndexRequest = rolloverRequest.getCreateIndexRequest(); - CreateIndexRequest parsedCreateIndexRequest = parsedRolloverRequest.getCreateIndexRequest(); - 
CreateIndexRequestTests.assertMappingsEqual(createIndexRequest.mappings(), parsedCreateIndexRequest.mappings()); - CreateIndexRequestTests.assertAliasesEqual(createIndexRequest.aliases(), parsedCreateIndexRequest.aliases()); - assertEquals(createIndexRequest.settings(), parsedCreateIndexRequest.settings()); - assertEquals(rolloverRequest.getConditions(), parsedRolloverRequest.getConditions()); - - BytesReference finalBytes = toShuffledXContent(parsedRolloverRequest, xContentType, EMPTY_PARAMS, humanReadable); - OpenSearchAssertions.assertToXContentEquivalent(originalBytes, finalBytes, xContentType); - } - public void testUnknownFields() throws IOException { final RolloverRequest request = new RolloverRequest(); XContentType xContentType = randomFrom(XContentType.values()); @@ -215,7 +188,7 @@ public void testUnknownFields() throws IOException { } builder.endObject(); BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, BytesReference.bytes(builder), null, random()); - expectThrows(XContentParseException.class, () -> request.fromXContent(false, createParser(xContentType.xContent(), mutated))); + expectThrows(XContentParseException.class, () -> request.fromXContent(createParser(xContentType.xContent(), mutated))); } public void testSameConditionCanOnlyBeAddedOnce() { @@ -244,8 +217,8 @@ public void testValidation() { private static RolloverRequest createTestItem() throws IOException { RolloverRequest rolloverRequest = new RolloverRequest(); if (randomBoolean()) { - String type = randomAlphaOfLengthBetween(3, 10); - rolloverRequest.getCreateIndexRequest().mapping(type, RandomCreateIndexGenerator.randomMapping(type)); + rolloverRequest.getCreateIndexRequest() + .mapping(MapperService.SINGLE_MAPPING_NAME, RandomCreateIndexGenerator.randomMapping(MapperService.SINGLE_MAPPING_NAME)); } if (randomBoolean()) { RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest()); diff --git 
a/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java index 6f30781ab9bbe..7f62861d4f332 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/template/get/GetIndexTemplatesResponseTests.java @@ -32,31 +32,23 @@ package org.opensearch.action.admin.indices.template.get; -import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.opensearch.cluster.metadata.AliasMetadata; import org.opensearch.cluster.metadata.IndexTemplateMetadata; +import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.test.AbstractXContentTestCase; +import org.opensearch.test.AbstractWireSerializingTestCase; import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; import static org.hamcrest.Matchers.equalTo; -public class GetIndexTemplatesResponseTests extends AbstractXContentTestCase { - @Override - protected GetIndexTemplatesResponse doParseInstance(XContentParser parser) throws IOException { - return GetIndexTemplatesResponse.fromXContent(parser); - } +public class GetIndexTemplatesResponseTests extends AbstractWireSerializingTestCase { @Override protected GetIndexTemplatesResponse createTestInstance() { @@ -80,7 +72,7 @@ protected GetIndexTemplatesResponse createTestInstance() { } if (randomBoolean()) { try { - templateBuilder.putMapping("doc", 
"{\"doc\":{\"properties\":{\"type\":\"text\"}}}"); + templateBuilder.putMapping("doc", "{\"properties\":{\"type\":\"text\"}}"); } catch (IOException ex) { throw new UncheckedIOException(ex); } @@ -91,20 +83,8 @@ protected GetIndexTemplatesResponse createTestInstance() { } @Override - protected boolean supportsUnknownFields() { - // We can not inject anything at the top level because a GetIndexTemplatesResponse is serialized as a map - // from template name to template content. IndexTemplateMetadataTests already covers situations where we - // inject arbitrary things inside the IndexTemplateMetadata. - return false; - } - - /** - * For now, we only unit test the legacy typed responses. This will soon no longer be the case, - * as we introduce support for typeless xContent parsing in {@link GetFieldMappingsResponse}. - */ - @Override - protected ToXContent.Params getToXContentParams() { - return new ToXContent.MapParams(Collections.singletonMap(INCLUDE_TYPE_NAME_PARAMETER, "true")); + protected Writeable.Reader instanceReader() { + return GetIndexTemplatesResponse::new; } @Override diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java index 2400a59df6021..707210abad948 100644 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestCreateIndexActionTests.java @@ -32,48 +32,16 @@ package org.opensearch.rest.action.admin.indices; -import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import 
org.opensearch.test.rest.RestActionTestCase; -import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.util.HashMap; import java.util.Map; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestCreateIndexActionTests extends RestActionTestCase { - private RestCreateIndexAction action; - - @Before - public void setupAction() { - action = new RestCreateIndexAction(); - controller().registerHandler(action); - } - - public void testIncludeTypeName() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index") - .withParams(params) - .build(); - - action.prepareRequest(deprecatedRequest, mock(NodeClient.class)); - assertWarnings(RestCreateIndexAction.TYPES_DEPRECATION_MESSAGE); - - RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) - .withPath("/some_index") - .build(); - action.prepareRequest(validRequest, mock(NodeClient.class)); - } +public class RestCreateIndexActionTests extends OpenSearchTestCase { public void testPrepareTypelessRequest() throws IOException { XContentBuilder content = XContentFactory.jsonBuilder() @@ -95,8 +63,7 @@ public void testPrepareTypelessRequest() throws IOException { .endObject(); Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = false; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName); + Map source = RestCreateIndexAction.prepareMappings(contentAsMap); XContentBuilder expectedContent = XContentFactory.jsonBuilder() .startObject() @@ -126,34 +93,6 @@ public void testPrepareTypelessRequest() throws IOException { 
assertEquals(expectedContentAsMap, source); } - public void testPrepareTypedRequest() throws IOException { - XContentBuilder content = XContentFactory.jsonBuilder() - .startObject() - .startObject("mappings") - .startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "keyword") - .endObject() - .startObject("field2") - .field("type", "text") - .endObject() - .endObject() - .endObject() - .endObject() - .startObject("aliases") - .startObject("read_alias") - .endObject() - .endObject() - .endObject(); - - Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = true; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName); - - assertEquals(contentAsMap, source); - } - public void testMalformedMappings() throws IOException { XContentBuilder content = XContentFactory.jsonBuilder() .startObject() @@ -166,8 +105,7 @@ public void testMalformedMappings() throws IOException { Map contentAsMap = XContentHelper.convertToMap(BytesReference.bytes(content), true, content.contentType()).v2(); - boolean includeTypeName = false; - Map source = RestCreateIndexAction.prepareMappings(contentAsMap, includeTypeName); + Map source = RestCreateIndexAction.prepareMappings(contentAsMap); assertEquals(contentAsMap, source); } } diff --git a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java b/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java deleted file mode 100644 index 374b2cb0e8636..0000000000000 --- a/server/src/test/java/org/opensearch/rest/action/admin/indices/RestGetIndicesActionTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.rest.action.admin.indices; - -import org.opensearch.client.node.NodeClient; -import org.opensearch.rest.RestRequest; -import org.opensearch.test.rest.FakeRestRequest; -import org.opensearch.test.rest.RestActionTestCase; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; -import static org.mockito.Mockito.mock; - -public class RestGetIndicesActionTests extends RestActionTestCase { - - /** - * Test that setting the "include_type_name" parameter raises a warning for the GET request - */ - public void testIncludeTypeNamesWarning() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) - .withPath("/some_index") - .withParams(params) - .build(); - - RestGetIndicesAction handler = new RestGetIndicesAction(); - handler.prepareRequest(request, mock(NodeClient.class)); - 
assertWarnings(RestGetIndicesAction.TYPES_DEPRECATION_MESSAGE); - - // the same request without the parameter should pass without warning - request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET).withPath("/some_index").build(); - handler.prepareRequest(request, mock(NodeClient.class)); - } - - /** - * Test that setting the "include_type_name" parameter doesn't raises a warning if the HEAD method is used (indices.exists) - */ - public void testIncludeTypeNamesWarningExists() throws IOException { - Map params = new HashMap<>(); - params.put(INCLUDE_TYPE_NAME_PARAMETER, randomFrom("true", "false")); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.HEAD) - .withPath("/some_index") - .withParams(params) - .build(); - - RestGetIndicesAction handler = new RestGetIndicesAction(); - handler.prepareRequest(request, mock(NodeClient.class)); - } -} diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 9a611231a9fa0..4c3a1ec863d31 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -39,7 +39,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.client.NodeSelector; import org.opensearch.common.bytes.BytesReference; @@ -54,8 +53,6 @@ import java.util.List; import java.util.Map; -import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER; - /** * Execution context passed across the REST tests. * Holds the REST client used to communicate with opensearch. 
@@ -121,10 +118,6 @@ public ClientYamlTestResponse callApi( } } - if (esVersion().before(LegacyESVersion.V_7_0_0)) { - adaptRequestForOlderVersion(apiName, bodies, requestParams); - } - HttpEntity entity = createEntity(bodies, requestHeaders); try { response = callApiInternal(apiName, requestParams, entity, requestHeaders, nodeSelector); @@ -140,62 +133,6 @@ public ClientYamlTestResponse callApi( } } - /** - * To allow tests to run against a mixed 7.x/6.x cluster, we make certain modifications to the - * request related to types. - * - * Specifically, we generally use typeless index creation and document writes in test set-up code. - * This functionality is supported in 7.x, but is not supported in 6.x (or is not the default - * behavior). Here we modify the request so that it will work against a 6.x node. - */ - private void adaptRequestForOlderVersion(String apiName, List> bodies, Map requestParams) { - // For index creations, we specify 'include_type_name=false' if it is not explicitly set. This - // allows us to omit the parameter in the test description, while still being able to communicate - // with 6.x nodes where include_type_name defaults to 'true'. - if (apiName.equals("indices.create") && requestParams.containsKey(INCLUDE_TYPE_NAME_PARAMETER) == false) { - requestParams.put(INCLUDE_TYPE_NAME_PARAMETER, "false"); - } - - // We add the type to the document API requests if it's not already included. - if ((apiName.equals("index") || apiName.equals("update") || apiName.equals("delete") || apiName.equals("get")) - && requestParams.containsKey("type") == false) { - requestParams.put("type", "_doc"); - } - - // We also add the type to the bulk API requests if it's not already included. The type can either - // be on the request parameters or in the action metadata in the body of the request so we need to - // be sensitive to both scenarios. 
- if (apiName.equals("bulk") && requestParams.containsKey("type") == false) { - if (requestParams.containsKey("index")) { - requestParams.put("type", "_doc"); - } else { - for (int i = 0; i < bodies.size(); i++) { - Map body = bodies.get(i); - Map actionMetadata; - if (body.containsKey("index")) { - actionMetadata = (Map) body.get("index"); - i++; - } else if (body.containsKey("create")) { - actionMetadata = (Map) body.get("create"); - i++; - } else if (body.containsKey("update")) { - actionMetadata = (Map) body.get("update"); - i++; - } else if (body.containsKey("delete")) { - actionMetadata = (Map) body.get("delete"); - } else { - // action metadata is malformed so leave it malformed since - // the test is probably testing for malformed action metadata - continue; - } - if (actionMetadata.containsKey("_type") == false) { - actionMetadata.put("_type", "_doc"); - } - } - } - } - } - private HttpEntity createEntity(List> bodies, Map headers) throws IOException { if (bodies.isEmpty()) { return null; From 044f53630a08b0021097e422c9f31f15fb98219d Mon Sep 17 00:00:00 2001 From: "Daniel Doubrovkine (dB.)" Date: Tue, 8 Mar 2022 14:48:51 -0500 Subject: [PATCH 12/46] Set target and source compatibility to 11, required by Lucene 9. (#2407) * Set target and source compatibility to 11, required by Lucene 9. Signed-off-by: dblock * Uncomment commented code in #2321 for killing child processes that uses JDK9+ ProcessInfo. Signed-off-by: dblock * Set distribution checker target JDK compatibility to 11. Signed-off-by: dblock * Supress processing warnings. 
Signed-off-by: dblock --- DEVELOPER_GUIDE.md | 27 ++++++++- build.gradle | 2 +- buildSrc/build.gradle | 4 +- .../org/opensearch/gradle/ReaperPlugin.java | 29 ++-------- .../org/opensearch/gradle/ReaperService.java | 6 +- .../gradle/testclusters/OpenSearchNode.java | 56 ++++++++++--------- .../SymbolicLinkPreservingUntarTransform.java | 6 +- .../gradle/vagrant/VagrantBasePlugin.java | 3 +- .../src/main/resources/minimumRuntimeVersion | 2 +- client/rest/build.gradle | 4 +- client/sniffer/build.gradle | 4 +- client/test/build.gradle | 4 +- .../tools/java-version-checker/build.gradle | 5 +- libs/core/build.gradle | 6 +- server/build.gradle | 6 +- 15 files changed, 82 insertions(+), 82 deletions(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 6e3886a04e6da..58444441e3258 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -61,7 +61,19 @@ Fork [opensearch-project/OpenSearch](https://github.com/opensearch-project/OpenS #### JDK 11 -OpenSearch builds using Java 11 at a minimum. This means you must have a JDK 11 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 11 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-11`. +OpenSearch builds using Java 11 at a minimum, using the Adoptium distribution. This means you must have a JDK 11 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 11 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-11`. This is configured in [buildSrc/build.gradle](buildSrc/build.gradle) and [distribution/tools/java-version-checker/build.gradle](distribution/tools/java-version-checker/build.gradle). + +``` +allprojects { + targetCompatibility = JavaVersion.VERSION_11 + sourceCompatibility = JavaVersion.VERSION_11 +} +``` + +``` +sourceCompatibility = JavaVersion.VERSION_11 +targetCompatibility = JavaVersion.VERSION_11 +``` Download Java 11 from [here](https://adoptium.net/releases.html?variant=openjdk11). 
@@ -69,9 +81,18 @@ Download Java 11 from [here](https://adoptium.net/releases.html?variant=openjdk1 To run the full suite of tests, download and install [JDK 14](https://jdk.java.net/archive/) and set `JAVA11_HOME`, and `JAVA14_HOME`. They are required by the [backwards compatibility test](./TESTING.md#testing-backwards-compatibility). -#### Runtime JDK +#### JDK 17 + +By default, the test tasks use bundled JDK runtime, configured in [buildSrc/version.properties](buildSrc/version.properties), and set to JDK 17 (LTS). + +``` +bundled_jdk_vendor = adoptium +bundled_jdk = 17.0.2+8 +``` + +#### Custom Runtime JDK -By default, the test tasks use bundled JDK runtime, configured in `buildSrc/version.properties` and set to JDK 17 (LTS). Other kind of test tasks (integration, cluster, ... ) use the same runtime as `JAVA_HOME`. However, the build supports compiling with JDK 11 and testing on a different version of JDK runtime. To do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of another JDK installation, e.g. `RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-14`. Alternatively, the runtime JDK version could be provided as the command line argument, using combination of `runtime.java=` property and `JAVA_HOME` environment variable, for example `./gradlew -Druntime.java=17 ...` (in this case, the tooling expects `JAVA17_HOME` environment variable to be set). +Other kind of test tasks (integration, cluster, etc.) use the same runtime as `JAVA_HOME`. However, the build also supports compiling with one version of JDK, and testing on a different version. To do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of another JDK installation, e.g. `RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-14`. Alternatively, the runtime JDK version could be provided as the command line argument, using combination of `runtime.java=` property and `JAVA_HOME` environment variable, for example `./gradlew -Druntime.java=17 ...` (in this case, the tooling expects `JAVA17_HOME` environment variable to be set). 
#### Windows diff --git a/build.gradle b/build.gradle index c12f7ece4d39c..374bfb3ccfae3 100644 --- a/build.gradle +++ b/build.gradle @@ -244,7 +244,7 @@ allprojects { compile.options.compilerArgs << '-Xlint:opens' compile.options.compilerArgs << '-Xlint:overloads' compile.options.compilerArgs << '-Xlint:overrides' - compile.options.compilerArgs << '-Xlint:processing' + compile.options.compilerArgs << '-Xlint:-processing' compile.options.compilerArgs << '-Xlint:rawtypes' compile.options.compilerArgs << '-Xlint:removal' compile.options.compilerArgs << '-Xlint:requires-automatic' diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 37bfc5e764dda..ff79cc5df0df0 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -158,8 +158,8 @@ if (project != rootProject) { apply plugin: 'opensearch.publish' allprojects { - targetCompatibility = 8 - sourceCompatibility = 8 + targetCompatibility = JavaVersion.VERSION_11 + sourceCompatibility = JavaVersion.VERSION_11 } // groovydoc succeeds, but has some weird internal exception... 
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java index af9dc6e053fb2..16e5cba4b5b23 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/ReaperPlugin.java @@ -36,7 +36,6 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; -import java.lang.management.ManagementFactory; import java.nio.file.Path; /** @@ -52,31 +51,15 @@ public void apply(Project project) { project.getPlugins().apply(GlobalBuildInfoPlugin.class); - Path inputDir = project.getRootDir().toPath().resolve(".gradle").resolve("reaper").resolve("build-" + getProcessId("xx")); + Path inputDir = project.getRootDir() + .toPath() + .resolve(".gradle") + .resolve("reaper") + .resolve("build-" + ProcessHandle.current().pid()); + ReaperService service = project.getExtensions() .create("reaper", ReaperService.class, project, project.getBuildDir().toPath(), inputDir); project.getGradle().buildFinished(result -> service.shutdown()); } - - private static String getProcessId(final String fallback) { - // Note: may fail in some JVM implementations - // therefore fallback has to be provided - - // something like '@', at least in SUN / Oracle JVMs - final String jvmName = ManagementFactory.getRuntimeMXBean().getName(); - final int index = jvmName.indexOf('@'); - - if (index < 1) { - // part before '@' empty (index = 0) / '@' not found (index = -1) - return fallback; - } - - try { - return Long.toString(Long.parseLong(jvmName.substring(0, index))); - } catch (NumberFormatException e) { - // ignore - } - return fallback; - } } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/ReaperService.java b/buildSrc/src/main/java/org/opensearch/gradle/ReaperService.java index 19660c672af3a..498bd68ca2a91 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/ReaperService.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/ReaperService.java @@ 
-179,11 +179,7 @@ private Path locateReaperJar() { InputStream jarInput = this.getClass().getResourceAsStream("/META-INF/reaper.jar"); ) { logger.info("Copying reaper.jar..."); - byte[] buffer = new byte[4096]; - int len; - while ((len = jarInput.read(buffer)) > 0) { - out.write(buffer, 0, len); - } + jarInput.transferTo(out); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java index a726fc53a1f37..b051c15e81d6d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java @@ -93,7 +93,9 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Function; @@ -908,7 +910,7 @@ private void startOpenSearchProcess() { } catch (IOException e) { throw new TestClustersException("Failed to start " + currentConfig.command + " process for " + this, e); } - // reaper.registerPid(toString(), opensearchProcess.pid()); + reaper.registerPid(toString(), opensearchProcess.pid()); } @Internal @@ -975,7 +977,7 @@ public synchronized void stop(boolean tailLogs) { LOGGER.info("Stopping `{}`, tailLogs: {}", this, tailLogs); requireNonNull(opensearchProcess, "Can't stop `" + this + "` as it was not started or already stopped."); // Test clusters are not reused, don't spend time on a graceful shutdown - stopHandle(opensearchProcess, true); + stopProcess(opensearchProcess.toHandle(), true); reaper.unregister(toString()); if (tailLogs) { logFileContents("Standard output of node", currentConfig.stdoutFile); @@ -1000,9 +1002,9 @@ public void 
setNameCustomization(Function nameCustomizer) { this.nameCustomization = nameCustomizer; } - private void stopHandle(Process process, boolean forcibly) { + private void stopProcess(ProcessHandle processHandle, boolean forcibly) { // No-op if the process has already exited by itself. - if (process.isAlive() == false) { + if (processHandle.isAlive() == false) { LOGGER.info("Process was not running when we tried to terminate it."); return; } @@ -1011,19 +1013,19 @@ private void stopHandle(Process process, boolean forcibly) { // they'll be recorded as having failed and won't restart when the cluster restarts. // ES could actually be a child when there's some wrapper process like on Windows, // and in that case the ML processes will be grandchildren of the wrapper. - // List children = process.children().collect(Collectors.toList()); + List children = processHandle.children().collect(Collectors.toList()); try { - // logProcessInfo( - // "Terminating " + currentConfig.command + " process" + (forcibly ? " forcibly " : "gracefully") + ":", - // process.info() - // ); + logProcessInfo( + "Terminating " + currentConfig.command + " process" + (forcibly ? 
" forcibly " : "gracefully") + ":", + processHandle.info() + ); if (forcibly) { - process.destroyForcibly(); + processHandle.destroyForcibly(); } else { - process.destroy(); - waitForProcessToExit(process); - if (process.isAlive() == false) { + processHandle.destroy(); + waitForProcessToExit(processHandle); + if (processHandle.isAlive() == false) { return; } LOGGER.info( @@ -1031,25 +1033,24 @@ private void stopHandle(Process process, boolean forcibly) { OPENSEARCH_DESTROY_TIMEOUT, OPENSEARCH_DESTROY_TIMEOUT_UNIT ); - process.destroyForcibly(); + processHandle.destroyForcibly(); } - waitForProcessToExit(process); - if (process.isAlive()) { + waitForProcessToExit(processHandle); + if (processHandle.isAlive()) { throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); } } finally { - // children.forEach(each -> stopHandle(each, forcibly)); + children.forEach(each -> stopProcess(each, forcibly)); } - // waitForProcessToExit(process); - // if (process.isAlive()) { - // throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); - // } + waitForProcessToExit(processHandle); + if (processHandle.isAlive()) { + throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); + } } - /* - private void logProcessInfo(String prefix, Process info) { + private void logProcessInfo(String prefix, ProcessHandle.Info info) { LOGGER.info( prefix + " commandLine:`{}` command:`{}` args:`{}`", info.commandLine().orElse("-"), @@ -1057,7 +1058,6 @@ private void logProcessInfo(String prefix, Process info) { Arrays.stream(info.arguments().orElse(new String[] {})).map(each -> "'" + each + "'").collect(Collectors.joining(" ")) ); } - */ private void logFileContents(String description, Path from) { final Map errorsAndWarnings = new LinkedHashMap<>(); @@ -1126,14 +1126,16 @@ private String normalizeLogLine(String line) { return line; } - 
private void waitForProcessToExit(Process process) { + private void waitForProcessToExit(ProcessHandle processHandle) { try { - process.waitFor(OPENSEARCH_DESTROY_TIMEOUT, OPENSEARCH_DESTROY_TIMEOUT_UNIT); + processHandle.onExit().get(OPENSEARCH_DESTROY_TIMEOUT, OPENSEARCH_DESTROY_TIMEOUT_UNIT); } catch (InterruptedException e) { LOGGER.info("Interrupted while waiting for {} process", currentConfig.command, e); Thread.currentThread().interrupt(); - } catch (NullPointerException e) { + } catch (ExecutionException e) { LOGGER.info("Failure while waiting for process to exist", e); + } catch (TimeoutException e) { + LOGGER.info("Timed out waiting for process to exit", e); } } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java b/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java index 4b3e92e23925e..5ff8168a9bed2 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java @@ -84,11 +84,7 @@ public void unpack(File tarFile, File targetDir) throws IOException { // copy the file from the archive using a small buffer to avoid heaping Files.createFile(destination); try (FileOutputStream fos = new FileOutputStream(destination.toFile())) { - byte[] buffer = new byte[4096]; - int len; - while ((len = tar.read(buffer)) > 0) { - fos.write(buffer, 0, len); - } + tar.transferTo(fos); } } if (entry.isSymbolicLink() == false) { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantBasePlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantBasePlugin.java index 4b918bb38e3c2..9d957a301dde4 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantBasePlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/vagrant/VagrantBasePlugin.java @@ -42,6 +42,7 @@ import 
org.gradle.api.tasks.TaskState; import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; import java.util.List; import java.util.function.Consumer; import java.util.regex.Matcher; @@ -101,7 +102,7 @@ void checkVersion(Project project, String tool, Pattern versionRegex, int... min spec.setCommandLine(tool, "--version"); spec.setStandardOutput(pipe); }); - String output = pipe.toString().trim(); + String output = pipe.toString(StandardCharsets.UTF_8).trim(); Matcher matcher = versionRegex.matcher(output); if (matcher.find() == false) { throw new IllegalStateException( diff --git a/buildSrc/src/main/resources/minimumRuntimeVersion b/buildSrc/src/main/resources/minimumRuntimeVersion index 468437494697b..9d607966b721a 100644 --- a/buildSrc/src/main/resources/minimumRuntimeVersion +++ b/buildSrc/src/main/resources/minimumRuntimeVersion @@ -1 +1 @@ -1.8 \ No newline at end of file +11 \ No newline at end of file diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 2271fed252793..5c1252061443a 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -33,8 +33,8 @@ import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis apply plugin: 'opensearch.build' apply plugin: 'opensearch.publish' -targetCompatibility = JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = 'org.opensearch.client' archivesBaseName = 'opensearch-rest-client' diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index f81f4ccc3b1e8..bc4be1dd153e8 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -30,8 +30,8 @@ apply plugin: 'opensearch.build' apply plugin: 'opensearch.publish' -targetCompatibility = JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = 
'org.opensearch.client' archivesBaseName = 'opensearch-rest-client-sniffer' diff --git a/client/test/build.gradle b/client/test/build.gradle index 7d1333a84eae7..07d874cf01ea7 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -29,8 +29,8 @@ */ apply plugin: 'opensearch.build' -targetCompatibility = JavaVersion.VERSION_1_8 -sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 group = "${group}.client.test" diff --git a/distribution/tools/java-version-checker/build.gradle b/distribution/tools/java-version-checker/build.gradle index d3b1422de475a..9480a86ce6fb7 100644 --- a/distribution/tools/java-version-checker/build.gradle +++ b/distribution/tools/java-version-checker/build.gradle @@ -11,8 +11,9 @@ apply plugin: 'opensearch.build' -sourceCompatibility = JavaVersion.VERSION_1_8 -targetCompatibility = JavaVersion.VERSION_1_8 +sourceCompatibility = JavaVersion.VERSION_11 +targetCompatibility = JavaVersion.VERSION_11 + // targetting very old java versions enables a warning by default on newer JDK: disable it. 
compileJava.options.compilerArgs += '-Xlint:-options' diff --git a/libs/core/build.gradle b/libs/core/build.gradle index edb05cd1c22b0..374f2fe572a12 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -54,13 +54,13 @@ if (!isEclipse) { } compileJava11Java { - sourceCompatibility = 11 - targetCompatibility = 11 + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 } forbiddenApisJava11 { if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) { - targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() + targetCompatibility = JavaVersion.VERSION_11 } replaceSignatureFiles 'jdk-signatures' } diff --git a/server/build.gradle b/server/build.gradle index aa467cd0528bf..3a11428ca7919 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -64,14 +64,14 @@ if (!isEclipse) { } compileJava11Java { - sourceCompatibility = 11 - targetCompatibility = 11 + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 } tasks.named('forbiddenApisJava11').configure { doFirst { if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) { - targetCompatibility = JavaVersion.VERSION_11.getMajorVersion() + targetCompatibility = JavaVersion.VERSION_11 } } } From a6a47e7321fcd57c943eaf71363b463ea5dd66b6 Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Tue, 8 Mar 2022 14:43:04 -0800 Subject: [PATCH 13/46] Remove inclue_type_name parameter from rest api spec (#2410) Signed-off-by: Suraj Singh --- .../src/main/resources/rest-api-spec/api/indices.create.json | 4 ---- .../src/main/resources/rest-api-spec/api/indices.get.json | 4 ---- .../rest-api-spec/api/indices.get_field_mapping.json | 4 ---- .../resources/rest-api-spec/api/indices.get_template.json | 4 ---- .../resources/rest-api-spec/api/indices.put_template.json | 4 ---- .../main/resources/rest-api-spec/api/indices.rollover.json | 4 ---- 
.../rest/action/admin/indices/RestGetFieldMappingAction.java | 3 +-- 7 files changed, 1 insertion(+), 26 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json index 2b9e8617a661c..922183d628ac6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json @@ -22,10 +22,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be expected in the body of the mappings." - }, "wait_for_active_shards":{ "type":"string", "description":"Set the number of active shards to wait for before the operation returns." diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index f78b410f5b489..90a1274ecb059 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -22,10 +22,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether to add the type name to the response (default: false)" - }, "local":{ "type":"boolean", "description":"Return local information, do not retrieve the state from master node (default: false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json index ee96dfcc21ccd..0e71b6d395777 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json @@ -38,10 +38,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." 
- }, "include_defaults":{ "type":"boolean", "description":"Whether the default mapping values should be returned as well" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json index 9e07ae663ff8f..337016763ad0a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_template.json @@ -28,10 +28,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." - }, "flat_settings":{ "type":"boolean", "description":"Return settings in flat format (default: false)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json index 701a722d89eb8..75a328af929ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_template.json @@ -23,10 +23,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be returned in the body of the mappings." - }, "order":{ "type":"number", "description":"The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower numbers)" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index 4ed1f9b490969..fef1f03d1c9a7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -38,10 +38,6 @@ ] }, "params":{ - "include_type_name":{ - "type":"boolean", - "description":"Whether a type should be included in the body of the mappings." 
- }, "timeout":{ "type":"time", "description":"Explicit operation timeout" diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 292ed5e560848..d0610d790999b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -60,7 +60,6 @@ import static org.opensearch.rest.RestStatus.OK; public class RestGetFieldMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetFieldMappingAction.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); @@ -107,7 +106,7 @@ public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBui status = NOT_FOUND; } response.toXContent(builder, request); - return new BytesRestResponse(RestStatus.OK, builder); + return new BytesRestResponse(status, builder); } }); } From 9c679cbbfcf685e3865d2cf06b8f4e10c3082d49 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Tue, 8 Mar 2022 18:42:32 -0500 Subject: [PATCH 14/46] MapperService has to be passed in as null for EnginePlugins CodecService constructor (#2177) * MapperService has to be passed in as null for EnginePlugins CodecService constructor Signed-off-by: Andriy Redko * Addressing code review comments Signed-off-by: Andriy Redko * Delayed CodecService instantiation up to the shard initialization Signed-off-by: Andriy Redko * Added logger (associated with shard) to CodecServiceConfig Signed-off-by: Andriy Redko * Refactored the EngineConfigFactory / IndexShard instantiation of the CodecService Signed-off-by: Andriy Redko --- .../index/codec/CodecServiceConfig.java | 45 ++++++++++++++ .../index/codec/CodecServiceFactory.java | 22 +++++++ .../index/engine/EngineConfigFactory.java 
| 59 ++++++++++++++++-- .../opensearch/index/shard/IndexShard.java | 3 +- .../org/opensearch/plugins/EnginePlugin.java | 16 +++++ .../engine/EngineConfigFactoryTests.java | 62 +++++++++++++++++++ 6 files changed, 200 insertions(+), 7 deletions(-) create mode 100644 server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java create mode 100644 server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java diff --git a/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java b/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java new file mode 100644 index 0000000000000..313c0d359bb02 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/CodecServiceConfig.java @@ -0,0 +1,45 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec; + +import org.apache.logging.log4j.Logger; +import org.opensearch.common.Nullable; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.mapper.MapperService; + +import java.util.Objects; + +/** + * The configuration parameters necessary for the {@link CodecService} instance construction. 
+ */ +public final class CodecServiceConfig { + private final IndexSettings indexSettings; + private final MapperService mapperService; + private final Logger logger; + + public CodecServiceConfig(IndexSettings indexSettings, @Nullable MapperService mapperService, @Nullable Logger logger) { + this.indexSettings = Objects.requireNonNull(indexSettings); + this.mapperService = mapperService; + this.logger = logger; + } + + public IndexSettings getIndexSettings() { + return indexSettings; + } + + @Nullable + public MapperService getMapperService() { + return mapperService; + } + + @Nullable + public Logger getLogger() { + return logger; + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java b/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java new file mode 100644 index 0000000000000..da28c5f06b035 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/CodecServiceFactory.java @@ -0,0 +1,22 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec; + +/** + * A factory for creating new {@link CodecService} instance + */ +@FunctionalInterface +public interface CodecServiceFactory { + /** + * Create new {@link CodecService} instance + * @param config code service configuration + * @return new {@link CodecService} instance + */ + CodecService createCodecService(CodecServiceConfig config); +} diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java b/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java index dffdb58bfec1e..a78a5e5a4820a 100644 --- a/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java +++ b/server/src/main/java/org/opensearch/index/engine/EngineConfigFactory.java @@ -8,6 +8,7 @@ package org.opensearch.index.engine; +import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.MergePolicy; import org.apache.lucene.search.QueryCache; @@ -15,9 +16,13 @@ import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.Sort; import org.apache.lucene.search.similarities.Similarity; +import org.opensearch.common.Nullable; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceConfig; +import org.opensearch.index.codec.CodecServiceFactory; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.shard.ShardId; import org.opensearch.index.store.Store; @@ -39,7 +44,7 @@ * A factory to create an EngineConfig based on custom plugin overrides */ public class EngineConfigFactory { - private final CodecService codecService; + private final CodecServiceFactory codecServiceFactory; private final TranslogDeletionPolicyFactory translogDeletionPolicyFactory; /** default ctor primarily used for tests without plugins */ @@ -58,6 +63,8 @@ 
public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti EngineConfigFactory(Collection enginePlugins, IndexSettings idxSettings) { Optional codecService = Optional.empty(); String codecServiceOverridingPlugin = null; + Optional codecServiceFactory = Optional.empty(); + String codecServiceFactoryOverridingPlugin = null; Optional translogDeletionPolicyFactory = Optional.empty(); String translogDeletionPolicyOverridingPlugin = null; for (EnginePlugin enginePlugin : enginePlugins) { @@ -65,7 +72,7 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti if (codecService.isPresent() == false) { codecService = enginePlugin.getCustomCodecService(idxSettings); codecServiceOverridingPlugin = enginePlugin.getClass().getName(); - } else { + } else if (enginePlugin.getCustomCodecService(idxSettings).isPresent()) { throw new IllegalStateException( "existing codec service already overridden in: " + codecServiceOverridingPlugin @@ -76,7 +83,7 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti if (translogDeletionPolicyFactory.isPresent() == false) { translogDeletionPolicyFactory = enginePlugin.getCustomTranslogDeletionPolicyFactory(); translogDeletionPolicyOverridingPlugin = enginePlugin.getClass().getName(); - } else { + } else if (enginePlugin.getCustomTranslogDeletionPolicyFactory().isPresent()) { throw new IllegalStateException( "existing TranslogDeletionPolicyFactory is already overridden in: " + translogDeletionPolicyOverridingPlugin @@ -84,12 +91,37 @@ public EngineConfigFactory(PluginsService pluginsService, IndexSettings idxSetti + enginePlugin.getClass().getName() ); } + // get overriding CodecServiceFactory from EnginePlugin + if (codecServiceFactory.isPresent() == false) { + codecServiceFactory = enginePlugin.getCustomCodecServiceFactory(idxSettings); + codecServiceFactoryOverridingPlugin = enginePlugin.getClass().getName(); + } else if 
(enginePlugin.getCustomCodecServiceFactory(idxSettings).isPresent()) { + throw new IllegalStateException( + "existing codec service factory already overridden in: " + + codecServiceFactoryOverridingPlugin + + " attempting to override again by: " + + enginePlugin.getClass().getName() + ); + } + } + + if (codecService.isPresent() && codecServiceFactory.isPresent()) { + throw new IllegalStateException( + "both codec service and codec service factory are present, codec service provided by: " + + codecServiceOverridingPlugin + + " conflicts with codec service factory provided by: " + + codecServiceFactoryOverridingPlugin + ); } - this.codecService = codecService.orElse(null); + + final CodecService instance = codecService.orElse(null); + this.codecServiceFactory = (instance != null) ? (config) -> instance : codecServiceFactory.orElse(null); this.translogDeletionPolicyFactory = translogDeletionPolicyFactory.orElse((idxs, rtls) -> null); } - /** Instantiates a new EngineConfig from the provided custom overrides */ + /** + * Instantiates a new EngineConfig from the provided custom overrides + */ public EngineConfig newEngineConfig( ShardId shardId, ThreadPool threadPool, @@ -114,6 +146,10 @@ public EngineConfig newEngineConfig( LongSupplier primaryTermSupplier, EngineConfig.TombstoneDocSupplier tombstoneDocSupplier ) { + CodecService codecServiceToUse = codecService; + if (codecService == null && this.codecServiceFactory != null) { + codecServiceToUse = newCodecServiceOrDefault(indexSettings, null, null, null); + } return new EngineConfig( shardId, @@ -124,7 +160,7 @@ public EngineConfig newEngineConfig( mergePolicy, analyzer, similarity, - this.codecService != null ? 
this.codecService : codecService, + codecServiceToUse, eventListener, queryCache, queryCachingPolicy, @@ -141,4 +177,15 @@ public EngineConfig newEngineConfig( tombstoneDocSupplier ); } + + public CodecService newCodecServiceOrDefault( + IndexSettings indexSettings, + @Nullable MapperService mapperService, + Logger logger, + CodecService defaultCodecService + ) { + return this.codecServiceFactory != null + ? this.codecServiceFactory.createCodecService(new CodecServiceConfig(indexSettings, mapperService, logger)) + : defaultCodecService; + } } diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 1474785f5f4e9..df0edd02d4f48 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -3155,6 +3155,7 @@ private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { this.warmer.warm(reader); } }; + return this.engineConfigFactory.newEngineConfig( shardId, threadPool, @@ -3164,7 +3165,7 @@ private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { indexSettings.getMergePolicy(), mapperService != null ? mapperService.indexAnalyzer() : null, similarityService.similarity(mapperService), - codecService, + engineConfigFactory.newCodecServiceOrDefault(indexSettings, mapperService, logger, codecService), shardEventListener, indexCache != null ? 
indexCache.query() : null, cachingPolicy, diff --git a/server/src/main/java/org/opensearch/plugins/EnginePlugin.java b/server/src/main/java/org/opensearch/plugins/EnginePlugin.java index ee285e8be8c2f..4c3a07d7b98d9 100644 --- a/server/src/main/java/org/opensearch/plugins/EnginePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/EnginePlugin.java @@ -34,6 +34,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceFactory; import org.opensearch.index.engine.EngineFactory; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.translog.TranslogDeletionPolicy; @@ -63,11 +64,26 @@ public interface EnginePlugin { * to determine if a custom {@link CodecService} should be provided for the given index. A plugin that is not overriding * the {@link CodecService} through the plugin can ignore this method and the Codec specified in the {@link IndexSettings} * will be used. + * + * @deprecated Please use {@code getCustomCodecServiceFactory()} instead as it provides more context for {@link CodecService} + * instance construction. */ + @Deprecated default Optional getCustomCodecService(IndexSettings indexSettings) { return Optional.empty(); } + /** + * EXPERT: + * When an index is created this method is invoked for each engine plugin. Engine plugins can inspect the index settings + * to determine if a custom {@link CodecServiceFactory} should be provided for the given index. A plugin that is not overriding + * the {@link CodecServiceFactory} through the plugin can ignore this method and the default Codec specified in the + * {@link IndexSettings} will be used. + */ + default Optional getCustomCodecServiceFactory(IndexSettings indexSettings) { + return Optional.empty(); + } + /** * When an index is created this method is invoked for each engine plugin. 
Engine plugins that need to provide a * custom {@link TranslogDeletionPolicy} can override this method to return a function that takes the {@link IndexSettings} diff --git a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java index df3e8deb6d90a..a6bc87d53c004 100644 --- a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java +++ b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java @@ -14,6 +14,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; +import org.opensearch.index.codec.CodecServiceFactory; import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicyFactory; @@ -84,6 +85,18 @@ public void testCreateEngineConfigFromFactoryMultipleCodecServiceIllegalStateExc expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); } + public void testCreateEngineConfigFromFactoryMultipleCodecServiceAndFactoryIllegalStateException() { + IndexMetadata meta = IndexMetadata.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + List plugins = Arrays.asList(new FooEnginePlugin(), new BakEnginePlugin()); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); + + expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); + } + public void testCreateEngineConfigFromFactoryMultipleCustomTranslogDeletionPolicyFactoryIllegalStateException() { IndexMetadata meta = IndexMetadata.builder("test") .settings(settings(Version.CURRENT)) @@ -96,6 +109,43 @@ public void testCreateEngineConfigFromFactoryMultipleCustomTranslogDeletionPolic 
expectThrows(IllegalStateException.class, () -> new EngineConfigFactory(plugins, indexSettings)); } + public void testCreateCodecServiceFromFactory() { + IndexMetadata meta = IndexMetadata.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + List plugins = Arrays.asList(new BakEnginePlugin()); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings()); + + EngineConfigFactory factory = new EngineConfigFactory(plugins, indexSettings); + EngineConfig config = factory.newEngineConfig( + null, + null, + indexSettings, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + TimeValue.timeValueMinutes(5), + null, + null, + null, + null, + null, + () -> new RetentionLeases(0, 0, Collections.emptyList()), + null, + null + ); + assertNotNull(config.getCodec()); + } + private static class FooEnginePlugin extends Plugin implements EnginePlugin { @Override public Optional getEngineFactory(final IndexSettings indexSettings) { @@ -125,6 +175,18 @@ public Optional getCustomCodecService(IndexSettings indexSettings) } } + private static class BakEnginePlugin extends Plugin implements EnginePlugin { + @Override + public Optional getEngineFactory(final IndexSettings indexSettings) { + return Optional.empty(); + } + + @Override + public Optional getCustomCodecServiceFactory(IndexSettings indexSettings) { + return Optional.of(config -> new CodecService(config.getMapperService(), LogManager.getLogger(getClass()))); + } + } + private static class BazEnginePlugin extends Plugin implements EnginePlugin { @Override public Optional getEngineFactory(final IndexSettings indexSettings) { From 5a9a11416dcef1be08326a0ffea5fb639b9c5bee Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Wed, 9 Mar 2022 12:50:05 -0600 Subject: [PATCH 15/46] [Remove] TrimUnsafeCommit logic for legacy 6.x indexes (#2225) * [Remove] TrimUnsafeCommit logic for legacy 6.x indexes Multiple txlog commits 
was introduced in legacy 7.x. Legacy 6.x indexes could therefore not have a safe commit. Since OpenSearch 2.0 is no longer compatible with legacy 6.x indexes, the logic to trim these unsafe commits is safely removed. Signed-off-by: Nicholas Walter Knize * fix assertion typo Signed-off-by: Nicholas Walter Knize * rebase and incorporate pr feedback Signed-off-by: Nicholas Walter Knize --- .../testclusters/OpenSearchCluster.java | 5 - .../upgrades/FullClusterRestartIT.java | 39 ------ qa/translog-policy/build.gradle | 117 ------------------ .../index/engine/InternalEngine.java | 13 +- .../org/opensearch/index/store/Store.java | 25 ++-- .../index/engine/InternalEngineTests.java | 2 +- .../test/rest/OpenSearchRestTestCase.java | 20 +-- 7 files changed, 10 insertions(+), 211 deletions(-) delete mode 100644 qa/translog-policy/build.gradle diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java index 9e6984fd45007..a94ebacd460a5 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java @@ -404,11 +404,6 @@ public void upgradeAllNodesAndPluginsToNextVersion(List> p writeUnicastHostsFiles(); } - public void fullRestart() { - stop(false); - start(); - } - public void nextNodeToNextVersion() { OpenSearchNode node = upgradeNodeToNextVersion(); node.start(); diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java index 629e325427162..a67c5581cba92 100644 --- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java @@ -1335,45 +1335,6 @@ public void 
testTurnOffTranslogRetentionAfterUpgraded() throws Exception { } } - public void testRecoveryWithTranslogRetentionDisabled() throws Exception { - if (isRunningAgainstOldCluster()) { - final Settings.Builder settings = Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1); - if (minimumNodeVersion().before(Version.V_2_0_0)) { - settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); - } - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1"); - } - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey(), "1kb"); - } - createIndex(index, settings.build()); - ensureGreen(index); - int numDocs = randomIntBetween(0, 100); - for (int i = 0; i < numDocs; i++) { - indexDocument(Integer.toString(i)); - if (rarely()) { - flush(index, randomBoolean()); - } - } - client().performRequest(new Request("POST", "/" + index + "/_refresh")); - if (randomBoolean()) { - ensurePeerRecoveryRetentionLeasesRenewedAndSynced(index); - } - if (randomBoolean()) { - flush(index, randomBoolean()); - } else if (randomBoolean()) { - syncedFlush(index, randomBoolean()); - } - saveInfoDocument("doc_count", Integer.toString(numDocs)); - } - ensureGreen(index); - final int numDocs = Integer.parseInt(loadInfoDocument("doc_count")); - assertTotalHits(numDocs, entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")))); - } - public void testResize() throws Exception { int numDocs; if (isRunningAgainstOldCluster()) { diff --git a/qa/translog-policy/build.gradle b/qa/translog-policy/build.gradle deleted file mode 100644 index 5ef7774045e16..0000000000000 --- a/qa/translog-policy/build.gradle +++ /dev/null @@ -1,117 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license 
or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - -import org.opensearch.gradle.Version -import org.opensearch.gradle.info.BuildParams -import org.opensearch.gradle.testclusters.StandaloneRestIntegTestTask - -apply plugin: 'opensearch.testclusters' -apply plugin: 'opensearch.standalone-test' -apply from : "$rootDir/gradle/bwc-test.gradle" - -for (Version bwcVersion : BuildParams.bwcVersions.indexCompatible) { - if (bwcVersion.before('6.3.0')) { - // explicitly running restart on the current node does not work in step 2 - // below when plugins are installed, which is the case for some plugins - // prior to 6.3.0 - continue - } - String baseName = "v${bwcVersion}" - - testClusters { - "${baseName}" { - versions = [bwcVersion.toString(), project.version] - numberOfNodes = 2 - setting 'http.content_type.required', 'true' - } - } - - tasks.register("${baseName}#Step1OldClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - mustRunAfter(precommit) - systemProperty 'tests.test_step', 'step1' - systemProperty 'tests.is_old_cluster', 'true' - } - - 
tasks.register("${baseName}#Step2OldClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn "${baseName}#Step1OldClusterTest" - doFirst { - testClusters."${baseName}".fullRestart() - } - systemProperty 'tests.test_step', 'step2' - systemProperty 'tests.is_old_cluster', 'true' - } - - tasks.register("${baseName}#Step3NewClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn "${baseName}#Step2OldClusterTest" - doFirst { - testClusters."${baseName}".goToNextVersion() - } - systemProperty 'tests.test_step', 'step3' - systemProperty 'tests.is_old_cluster', 'false' - } - - tasks.register("${baseName}#Step4NewClusterTest", StandaloneRestIntegTestTask) { - useCluster testClusters."${baseName}" - dependsOn "${baseName}#Step3NewClusterTest" - doFirst { - testClusters."${baseName}".fullRestart() - } - systemProperty 'tests.test_step', 'step4' - systemProperty 'tests.is_old_cluster', 'false' - } - - String oldVersion = bwcVersion.toString().minus("-SNAPSHOT") - tasks.matching { it.name.startsWith(baseName) && it.name.endsWith("ClusterTest") }.configureEach { - it.systemProperty 'tests.old_cluster_version', oldVersion - it.nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}".allHttpSocketURI.join(",")}") - it.nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}".getName()}") - } - - tasks.register(bwcTaskName(bwcVersion)) { - dependsOn tasks.named("${baseName}#Step4NewClusterTest") - } -} - -configurations { - testArtifacts.extendsFrom testRuntime -} - -task testJar(type: Jar) { - archiveAppendix = 'test' - from sourceSets.test.output -} - -artifacts { - testArtifacts testJar -} diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 1edd0c67c3317..2c54b726348de 100644 --- 
a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -103,7 +103,6 @@ import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.OpenSearchMergePolicy; import org.opensearch.index.shard.ShardId; -import org.opensearch.index.store.Store; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogConfig; import org.opensearch.index.translog.TranslogCorruptedException; @@ -115,7 +114,6 @@ import java.io.Closeable; import java.io.IOException; -import java.nio.file.Path; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -252,7 +250,7 @@ public InternalEngine(EngineConfig engineConfig) { mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings()); throttle = new IndexThrottle(); try { - trimUnsafeCommits(engineConfig); + store.trimUnsafeCommits(engineConfig.getTranslogConfig().getTranslogPath()); translog = openTranslog(engineConfig, translogDeletionPolicy, engineConfig.getGlobalCheckpointSupplier(), seqNo -> { final LocalCheckpointTracker tracker = getLocalCheckpointTracker(); assert tracker != null || getTranslog().isOpen() == false; @@ -2955,15 +2953,6 @@ private boolean assertMaxSeqNoOfUpdatesIsAdvanced(Term id, long seqNo, boolean a return true; } - private static void trimUnsafeCommits(EngineConfig engineConfig) throws IOException { - final Store store = engineConfig.getStore(); - final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); - final Path translogPath = engineConfig.getTranslogConfig().getTranslogPath(); - final long globalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); - final long minRetainedTranslogGen = Translog.readMinTranslogGeneration(translogPath, translogUUID); - store.trimUnsafeCommits(globalCheckpoint, minRetainedTranslogGen, 
engineConfig.getIndexSettings().getIndexVersionCreated()); - } - /** * Restores the live version map and local checkpoint of this engine using documents (including soft-deleted) * after the local checkpoint in the safe commit. This step ensures the live version map and checkpoint tracker diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java index 86f007c61a684..2b47c5845a394 100644 --- a/server/src/main/java/org/opensearch/index/store/Store.java +++ b/server/src/main/java/org/opensearch/index/store/Store.java @@ -1597,27 +1597,16 @@ public void ensureIndexHasHistoryUUID() throws IOException { * commit on the replica will cause exception as the new last commit c3 will have recovery_translog_gen=1. The recovery * translog generation of a commit is calculated based on the current local checkpoint. The local checkpoint of c3 is 1 * while the local checkpoint of c2 is 2. - *

- * 3. Commit without translog can be used in recovery. An old index, which was created before multiple-commits is introduced - * (v6.2), may not have a safe commit. If that index has a snapshotted commit without translog and an unsafe commit, - * the policy can consider the snapshotted commit as a safe commit for recovery even the commit does not have translog. */ - public void trimUnsafeCommits( - final long lastSyncedGlobalCheckpoint, - final long minRetainedTranslogGen, - final org.opensearch.Version indexVersionCreated - ) throws IOException { + public void trimUnsafeCommits(final Path translogPath) throws IOException { metadataLock.writeLock().lock(); try { final List existingCommits = DirectoryReader.listCommits(directory); - if (existingCommits.isEmpty()) { - throw new IllegalArgumentException("No index found to trim"); - } - final IndexCommit lastIndexCommitCommit = existingCommits.get(existingCommits.size() - 1); - final String translogUUID = lastIndexCommitCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY); - final IndexCommit startingIndexCommit; - // TODO: Asserts the starting commit is a safe commit once peer-recovery sets global checkpoint. 
- startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint); + assert existingCommits.isEmpty() == false : "No index found to trim"; + final IndexCommit lastIndexCommit = existingCommits.get(existingCommits.size() - 1); + final String translogUUID = lastIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY); + final long lastSyncedGlobalCheckpoint = Translog.readGlobalCheckpoint(translogPath, translogUUID); + final IndexCommit startingIndexCommit = CombinedDeletionPolicy.findSafeCommitPoint(existingCommits, lastSyncedGlobalCheckpoint); if (translogUUID.equals(startingIndexCommit.getUserData().get(Translog.TRANSLOG_UUID_KEY)) == false) { throw new IllegalStateException( @@ -1628,7 +1617,7 @@ public void trimUnsafeCommits( + "]" ); } - if (startingIndexCommit.equals(lastIndexCommitCommit) == false) { + if (startingIndexCommit.equals(lastIndexCommit) == false) { try (IndexWriter writer = newAppendingIndexWriter(directory, startingIndexCommit)) { // this achieves two things: // - by committing a new commit based on the starting commit, it make sure the starting commit will be opened diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 359f73ff3d555..0bd47902c89ed 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -6094,7 +6094,7 @@ public void testTrimUnsafeCommits() throws Exception { minTranslogGen = engine.getTranslog().getMinFileGeneration(); } - store.trimUnsafeCommits(globalCheckpoint.get(), minTranslogGen, config.getIndexSettings().getIndexVersionCreated()); + store.trimUnsafeCommits(config.getTranslogConfig().getTranslogPath()); long safeMaxSeqNo = commitMaxSeqNo.stream() .filter(s -> s <= globalCheckpoint.get()) .reduce((s1, s2) -> s2) // get the last one. 
diff --git a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java index 9603b63337842..27369e79e5dee 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java @@ -968,10 +968,7 @@ protected static void createIndex(String name, Settings settings, String mapping entity += "}"; if (settings.getAsBoolean(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) == false) { expectSoftDeletesWarning(request, name); - } else if (settings.hasValue(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey()) - || settings.hasValue(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey())) { - expectTranslogRetentionWarning(request); - } + } request.setJsonEntity(entity); client().performRequest(request); } @@ -1025,21 +1022,6 @@ protected static void expectSoftDeletesWarning(Request request, String indexName } } - protected static void expectTranslogRetentionWarning(Request request) { - final List expectedWarnings = Collections.singletonList( - "Translog retention settings [index.translog.retention.age] " - + "and [index.translog.retention.size] are deprecated and effectively ignored. They will be removed in a future version." 
- ); - final Builder requestOptions = RequestOptions.DEFAULT.toBuilder(); - if (nodeVersions.stream().allMatch(version -> version.onOrAfter(LegacyESVersion.V_7_7_0))) { - requestOptions.setWarningsHandler(warnings -> warnings.equals(expectedWarnings) == false); - request.setOptions(requestOptions); - } else if (nodeVersions.stream().anyMatch(version -> version.onOrAfter(LegacyESVersion.V_7_7_0))) { - requestOptions.setWarningsHandler(warnings -> warnings.isEmpty() == false && warnings.equals(expectedWarnings) == false); - request.setOptions(requestOptions); - } - } - protected static Map getIndexSettings(String index) throws IOException { Request request = new Request("GET", "/" + index + "/_settings"); request.addParameter("flat_settings", "true"); From fb9e1500761c462210752a4a703026322aadc5b6 Mon Sep 17 00:00:00 2001 From: Kartik <85275476+kartg@users.noreply.github.com> Date: Wed, 9 Mar 2022 12:21:09 -0800 Subject: [PATCH 16/46] Refactoring gated and ref-counted interfaces and their implementations (#2396) * Reducing duplication in plugins around ref-counted releasable classes Both AmazonEc2Reference and AmazonS3Reference duplicate the same logic - a subclass of AbstractRefCounted that also implements Releasable. This change centralizes this paradigm into a AbstractRefCountedReleasable class and supports both clients via generics. It also updates all fetching implementations to use the get() method instead of client() Signed-off-by: Kartik Ganesh * Introduce Reference classes for the Closeable and AutoCloseable interfaces These classes allow you to wrap a reference instance with an onClose runnable that is executed when close() is invoked. Two separate classes are needed because the close() signatures for the two interfaces are different. This change takes the first step to have implementing classes extend from these generic superclasses, before attempting to remove the subclasses entirely. The get() method is also replaced throughout the code base. 
Note that there is also a separate Releasable interface that has a similar access pattern, but is implemented separately. This is used in AbstractRefCountedReleasable introduced in a prior commit Signed-off-by: Kartik Ganesh * More improvements and refactoring * Functionality around one-way gating is now moved to a dedicated class - OneWayGate. This replaces duplicate functionality throughout the code. * The two *Reference classes have been renamed to Gated* since that better represents their functionality * The AbstractRefCountedReleasable has been improved to no longer be abstract by accepting the shutdown hook. This removes the need for the inner class in ReleasableBytesReference, and further simplifies the plugin subclasses (these could probably be removed entirely). * Finally, unit tests have been added for some classes Signed-off-by: Kartik Ganesh * Added tests for GatedCloseable Also updated the license information in GatedAutoCloseableTests Signed-off-by: Kartik Ganesh * Fixing license information in new files Signed-off-by: Kartik Ganesh * Added unit tests for RefCountedReleasable Signed-off-by: Kartik Ganesh --- .../util/concurrent/RefCountedTests.java | 8 +-- .../discovery/ec2/AmazonEc2Reference.java | 33 +-------- .../ec2/AwsEc2SeedHostsProvider.java | 2 +- .../ec2/Ec2DiscoveryPluginTests.java | 40 +++++------ .../repositories/s3/AmazonS3Reference.java | 34 +--------- .../repositories/s3/S3BlobContainer.java | 22 +++--- .../s3/S3RetryingInputStream.java | 2 +- .../s3/RepositoryCredentialsTests.java | 6 +- .../s3/S3RetryingInputStreamTests.java | 2 +- .../indices/forcemerge/ForceMergeIT.java | 2 +- .../indices/recovery/IndexRecoveryIT.java | 2 +- .../bytes/ReleasableBytesReference.java | 25 ++----- .../common/concurrent/GatedAutoCloseable.java | 43 ++++++++++++ .../common/concurrent/GatedCloseable.java | 48 +++++++++++++ .../common/concurrent/OneWayGate.java | 43 ++++++++++++ .../concurrent/RefCountedReleasable.java | 48 +++++++++++++ 
.../org/opensearch/index/engine/Engine.java | 21 +----- .../opensearch/index/shard/IndexShard.java | 2 +- .../index/shard/LocalShardSnapshot.java | 2 +- .../recovery/PeerRecoveryTargetService.java | 22 +++--- .../recovery/RecoveriesCollection.java | 24 ++----- .../recovery/RecoverySourceHandler.java | 6 +- .../snapshots/SnapshotShardsService.java | 4 +- .../concurrent/GatedAutoCloseableTests.java | 46 +++++++++++++ .../concurrent/GatedCloseableTests.java | 60 ++++++++++++++++ .../common/concurrent/OneWayGateTests.java | 41 +++++++++++ .../concurrent/RefCountedReleasableTests.java | 68 +++++++++++++++++++ .../index/engine/InternalEngineTests.java | 14 ++-- .../index/engine/NoOpEngineTests.java | 2 +- .../index/shard/IndexShardTests.java | 4 +- .../recovery/RecoveriesCollectionTests.java | 10 +-- .../index/engine/EngineTestCase.java | 2 +- .../index/shard/IndexShardTestCase.java | 2 +- 33 files changed, 493 insertions(+), 197 deletions(-) create mode 100644 server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java create mode 100644 server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java create mode 100644 server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java create mode 100644 server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java create mode 100644 server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java create mode 100644 server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java create mode 100644 server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java create mode 100644 server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java diff --git a/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java b/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java index 47cf49b3e320f..f784ef9d16464 100644 --- 
a/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java +++ b/libs/core/src/test/java/org/opensearch/common/util/concurrent/RefCountedTests.java @@ -31,13 +31,13 @@ package org.opensearch.common.util.concurrent; +import org.opensearch.common.concurrent.OneWayGate; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.Matchers; import java.io.IOException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -138,7 +138,7 @@ public void run() { private final class MyRefCounted extends AbstractRefCounted { - private final AtomicBoolean closed = new AtomicBoolean(false); + private final OneWayGate gate = new OneWayGate(); MyRefCounted() { super("test"); @@ -146,11 +146,11 @@ private final class MyRefCounted extends AbstractRefCounted { @Override protected void closeInternal() { - this.closed.set(true); + gate.close(); } public void ensureOpen() { - if (closed.get()) { + if (gate.isClosed()) { assert this.refCount() == 0; throw new IllegalStateException("closed"); } diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java index eac46356d9127..2686c376213f3 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java +++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AmazonEc2Reference.java @@ -33,42 +33,15 @@ package org.opensearch.discovery.ec2; import com.amazonaws.services.ec2.AmazonEC2; - -import org.opensearch.common.lease.Releasable; -import org.opensearch.common.util.concurrent.AbstractRefCounted; +import org.opensearch.common.concurrent.RefCountedReleasable; /** * Handles the shutdown of the wrapped {@link AmazonEC2} using reference * counting. 
*/ -public class AmazonEc2Reference extends AbstractRefCounted implements Releasable { - - private final AmazonEC2 client; +public class AmazonEc2Reference extends RefCountedReleasable { AmazonEc2Reference(AmazonEC2 client) { - super("AWS_EC2_CLIENT"); - this.client = client; + super("AWS_EC2_CLIENT", client, client::shutdown); } - - /** - * Call when the client is not needed anymore. - */ - @Override - public void close() { - decRef(); - } - - /** - * Returns the underlying `AmazonEC2` client. All method calls are permitted BUT - * NOT shutdown. Shutdown is called when reference count reaches 0. - */ - public AmazonEC2 client() { - return client; - } - - @Override - protected void closeInternal() { - client.shutdown(); - } - } diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java index 4b36a60bb278f..f26ecfab501f8 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2SeedHostsProvider.java @@ -129,7 +129,7 @@ protected List fetchDynamicNodes() { // NOTE: we don't filter by security group during the describe instances request for two reasons: // 1. differences in VPCs require different parameters during query (ID vs Name) // 2. We want to use two different strategies: (all security groups vs. 
any security groups) - descInstances = SocketAccess.doPrivileged(() -> clientReference.client().describeInstances(buildDescribeInstancesRequest())); + descInstances = SocketAccess.doPrivileged(() -> clientReference.get().describeInstances(buildDescribeInstancesRequest())); } catch (final AmazonClientException e) { logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage()); logger.debug("Full exception:", e); diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java index be6261583bdd1..cb19c0d4255ac 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -103,7 +103,7 @@ public void testNodeAttributesErrorLenient() throws Exception { public void testDefaultEndpoint() throws IOException { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { - final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().get()).endpoint; assertThat(endpoint, is("")); } } @@ -111,7 +111,7 @@ public void testDefaultEndpoint() throws IOException { public void testSpecificEndpoint() throws IOException { final Settings settings = Settings.builder().put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2.endpoint").build(); try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings)) { - final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().get()).endpoint; assertThat(endpoint, is("ec2.endpoint")); } } @@ -150,7 +150,7 @@ public void testClientSettingsReInit() throws IOException { try (Ec2DiscoveryPluginMock plugin = new 
Ec2DiscoveryPluginMock(settings1)) { try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.get()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("ec2_access_1")); assertThat(credentials.getAWSSecretKey(), is("ec2_secret_1")); if (mockSecure1HasSessionToken) { @@ -159,32 +159,32 @@ public void testClientSettingsReInit() throws IOException { } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_1")); } // reload secure settings2 plugin.reload(settings2); // client is not released, it is still using the old settings { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = ((AmazonEC2Mock) 
clientReference.get()).credentials.getCredentials(); if (mockSecure1HasSessionToken) { assertThat(credentials, instanceOf(BasicSessionCredentials.class)); assertThat(((BasicSessionCredentials) credentials).getSessionToken(), is("ec2_session_token_1")); } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_1")); } } try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { - final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.client()).credentials.getCredentials(); + final AWSCredentials credentials = ((AmazonEC2Mock) clientReference.get()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("ec2_access_2")); assertThat(credentials.getAWSSecretKey(), is("ec2_secret_2")); if (mockSecure2HasSessionToken) { @@ -193,11 +193,11 @@ public void testClientSettingsReInit() throws IOException { } else { assertThat(credentials, instanceOf(BasicAWSCredentials.class)); } - assertThat(((AmazonEC2Mock) 
clientReference.client()).configuration.getProxyUsername(), is("proxy_username_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_2")); - assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(882)); - assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyUsername(), is("proxy_username_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPassword(), is("proxy_password_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyHost(), is("proxy_host_2")); + assertThat(((AmazonEC2Mock) clientReference.get()).configuration.getProxyPort(), is(882)); + assertThat(((AmazonEC2Mock) clientReference.get()).endpoint, is("ec2_endpoint_2")); } } } diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java index 239918206f397..62e415705a011 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/AmazonS3Reference.java @@ -32,45 +32,17 @@ package org.opensearch.repositories.s3; -import org.opensearch.common.util.concurrent.AbstractRefCounted; - import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; - -import org.opensearch.common.lease.Releasable; +import org.opensearch.common.concurrent.RefCountedReleasable; /** * Handles the shutdown of the wrapped {@link AmazonS3Client} using reference * counting. 
*/ -public class AmazonS3Reference extends AbstractRefCounted implements Releasable { - - private final AmazonS3 client; +public class AmazonS3Reference extends RefCountedReleasable { AmazonS3Reference(AmazonS3 client) { - super("AWS_S3_CLIENT"); - this.client = client; - } - - /** - * Call when the client is not needed anymore. - */ - @Override - public void close() { - decRef(); + super("AWS_S3_CLIENT", client, client::shutdown); } - - /** - * Returns the underlying `AmazonS3` client. All method calls are permitted BUT - * NOT shutdown. Shutdown is called when reference count reaches 0. - */ - public AmazonS3 client() { - return client; - } - - @Override - protected void closeInternal() { - client.shutdown(); - } - } diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java index 5a9c03c0b2a37..678be7c6f13f2 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java @@ -101,7 +101,7 @@ class S3BlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(String blobName) { try (AmazonS3Reference clientReference = blobStore.clientReference()) { - return SocketAccess.doPrivileged(() -> clientReference.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); + return SocketAccess.doPrivileged(() -> clientReference.get().doesObjectExist(blobStore.bucket(), buildKey(blobName))); } catch (final Exception e) { throw new BlobStoreException("Failed to check if blob [" + blobName + "] exists", e); } @@ -169,13 +169,13 @@ public DeleteResult delete() throws IOException { ObjectListing list; if (prevListing != null) { final ObjectListing finalPrevListing = prevListing; - list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); + 
list = SocketAccess.doPrivileged(() -> clientReference.get().listNextBatchOfObjects(finalPrevListing)); } else { final ListObjectsRequest listObjectsRequest = new ListObjectsRequest(); listObjectsRequest.setBucketName(blobStore.bucket()); listObjectsRequest.setPrefix(keyPath); listObjectsRequest.setRequestMetricCollector(blobStore.listMetricCollector); - list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listObjects(listObjectsRequest)); } final List blobsToDelete = new ArrayList<>(); list.getObjectSummaries().forEach(s3ObjectSummary -> { @@ -236,7 +236,7 @@ private void doDeleteBlobs(List blobNames, boolean relative) throws IOEx .map(DeleteObjectsRequest.KeyVersion::getKey) .collect(Collectors.toList()); try { - clientReference.client().deleteObjects(deleteRequest); + clientReference.get().deleteObjects(deleteRequest); outstanding.removeAll(keysInRequest); } catch (MultiObjectDeleteException e) { // We are sending quiet mode requests so we can't use the deleted keys entry on the exception and instead @@ -324,9 +324,9 @@ private static List executeListing(AmazonS3Reference clientRefere ObjectListing list; if (prevListing != null) { final ObjectListing finalPrevListing = prevListing; - list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listNextBatchOfObjects(finalPrevListing)); } else { - list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); + list = SocketAccess.doPrivileged(() -> clientReference.get().listObjects(listObjectsRequest)); } results.add(list); if (list.isTruncated()) { @@ -374,7 +374,7 @@ void executeSingleUpload(final S3BlobStore blobStore, final String blobName, fin putRequest.setRequestMetricCollector(blobStore.putMetricCollector); try (AmazonS3Reference clientReference = 
blobStore.clientReference()) { - SocketAccess.doPrivilegedVoid(() -> { clientReference.client().putObject(putRequest); }); + SocketAccess.doPrivilegedVoid(() -> { clientReference.get().putObject(putRequest); }); } catch (final AmazonClientException e) { throw new IOException("Unable to upload object [" + blobName + "] using a single upload", e); } @@ -413,7 +413,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, } try (AmazonS3Reference clientReference = blobStore.clientReference()) { - uploadId.set(SocketAccess.doPrivileged(() -> clientReference.client().initiateMultipartUpload(initRequest).getUploadId())); + uploadId.set(SocketAccess.doPrivileged(() -> clientReference.get().initiateMultipartUpload(initRequest).getUploadId())); if (Strings.isEmpty(uploadId.get())) { throw new IOException("Failed to initialize multipart upload " + blobName); } @@ -439,7 +439,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, } bytesCount += uploadRequest.getPartSize(); - final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.client().uploadPart(uploadRequest)); + final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.get().uploadPart(uploadRequest)); parts.add(uploadResponse.getPartETag()); } @@ -456,7 +456,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, parts ); complRequest.setRequestMetricCollector(blobStore.multiPartUploadMetricCollector); - SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); + SocketAccess.doPrivilegedVoid(() -> clientReference.get().completeMultipartUpload(complRequest)); success = true; } catch (final AmazonClientException e) { @@ -465,7 +465,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, final String blobName, if ((success == false) && Strings.hasLength(uploadId.get())) { final AbortMultipartUploadRequest abortRequest = new 
AbortMultipartUploadRequest(bucketName, blobName, uploadId.get()); try (AmazonS3Reference clientReference = blobStore.clientReference()) { - SocketAccess.doPrivilegedVoid(() -> clientReference.client().abortMultipartUpload(abortRequest)); + SocketAccess.doPrivilegedVoid(() -> clientReference.get().abortMultipartUpload(abortRequest)); } } } diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java index 82c3367679c53..388f5b8d74a2b 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3RetryingInputStream.java @@ -110,7 +110,7 @@ private void openStream() throws IOException { + end; getObjectRequest.setRange(Math.addExact(start, currentOffset), end); } - final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); + final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.get().getObject(getObjectRequest)); this.currentStreamLastOffset = Math.addExact(Math.addExact(start, currentOffset), getStreamLength(s3Object)); this.currentStream = s3Object.getObjectContent(); } catch (final AmazonClientException e) { diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java index 645fe5cf1d134..9c359d67db88b 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java @@ -123,7 +123,7 @@ public void testRepositoryCredentialsOverrideSecureCredentials() { assertThat(repositories.repository(repositoryName), instanceOf(S3Repository.class)); 
final S3Repository repository = (S3Repository) repositories.repository(repositoryName); - final AmazonS3 client = repository.createBlobStore().clientReference().client(); + final AmazonS3 client = repository.createBlobStore().clientReference().get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); @@ -162,7 +162,7 @@ public void testReinitSecureCredentials() { final S3Repository repository = (S3Repository) repositories.repository(repositoryName); try (AmazonS3Reference clientReference = ((S3BlobStore) repository.blobStore()).clientReference()) { - final AmazonS3 client = clientReference.client(); + final AmazonS3 client = clientReference.get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); @@ -202,7 +202,7 @@ public void testReinitSecureCredentials() { // check credentials have been updated try (AmazonS3Reference clientReference = ((S3BlobStore) repository.blobStore()).clientReference()) { - final AmazonS3 client = clientReference.client(); + final AmazonS3 client = clientReference.get(); assertThat(client, instanceOf(ProxyS3RepositoryPlugin.ClientAndCredentials.class)); final AWSCredentials newCredentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) client).credentials.getCredentials(); diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java index c7d1cb43bd266..0f40a7b3392e8 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3RetryingInputStreamTests.java @@ 
-109,7 +109,7 @@ private S3RetryingInputStream createInputStream(final byte[] data, @Nullable fin final AmazonS3 client = mock(AmazonS3.class); when(client.getObject(any(GetObjectRequest.class))).thenReturn(s3Object); final AmazonS3Reference clientReference = mock(AmazonS3Reference.class); - when(clientReference.client()).thenReturn(client); + when(clientReference.get()).thenReturn(client); final S3BlobStore blobStore = mock(S3BlobStore.class); when(blobStore.clientReference()).thenReturn(clientReference); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java index a31976c969aaa..5c5bb6c622493 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java @@ -100,7 +100,7 @@ public void testForceMergeUUIDConsistent() throws IOException { private static String getForceMergeUUID(IndexShard indexShard) throws IOException { try (Engine.IndexCommitRef indexCommitRef = indexShard.acquireLastIndexCommit(true)) { - return indexCommitRef.getIndexCommit().getUserData().get(Engine.FORCE_MERGE_UUID_KEY); + return indexCommitRef.get().getUserData().get(Engine.FORCE_MERGE_UUID_KEY); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java index 042b98c33683a..17e457bba6428 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java @@ -1601,7 +1601,7 @@ public void testRecoverLocallyUpToGlobalCheckpoint() throws Exception { final long localCheckpointOfSafeCommit; try (Engine.IndexCommitRef 
safeCommitRef = shard.acquireSafeIndexCommit()) { localCheckpointOfSafeCommit = SequenceNumbers.loadSeqNoInfoFromLuceneCommit( - safeCommitRef.getIndexCommit().getUserData().entrySet() + safeCommitRef.get().getUserData().entrySet() ).localCheckpoint; } final long maxSeqNo = shard.seqNoStats().getMaxSeqNo(); diff --git a/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java b/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java index e9466b47c3d5b..9ed47ef6cbf39 100644 --- a/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java +++ b/server/src/main/java/org/opensearch/common/bytes/ReleasableBytesReference.java @@ -34,9 +34,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; +import org.opensearch.common.concurrent.RefCountedReleasable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.lease.Releasable; -import org.opensearch.common.util.concurrent.AbstractRefCounted; import org.opensearch.common.xcontent.XContentBuilder; import java.io.IOException; @@ -50,14 +50,14 @@ public final class ReleasableBytesReference implements Releasable, BytesReferenc public static final Releasable NO_OP = () -> {}; private final BytesReference delegate; - private final AbstractRefCounted refCounted; + private final RefCountedReleasable refCounted; public ReleasableBytesReference(BytesReference delegate, Releasable releasable) { this.delegate = delegate; - this.refCounted = new RefCountedReleasable(releasable); + this.refCounted = new RefCountedReleasable<>("bytes-reference", releasable, releasable::close); } - private ReleasableBytesReference(BytesReference delegate, AbstractRefCounted refCounted) { + private ReleasableBytesReference(BytesReference delegate, RefCountedReleasable refCounted) { this.delegate = delegate; this.refCounted = refCounted; refCounted.incRef(); @@ -82,7 +82,7 @@ public ReleasableBytesReference retainedSlice(int 
from, int length) { @Override public void close() { - refCounted.decRef(); + refCounted.close(); } @Override @@ -164,19 +164,4 @@ public boolean equals(Object obj) { public int hashCode() { return delegate.hashCode(); } - - private static final class RefCountedReleasable extends AbstractRefCounted { - - private final Releasable releasable; - - RefCountedReleasable(Releasable releasable) { - super("bytes-reference"); - this.releasable = releasable; - } - - @Override - protected void closeInternal() { - releasable.close(); - } - } } diff --git a/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java b/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java new file mode 100644 index 0000000000000..cb819c0320e91 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +/** + * Decorator class that wraps an object reference with a {@link Runnable} that is + * invoked when {@link #close()} is called. The internal {@link OneWayGate} instance ensures + * that this is invoked only once. 
See also {@link GatedCloseable} + */ +public class GatedAutoCloseable implements AutoCloseable { + + private final T ref; + private final Runnable onClose; + private final OneWayGate gate; + + public GatedAutoCloseable(T ref, Runnable onClose) { + this.ref = ref; + this.onClose = onClose; + gate = new OneWayGate(); + } + + public T get() { + return ref; + } + + @Override + public void close() { + if (gate.close()) { + onClose.run(); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java b/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java new file mode 100644 index 0000000000000..d98e4cca8d561 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java @@ -0,0 +1,48 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.opensearch.common.CheckedRunnable; + +import java.io.Closeable; +import java.io.IOException; + +/** + * Decorator class that wraps an object reference with a {@link CheckedRunnable} that is + * invoked when {@link #close()} is called. The internal {@link OneWayGate} instance ensures + * that this is invoked only once. 
See also {@link GatedAutoCloseable} + */ +public class GatedCloseable implements Closeable { + + private final T ref; + private final CheckedRunnable onClose; + private final OneWayGate gate; + + public GatedCloseable(T ref, CheckedRunnable onClose) { + this.ref = ref; + this.onClose = onClose; + gate = new OneWayGate(); + } + + public T get() { + return ref; + } + + @Override + public void close() throws IOException { + if (gate.close()) { + onClose.run(); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java b/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java new file mode 100644 index 0000000000000..76625094f3ca6 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/OneWayGate.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Encapsulates logic for a one-way gate. Guarantees idempotency via the {@link AtomicBoolean} instance + * and the return value of the {@link #close()} function. + */ +public class OneWayGate { + + private final AtomicBoolean closed = new AtomicBoolean(); + + /** + * Closes the gate and sets the internal boolean value in an idempotent + * fashion. This is a one-way operation and cannot be reset. + * @return true if the gate was closed in this invocation, + * false if the gate was already closed + */ + public boolean close() { + return closed.compareAndSet(false, true); + } + + /** + * Indicates if the gate has been closed. 
+ * @return true if the gate is closed, false otherwise + */ + public boolean isClosed() { + return closed.get(); + } +} diff --git a/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java b/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java new file mode 100644 index 0000000000000..975f2295d7c32 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/concurrent/RefCountedReleasable.java @@ -0,0 +1,48 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.opensearch.common.lease.Releasable; +import org.opensearch.common.util.concurrent.AbstractRefCounted; + +/** + * Decorator class that wraps an object reference as a {@link AbstractRefCounted} instance. + * In addition to a {@link String} name, it accepts a {@link Runnable} shutdown hook that is + * invoked when the reference count reaches zero i.e. on {@link #closeInternal()}. 
+ */ +public class RefCountedReleasable extends AbstractRefCounted implements Releasable { + + private final T ref; + private final Runnable shutdownRunnable; + + public RefCountedReleasable(String name, T ref, Runnable shutdownRunnable) { + super(name); + this.ref = ref; + this.shutdownRunnable = shutdownRunnable; + } + + @Override + public void close() { + decRef(); + } + + public T get() { + return ref; + } + + @Override + protected void closeInternal() { + shutdownRunnable.run(); + } +} diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index 2d9cba2ee0926..cbaf43b14c775 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -59,6 +59,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.ImmutableOpenMap; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; import org.opensearch.common.logging.Loggers; @@ -1828,25 +1829,9 @@ private void awaitPendingClose() { } } - public static class IndexCommitRef implements Closeable { - private final AtomicBoolean closed = new AtomicBoolean(); - private final CheckedRunnable onClose; - private final IndexCommit indexCommit; - + public static class IndexCommitRef extends GatedCloseable { public IndexCommitRef(IndexCommit indexCommit, CheckedRunnable onClose) { - this.indexCommit = indexCommit; - this.onClose = onClose; - } - - @Override - public void close() throws IOException { - if (closed.compareAndSet(false, true)) { - onClose.run(); - } - } - - public IndexCommit getIndexCommit() { - return indexCommit; + super(indexCommit, onClose); } } diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java 
b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index df0edd02d4f48..863c268414253 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -1462,7 +1462,7 @@ public Store.MetadataSnapshot snapshotStoreMetadata() throws IOException { return store.getMetadata(null, true); } } - return store.getMetadata(indexCommit.getIndexCommit()); + return store.getMetadata(indexCommit.get()); } finally { store.decRef(); IOUtils.close(indexCommit); diff --git a/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java b/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java index 148c39df070e8..d62d0358eb796 100644 --- a/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java +++ b/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java @@ -88,7 +88,7 @@ Directory getSnapshotDirectory() { return new FilterDirectory(store.directory()) { @Override public String[] listAll() throws IOException { - Collection fileNames = indexCommit.getIndexCommit().getFileNames(); + Collection fileNames = indexCommit.get().getFileNames(); final String[] fileNameArray = fileNames.toArray(new String[fileNames.size()]); return fileNameArray; } diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java index 81a6b0f2c3861..684c401716883 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java @@ -222,7 +222,7 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi logger.trace("not running recovery with id [{}] - can not find it (probably finished)", recoveryId); return; } - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final RecoveryTarget 
recoveryTarget = recoveryRef.get(); timer = recoveryTarget.state().getTimer(); cancellableThreads = recoveryTarget.cancellableThreads(); if (preExistingRequest == null) { @@ -363,7 +363,7 @@ public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, return; } - recoveryRef.target().prepareForTranslogOperations(request.totalTranslogOps(), listener); + recoveryRef.get().prepareForTranslogOperations(request.totalTranslogOps(), listener); } } } @@ -378,7 +378,7 @@ public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportCh return; } - recoveryRef.target().finalizeRecovery(request.globalCheckpoint(), request.trimAboveSeqNo(), listener); + recoveryRef.get().finalizeRecovery(request.globalCheckpoint(), request.trimAboveSeqNo(), listener); } } } @@ -389,7 +389,7 @@ class HandoffPrimaryContextRequestHandler implements TransportRequestHandler listener = createOrFinishListener( recoveryRef, channel, @@ -423,7 +423,7 @@ private void performTranslogOps( final ActionListener listener, final RecoveryRef recoveryRef ) { - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final RecoveryTarget recoveryTarget = recoveryRef.get(); final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); final Consumer retryOnMappingException = exception -> { @@ -488,7 +488,7 @@ public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel c return; } - recoveryRef.target() + recoveryRef.get() .receiveFileInfo( request.phase1FileNames, request.phase1FileSizes, @@ -511,7 +511,7 @@ public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel return; } - recoveryRef.target() + recoveryRef.get() .cleanFiles(request.totalTranslogOps(), request.getGlobalCheckpoint(), request.sourceMetaSnapshot(), listener); } } @@ -525,7 +525,7 @@ class FileChunkTransportRequestHandler implements TransportRequestHandler listener = createOrFinishListener(recoveryRef, 
channel, Actions.FILE_CHUNK, request); if (listener == null) { return; @@ -575,7 +575,7 @@ private ActionListener createOrFinishListener( final RecoveryTransportRequest request, final CheckedFunction responseFn ) { - final RecoveryTarget recoveryTarget = recoveryRef.target(); + final RecoveryTarget recoveryTarget = recoveryRef.get(); final ActionListener channelListener = new ChannelActionListener<>(channel, action, request); final ActionListener voidListener = ActionListener.map(channelListener, responseFn); @@ -611,7 +611,7 @@ public void onFailure(Exception e) { logger.error(() -> new ParameterizedMessage("unexpected error during recovery [{}], failing shard", recoveryId), e); onGoingRecoveries.failRecovery( recoveryId, - new RecoveryFailedException(recoveryRef.target().state(), "unexpected error", e), + new RecoveryFailedException(recoveryRef.get().state(), "unexpected error", e), true // be safe ); } else { diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java b/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java index 0fa2bc29c09fc..3c197a8e33eb6 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveriesCollection.java @@ -36,6 +36,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchTimeoutException; import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.common.concurrent.GatedAutoCloseable; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; @@ -48,7 +49,6 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.atomic.AtomicBoolean; /** * This class holds a collection of all on going recoveries on the current node (i.e., the node is the 
target node @@ -178,7 +178,7 @@ public RecoveryRef getRecoverySafe(long id, ShardId shardId) { if (recoveryRef == null) { throw new IndexShardClosedException(shardId); } - assert recoveryRef.target().shardId().equals(shardId); + assert recoveryRef.get().shardId().equals(shardId); return recoveryRef; } @@ -273,29 +273,15 @@ public boolean cancelRecoveriesForShard(ShardId shardId, String reason) { * causes {@link RecoveryTarget#decRef()} to be called. This makes sure that the underlying resources * will not be freed until {@link RecoveryRef#close()} is called. */ - public static class RecoveryRef implements AutoCloseable { - - private final RecoveryTarget status; - private final AtomicBoolean closed = new AtomicBoolean(false); + public static class RecoveryRef extends GatedAutoCloseable { /** * Important: {@link RecoveryTarget#tryIncRef()} should * be *successfully* called on status before */ public RecoveryRef(RecoveryTarget status) { - this.status = status; - this.status.setLastAccessTime(); - } - - @Override - public void close() { - if (closed.compareAndSet(false, true)) { - status.decRef(); - } - } - - public RecoveryTarget target() { - return status; + super(status, status::decRef); + status.setLastAccessTime(); } } diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java index dcb7024ae8c75..710b01a670946 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java @@ -268,7 +268,7 @@ && isTargetSameHistory() // advances and not when creating a new safe commit. In any case this is a best-effort thing since future recoveries can // always fall back to file-based ones, and only really presents a problem if this primary fails before things have settled // down. 
- startingSeqNo = Long.parseLong(safeCommitRef.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1L; + startingSeqNo = Long.parseLong(safeCommitRef.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1L; logger.trace("performing file-based recovery followed by history replay starting at [{}]", startingSeqNo); try { @@ -307,7 +307,7 @@ && isTargetSameHistory() deleteRetentionLeaseStep.whenComplete(ignored -> { assert Transports.assertNotTransportThread(RecoverySourceHandler.this + "[phase1]"); - phase1(safeCommitRef.getIndexCommit(), startingSeqNo, () -> estimateNumOps, sendFileStep); + phase1(safeCommitRef.get(), startingSeqNo, () -> estimateNumOps, sendFileStep); }, onFailure); } catch (final Exception e) { @@ -470,7 +470,7 @@ private Releasable acquireStore(Store store) { private Engine.IndexCommitRef acquireSafeCommit(IndexShard shard) { final Engine.IndexCommitRef commitRef = shard.acquireSafeIndexCommit(); final AtomicBoolean closed = new AtomicBoolean(false); - return new Engine.IndexCommitRef(commitRef.getIndexCommit(), () -> { + return new Engine.IndexCommitRef(commitRef.get(), () -> { if (closed.compareAndSet(false, true)) { runWithGenericThreadPool(commitRef::close); } diff --git a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java index 3b765cf179821..06b17c679cbd5 100644 --- a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java @@ -372,13 +372,13 @@ private void snapshot( try { // we flush first to make sure we get the latest writes snapshotted snapshotRef = indexShard.acquireLastIndexCommit(true); - final IndexCommit snapshotIndexCommit = snapshotRef.getIndexCommit(); + final IndexCommit snapshotIndexCommit = snapshotRef.get(); repository.snapshotShard( indexShard.store(), indexShard.mapperService(), 
snapshot.getSnapshotId(), indexId, - snapshotRef.getIndexCommit(), + snapshotRef.get(), getShardStateId(indexShard, snapshotIndexCommit), snapshotStatus, version, diff --git a/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java b/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java new file mode 100644 index 0000000000000..63058da8f163a --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java @@ -0,0 +1,46 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +public class GatedAutoCloseableTests extends OpenSearchTestCase { + + private AtomicInteger testRef; + private GatedAutoCloseable testObject; + + @Before + public void setup() { + testRef = new AtomicInteger(0); + testObject = new GatedAutoCloseable<>(testRef, testRef::incrementAndGet); + } + + public void testGet() { + assertEquals(0, testObject.get().get()); + } + + public void testClose() { + testObject.close(); + assertEquals(1, testObject.get().get()); + } + + public void testIdempotent() { + testObject.close(); + testObject.close(); + assertEquals(1, testObject.get().get()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java b/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java new file mode 100644 index 0000000000000..0645f971b8d63 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/GatedCloseableTests.java @@ -0,0 +1,60 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * 
The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.nio.file.FileSystem; + +import static org.mockito.Mockito.atMostOnce; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +public class GatedCloseableTests extends OpenSearchTestCase { + + private FileSystem testRef; + GatedCloseable testObject; + + @Before + public void setup() { + testRef = mock(FileSystem.class); + testObject = new GatedCloseable<>(testRef, testRef::close); + } + + public void testGet() throws Exception { + assertNotNull(testObject.get()); + assertEquals(testRef, testObject.get()); + verify(testRef, never()).close(); + } + + public void testClose() throws IOException { + testObject.close(); + verify(testRef, atMostOnce()).close(); + } + + public void testIdempotent() throws IOException { + testObject.close(); + testObject.close(); + verify(testRef, atMostOnce()).close(); + } + + public void testException() throws IOException { + doThrow(new IOException()).when(testRef).close(); + assertThrows(IOException.class, () -> testObject.close()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java b/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java new file mode 100644 index 0000000000000..357bf3ae321f8 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/OneWayGateTests.java @@ -0,0 +1,41 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under 
the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +public class OneWayGateTests extends OpenSearchTestCase { + + private OneWayGate testGate; + + @Before + public void setup() { + testGate = new OneWayGate(); + } + + public void testGateOpen() { + assertFalse(testGate.isClosed()); + } + + public void testGateClosed() { + testGate.close(); + assertTrue(testGate.isClosed()); + } + + public void testGateIdempotent() { + assertTrue(testGate.close()); + assertFalse(testGate.close()); + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java b/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java new file mode 100644 index 0000000000000..63c0873f1593d --- /dev/null +++ b/server/src/test/java/org/opensearch/common/concurrent/RefCountedReleasableTests.java @@ -0,0 +1,68 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.common.concurrent; + +import org.junit.Before; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.concurrent.atomic.AtomicInteger; + +public class RefCountedReleasableTests extends OpenSearchTestCase { + + private AtomicInteger testRef; + private RefCountedReleasable testObject; + + @Before + public void setup() { + testRef = new AtomicInteger(0); + testObject = new RefCountedReleasable<>("test", testRef, testRef::incrementAndGet); + } + + public void testInitialState() { + assertEquals("test", testObject.getName()); + assertEquals(testRef, testObject.get()); + assertEquals(testRef, testObject.get()); + assertEquals(0, testObject.get().get()); + assertEquals(1, testObject.refCount()); + } + + public void testIncRef() { + testObject.incRef(); + assertEquals(2, testObject.refCount()); + assertEquals(0, testObject.get().get()); + } + + public void testCloseWithoutInternal() { + testObject.incRef(); + assertEquals(2, testObject.refCount()); + testObject.close(); + assertEquals(1, testObject.refCount()); + assertEquals(0, testObject.get().get()); + } + + public void testCloseWithInternal() { + assertEquals(1, testObject.refCount()); + testObject.close(); + assertEquals(0, testObject.refCount()); + assertEquals(1, testObject.get().get()); + } + + public void testIncRefAfterClose() { + assertEquals(1, testObject.refCount()); + testObject.close(); + assertEquals(0, testObject.refCount()); + assertEquals(1, testObject.get().get()); + assertThrows(IllegalStateException.class, () -> testObject.incRef()); + } +} diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 0bd47902c89ed..745508135c6a1 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -1088,7 +1088,7 @@ public void 
testSyncTranslogConcurrently() throws Exception { assertThat(engine.getLastSyncedGlobalCheckpoint(), equalTo(globalCheckpoint.get())); try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { SequenceNumbers.CommitInfo commitInfo = SequenceNumbers.loadSeqNoInfoFromLuceneCommit( - safeCommit.getIndexCommit().getUserData().entrySet() + safeCommit.get().getUserData().entrySet() ); assertThat(commitInfo.localCheckpoint, equalTo(engine.getProcessedLocalCheckpoint())); } @@ -1505,7 +1505,7 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { engine.syncTranslog(); final long safeCommitCheckpoint; try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - safeCommitCheckpoint = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); + safeCommitCheckpoint = Long.parseLong(safeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); @@ -1595,9 +1595,7 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc engine.syncTranslog(); final long minSeqNoToRetain; try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - long safeCommitLocalCheckpoint = Long.parseLong( - safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY) - ); + long safeCommitLocalCheckpoint = Long.parseLong(safeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps, safeCommitLocalCheckpoint + 1); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); @@ -2671,7 +2669,7 @@ public void testConcurrentWritesAndCommits() throws Exception { long prevLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; long prevMaxSeqNo = SequenceNumbers.NO_OPS_PERFORMED; for 
(Engine.IndexCommitRef commitRef : commits) { - final IndexCommit commit = commitRef.getIndexCommit(); + final IndexCommit commit = commitRef.get(); Map userData = commit.getUserData(); long localCheckpoint = userData.containsKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ? Long.parseLong(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) @@ -5643,7 +5641,7 @@ public void testAcquireIndexCommit() throws Exception { globalCheckpoint.set(numDocs + moreDocs - 1); engine.flush(); // check that we can still read the commit that we captured - try (IndexReader reader = DirectoryReader.open(snapshot.getIndexCommit())) { + try (IndexReader reader = DirectoryReader.open(snapshot.get())) { assertThat(reader.numDocs(), equalTo(flushFirst && safeCommit == false ? numDocs : 0)); } assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(2)); @@ -6325,7 +6323,7 @@ public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException { assertThat(actualOps, containsInAnyOrder(expectedOps)); } try (Engine.IndexCommitRef commitRef = engine.acquireSafeIndexCommit()) { - IndexCommit safeCommit = commitRef.getIndexCommit(); + IndexCommit safeCommit = commitRef.get(); if (safeCommit.getUserData().containsKey(Engine.MIN_RETAINED_SEQNO)) { lastMinRetainedSeqNo = Long.parseLong(safeCommit.getUserData().get(Engine.MIN_RETAINED_SEQNO)); } diff --git a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java index 65b8a81b029c0..772cda9efa56e 100644 --- a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java @@ -115,7 +115,7 @@ public void testNoopAfterRegularEngine() throws IOException { assertThat(noOpEngine.getPersistedLocalCheckpoint(), equalTo(localCheckpoint)); assertThat(noOpEngine.getSeqNoStats(100L).getMaxSeqNo(), equalTo(maxSeqNo)); try (Engine.IndexCommitRef ref = 
noOpEngine.acquireLastIndexCommit(false)) { - try (IndexReader reader = DirectoryReader.open(ref.getIndexCommit())) { + try (IndexReader reader = DirectoryReader.open(ref.get())) { assertThat(reader.numDocs(), equalTo(docs)); } } diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index e08786e2c45a8..6485861f175c4 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -4127,10 +4127,10 @@ public InternalEngine recoverFromTranslog(TranslogRecoveryRunner translogRecover readyToSnapshotLatch.await(); shard.snapshotStoreMetadata(); try (Engine.IndexCommitRef indexCommitRef = shard.acquireLastIndexCommit(randomBoolean())) { - shard.store().getMetadata(indexCommitRef.getIndexCommit()); + shard.store().getMetadata(indexCommitRef.get()); } try (Engine.IndexCommitRef indexCommitRef = shard.acquireSafeIndexCommit()) { - shard.store().getMetadata(indexCommitRef.getIndexCommit()); + shard.store().getMetadata(indexCommitRef.get()); } } catch (InterruptedException | IOException e) { throw new AssertionError(e); diff --git a/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java b/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java index 69923e4390ead..6a08f5115d1e2 100644 --- a/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java +++ b/server/src/test/java/org/opensearch/recovery/RecoveriesCollectionTests.java @@ -69,10 +69,10 @@ public void testLastAccessTimeUpdate() throws Exception { final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool); final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); try (RecoveriesCollection.RecoveryRef status = collection.getRecovery(recoveryId)) { - final long lastSeenTime = 
status.target().lastAccessTime(); + final long lastSeenTime = status.get().lastAccessTime(); assertBusy(() -> { try (RecoveriesCollection.RecoveryRef currentStatus = collection.getRecovery(recoveryId)) { - assertThat("access time failed to update", lastSeenTime, lessThan(currentStatus.target().lastAccessTime())); + assertThat("access time failed to update", lastSeenTime, lessThan(currentStatus.get().lastAccessTime())); } }); } finally { @@ -120,7 +120,7 @@ public void testRecoveryCancellation() throws Exception { final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); final long recoveryId2 = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); try (RecoveriesCollection.RecoveryRef recoveryRef = collection.getRecovery(recoveryId)) { - ShardId shardId = recoveryRef.target().shardId(); + ShardId shardId = recoveryRef.get().shardId(); assertTrue("failed to cancel recoveries", collection.cancelRecoveriesForShard(shardId, "test")); assertThat("all recoveries should be cancelled", collection.size(), equalTo(0)); } finally { @@ -160,8 +160,8 @@ public void testResetRecovery() throws Exception { assertEquals(currentAsTarget, shard.recoveryStats().currentAsTarget()); try (RecoveriesCollection.RecoveryRef newRecoveryRef = collection.getRecovery(resetRecoveryId)) { shards.recoverReplica(shard, (s, n) -> { - assertSame(s, newRecoveryRef.target().indexShard()); - return newRecoveryRef.target(); + assertSame(s, newRecoveryRef.get().indexShard()); + return newRecoveryRef.get(); }, false); } shards.assertAllEqual(numDocs); diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index 24d24cd9f1a4b..97d3490db4a3d 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -1389,7 +1389,7 @@ public 
static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine e final long seqNoForRecovery; if (engine.config().getIndexSettings().isSoftDeleteEnabled()) { try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - seqNoForRecovery = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; + seqNoForRecovery = Long.parseLong(safeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; } } else { seqNoForRecovery = engine.getMinRetainedSeqNo(); diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 54b3ffbfd3a1c..b388ab8835ac4 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -1036,7 +1036,7 @@ protected String snapshotShard(final IndexShard shard, final Snapshot snapshot, shard.mapperService(), snapshot.getSnapshotId(), indexId, - indexCommitRef.getIndexCommit(), + indexCommitRef.get(), null, snapshotStatus, Version.CURRENT, From c8d80090f489877c1e968903a467f41443b67886 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Thu, 10 Mar 2022 11:46:08 -0500 Subject: [PATCH 17/46] Fixing bwcVersions and bwc builds (#2430) - adding 1.4.0 into main bwcVersions Signed-off-by: Andriy Redko --- .ci/bwcVersions | 1 + server/src/main/java/org/opensearch/Version.java | 1 + 2 files changed, 2 insertions(+) diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 92b75accdcdb5..dd51082981ffc 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -37,3 +37,4 @@ BWC_VERSION: - "1.2.4" - "1.2.5" - "1.3.0" + - "1.4.0" \ No newline at end of file diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index 536e450da4a98..88e04a6c5dd77 100644 --- 
a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -79,6 +79,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_1_2_4 = new Version(1020499, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_2_5 = new Version(1020599, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_3_0 = new Version(1030099, org.apache.lucene.util.Version.LUCENE_8_10_1); + public static final Version V_1_4_0 = new Version(1040099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version CURRENT = V_2_0_0; From 9cfa395128a2e88abe605bb8e7c7ad1d7672f57f Mon Sep 17 00:00:00 2001 From: Kartik Date: Thu, 10 Mar 2022 10:12:17 -0800 Subject: [PATCH 18/46] Remove the IndexCommitRef class (#2421) This inner class is no longer required because its functionality has been moved to the generic GatedCloseable class. 
Signed-off-by: Kartik Ganesh --- .../indices/forcemerge/ForceMergeIT.java | 6 +- .../indices/recovery/IndexRecoveryIT.java | 23 ++++--- .../org/opensearch/index/engine/Engine.java | 11 +--- .../index/engine/InternalEngine.java | 11 ++-- .../index/engine/ReadOnlyEngine.java | 9 +-- .../opensearch/index/shard/IndexShard.java | 21 ++++--- .../index/shard/LocalShardSnapshot.java | 12 ++-- .../recovery/RecoverySourceHandler.java | 26 ++++---- .../snapshots/SnapshotShardsService.java | 13 ++-- .../index/engine/InternalEngineTests.java | 61 ++++++++++--------- .../index/engine/NoOpEngineTests.java | 6 +- .../index/shard/IndexShardTests.java | 24 ++++---- .../recovery/RecoverySourceHandlerTests.java | 9 +-- .../index/engine/EngineTestCase.java | 19 +++--- .../index/shard/IndexShardTestCase.java | 8 ++- 15 files changed, 135 insertions(+), 124 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java index 5c5bb6c622493..195817bf04cc9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/forcemerge/ForceMergeIT.java @@ -32,11 +32,13 @@ package org.opensearch.action.admin.indices.forcemerge; +import org.apache.lucene.index.IndexCommit; import org.opensearch.action.admin.indices.flush.FlushResponse; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.routing.IndexRoutingTable; import org.opensearch.cluster.routing.IndexShardRoutingTable; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import org.opensearch.index.Index; import org.opensearch.index.engine.Engine; @@ -99,8 +101,8 @@ public void testForceMergeUUIDConsistent() throws IOException 
{ } private static String getForceMergeUUID(IndexShard indexShard) throws IOException { - try (Engine.IndexCommitRef indexCommitRef = indexShard.acquireLastIndexCommit(true)) { - return indexCommitRef.get().getUserData().get(Engine.FORCE_MERGE_UUID_KEY); + try (GatedCloseable wrappedIndexCommit = indexShard.acquireLastIndexCommit(true)) { + return wrappedIndexCommit.get().getUserData().get(Engine.FORCE_MERGE_UUID_KEY); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java index 17e457bba6428..a7dc77e024d5c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java @@ -33,8 +33,8 @@ package org.opensearch.indices.recovery; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.util.SetOnce; - import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; @@ -75,6 +75,7 @@ import org.opensearch.common.Strings; import org.opensearch.common.breaker.CircuitBreaker; import org.opensearch.common.breaker.CircuitBreakingException; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeUnit; import org.opensearch.common.unit.ByteSizeValue; @@ -88,7 +89,6 @@ import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.analysis.AbstractTokenFilterFactory; import org.opensearch.index.analysis.TokenFilterFactory; -import org.opensearch.index.engine.Engine; import org.opensearch.index.mapper.MapperParsingException; import org.opensearch.index.recovery.RecoveryStats; import org.opensearch.index.seqno.ReplicationTracker; @@ -114,11 +114,11 @@ import 
org.opensearch.snapshots.SnapshotState; import org.opensearch.tasks.Task; import org.opensearch.test.BackgroundIndexer; +import org.opensearch.test.InternalSettingsPlugin; +import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import org.opensearch.test.InternalSettingsPlugin; -import org.opensearch.test.InternalTestCluster; import org.opensearch.test.engine.MockEngineSupport; import org.opensearch.test.store.MockFSIndexStore; import org.opensearch.test.transport.MockTransportService; @@ -151,12 +151,6 @@ import static java.util.Collections.singletonMap; import static java.util.stream.Collectors.toList; -import static org.opensearch.action.DocWriteResponse.Result.CREATED; -import static org.opensearch.action.DocWriteResponse.Result.UPDATED; -import static org.opensearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; - import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; @@ -167,6 +161,11 @@ import static org.hamcrest.Matchers.isOneOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; +import static org.opensearch.action.DocWriteResponse.Result.CREATED; +import static org.opensearch.action.DocWriteResponse.Result.UPDATED; +import static org.opensearch.node.RecoverySettingsChunkSizePlugin.CHUNK_SIZE_SETTING; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class IndexRecoveryIT extends OpenSearchIntegTestCase { @@ -1599,9 +1598,9 @@ public 
void testRecoverLocallyUpToGlobalCheckpoint() throws Exception { .getShardOrNull(new ShardId(resolveIndex(indexName), 0)); final long lastSyncedGlobalCheckpoint = shard.getLastSyncedGlobalCheckpoint(); final long localCheckpointOfSafeCommit; - try (Engine.IndexCommitRef safeCommitRef = shard.acquireSafeIndexCommit()) { + try (GatedCloseable wrappedSafeCommit = shard.acquireSafeIndexCommit()) { localCheckpointOfSafeCommit = SequenceNumbers.loadSeqNoInfoFromLuceneCommit( - safeCommitRef.get().getUserData().entrySet() + wrappedSafeCommit.get().getUserData().entrySet() ).localCheckpoint; } final long maxSeqNo = shard.seqNoStats().getMaxSeqNo(); diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index cbaf43b14c775..b821b687c5f68 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -55,7 +55,6 @@ import org.apache.lucene.util.SetOnce; import org.opensearch.ExceptionsHelper; import org.opensearch.action.index.IndexRequest; -import org.opensearch.common.CheckedRunnable; import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.ImmutableOpenMap; @@ -1109,12 +1108,12 @@ public abstract void forceMerge( * * @param flushFirst indicates whether the engine should flush before returning the snapshot */ - public abstract IndexCommitRef acquireLastIndexCommit(boolean flushFirst) throws EngineException; + public abstract GatedCloseable acquireLastIndexCommit(boolean flushFirst) throws EngineException; /** * Snapshots the most recent safe index commit from the engine. 
*/ - public abstract IndexCommitRef acquireSafeIndexCommit() throws EngineException; + public abstract GatedCloseable acquireSafeIndexCommit() throws EngineException; /** * @return a summary of the contents of the current safe commit @@ -1829,12 +1828,6 @@ private void awaitPendingClose() { } } - public static class IndexCommitRef extends GatedCloseable { - public IndexCommitRef(IndexCommit indexCommit, CheckedRunnable onClose) { - super(indexCommit, onClose); - } - } - public void onSettingsChanged(TimeValue translogRetentionAge, ByteSizeValue translogRetentionSize, long softDeletesRetentionOps) { } diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 2c54b726348de..a264c8e0a55d9 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -72,6 +72,7 @@ import org.opensearch.common.Booleans; import org.opensearch.common.Nullable; import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lucene.LoggerInfoStream; import org.opensearch.common.lucene.Lucene; @@ -103,10 +104,10 @@ import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.OpenSearchMergePolicy; import org.opensearch.index.shard.ShardId; +import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogConfig; import org.opensearch.index.translog.TranslogCorruptedException; -import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogStats; import org.opensearch.search.suggest.completion.CompletionStats; @@ -2193,7 +2194,7 @@ public void forceMerge( 
} @Override - public IndexCommitRef acquireLastIndexCommit(final boolean flushFirst) throws EngineException { + public GatedCloseable acquireLastIndexCommit(final boolean flushFirst) throws EngineException { // we have to flush outside of the readlock otherwise we might have a problem upgrading // the to a write lock when we fail the engine in this operation if (flushFirst) { @@ -2202,13 +2203,13 @@ public IndexCommitRef acquireLastIndexCommit(final boolean flushFirst) throws En logger.trace("finish flush for snapshot"); } final IndexCommit lastCommit = combinedDeletionPolicy.acquireIndexCommit(false); - return new Engine.IndexCommitRef(lastCommit, () -> releaseIndexCommit(lastCommit)); + return new GatedCloseable<>(lastCommit, () -> releaseIndexCommit(lastCommit)); } @Override - public IndexCommitRef acquireSafeIndexCommit() throws EngineException { + public GatedCloseable acquireSafeIndexCommit() throws EngineException { final IndexCommit safeCommit = combinedDeletionPolicy.acquireIndexCommit(true); - return new Engine.IndexCommitRef(safeCommit, () -> releaseIndexCommit(safeCommit)); + return new GatedCloseable<>(safeCommit, () -> releaseIndexCommit(safeCommit)); } private void releaseIndexCommit(IndexCommit snapshot) throws IOException { diff --git a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java index d9cf8e2cd65fe..9bbffb7cc19d6 100644 --- a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java @@ -41,6 +41,7 @@ import org.apache.lucene.store.Lock; import org.opensearch.LegacyESVersion; import org.opensearch.Version; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.util.concurrent.ReleasableLock; @@ -49,9 +50,9 @@ import 
org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.store.Store; +import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogConfig; -import org.opensearch.index.translog.DefaultTranslogDeletionPolicy; import org.opensearch.index.translog.TranslogDeletionPolicy; import org.opensearch.index.translog.TranslogStats; import org.opensearch.search.suggest.completion.CompletionStats; @@ -413,13 +414,13 @@ public void forceMerge( ) {} @Override - public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) { store.incRef(); - return new IndexCommitRef(indexCommit, store::decRef); + return new GatedCloseable<>(indexCommit, store::decRef); } @Override - public IndexCommitRef acquireSafeIndexCommit() { + public GatedCloseable acquireSafeIndexCommit() { return acquireLastIndexCommit(false); } diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 863c268414253..cbf5d35327f6f 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -51,9 +51,9 @@ import org.apache.lucene.util.SetOnce; import org.apache.lucene.util.ThreadInterruptedException; import org.opensearch.Assertions; +import org.opensearch.ExceptionsHelper; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchException; -import org.opensearch.ExceptionsHelper; import org.opensearch.action.ActionListener; import org.opensearch.action.ActionRunnable; import org.opensearch.action.admin.indices.flush.FlushRequest; @@ -73,6 +73,7 @@ import org.opensearch.common.CheckedRunnable; import org.opensearch.common.Nullable; import org.opensearch.common.collect.Tuple; +import 
org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; @@ -1409,7 +1410,7 @@ public org.apache.lucene.util.Version minimumCompatibleVersion() { * * @param flushFirst true if the index should first be flushed to disk / a low level lucene commit should be executed */ - public Engine.IndexCommitRef acquireLastIndexCommit(boolean flushFirst) throws EngineException { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) throws EngineException { final IndexShardState state = this.state; // one time volatile read // we allow snapshot on closed index shard, since we want to do one after we close the shard and before we close the engine if (state == IndexShardState.STARTED || state == IndexShardState.CLOSED) { @@ -1423,7 +1424,7 @@ public Engine.IndexCommitRef acquireLastIndexCommit(boolean flushFirst) throws E * Snapshots the most recent safe index commit from the currently running engine. * All index files referenced by this index commit won't be freed until the commit/snapshot is closed. 
*/ - public Engine.IndexCommitRef acquireSafeIndexCommit() throws EngineException { + public GatedCloseable acquireSafeIndexCommit() throws EngineException { final IndexShardState state = this.state; // one time volatile read // we allow snapshot on closed index shard, since we want to do one after we close the shard and before we close the engine if (state == IndexShardState.STARTED || state == IndexShardState.CLOSED) { @@ -1448,7 +1449,7 @@ public Engine.IndexCommitRef acquireSafeIndexCommit() throws EngineException { */ public Store.MetadataSnapshot snapshotStoreMetadata() throws IOException { assert Thread.holdsLock(mutex) == false : "snapshotting store metadata under mutex"; - Engine.IndexCommitRef indexCommit = null; + GatedCloseable wrappedIndexCommit = null; store.incRef(); try { synchronized (engineMutex) { @@ -1456,16 +1457,16 @@ public Store.MetadataSnapshot snapshotStoreMetadata() throws IOException { // the engine on us. If the engine is running, we can get a snapshot via the deletion policy of the engine. 
final Engine engine = getEngineOrNull(); if (engine != null) { - indexCommit = engine.acquireLastIndexCommit(false); + wrappedIndexCommit = engine.acquireLastIndexCommit(false); } - if (indexCommit == null) { + if (wrappedIndexCommit == null) { return store.getMetadata(null, true); } } - return store.getMetadata(indexCommit.get()); + return store.getMetadata(wrappedIndexCommit.get()); } finally { store.decRef(); - IOUtils.close(indexCommit); + IOUtils.close(wrappedIndexCommit); } } @@ -3913,7 +3914,7 @@ void resetEngineToGlobalCheckpoint() throws IOException { true ) { @Override - public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { + public GatedCloseable acquireLastIndexCommit(boolean flushFirst) { synchronized (engineMutex) { if (newEngineReference.get() == null) { throw new AlreadyClosedException("engine was closed"); @@ -3924,7 +3925,7 @@ public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { } @Override - public IndexCommitRef acquireSafeIndexCommit() { + public GatedCloseable acquireSafeIndexCommit() { synchronized (engineMutex) { if (newEngineReference.get() == null) { throw new AlreadyClosedException("engine was closed"); diff --git a/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java b/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java index d62d0358eb796..98556db3ae138 100644 --- a/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java +++ b/server/src/main/java/org/opensearch/index/shard/LocalShardSnapshot.java @@ -32,6 +32,7 @@ package org.opensearch.index.shard; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; @@ -39,6 +40,7 @@ import org.apache.lucene.store.Lock; import org.apache.lucene.store.NoLockFactory; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.concurrent.GatedCloseable; import 
org.opensearch.index.Index; import org.opensearch.index.engine.Engine; import org.opensearch.index.store.Store; @@ -52,7 +54,7 @@ final class LocalShardSnapshot implements Closeable { private final IndexShard shard; private final Store store; - private final Engine.IndexCommitRef indexCommit; + private final GatedCloseable wrappedIndexCommit; private final AtomicBoolean closed = new AtomicBoolean(false); LocalShardSnapshot(IndexShard shard) { @@ -61,7 +63,7 @@ final class LocalShardSnapshot implements Closeable { store.incRef(); boolean success = false; try { - indexCommit = shard.acquireLastIndexCommit(true); + wrappedIndexCommit = shard.acquireLastIndexCommit(true); success = true; } finally { if (success == false) { @@ -88,7 +90,7 @@ Directory getSnapshotDirectory() { return new FilterDirectory(store.directory()) { @Override public String[] listAll() throws IOException { - Collection fileNames = indexCommit.get().getFileNames(); + Collection fileNames = wrappedIndexCommit.get().getFileNames(); final String[] fileNameArray = fileNames.toArray(new String[fileNames.size()]); return fileNameArray; } @@ -143,7 +145,7 @@ public Set getPendingDeletions() throws IOException { public void close() throws IOException { if (closed.compareAndSet(false, true)) { try { - indexCommit.close(); + wrappedIndexCommit.close(); } finally { store.decRef(); } @@ -156,6 +158,6 @@ IndexMetadata getIndexMetadata() { @Override public String toString() { - return "local_shard_snapshot:[" + shard.shardId() + " indexCommit: " + indexCommit + "]"; + return "local_shard_snapshot:[" + shard.shardId() + " indexCommit: " + wrappedIndexCommit + "]"; } } diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java index 710b01a670946..7899b11330a34 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java +++ 
b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java @@ -57,6 +57,7 @@ import org.opensearch.common.StopWatch; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; import org.opensearch.common.logging.Loggers; @@ -64,11 +65,10 @@ import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.CancellableThreads; -import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.concurrent.FutureUtils; import org.opensearch.common.util.concurrent.ListenableFuture; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.internal.io.IOUtils; -import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.RecoveryEngineException; import org.opensearch.index.seqno.ReplicationTracker; import org.opensearch.index.seqno.RetentionLease; @@ -250,10 +250,10 @@ && isTargetSameHistory() sendFileStep.onResponse(SendFileResult.EMPTY); } } else { - final Engine.IndexCommitRef safeCommitRef; + final GatedCloseable wrappedSafeCommit; try { - safeCommitRef = acquireSafeCommit(shard); - resources.add(safeCommitRef); + wrappedSafeCommit = acquireSafeCommit(shard); + resources.add(wrappedSafeCommit); } catch (final Exception e) { throw new RecoveryEngineException(shard.shardId(), 1, "snapshot failed", e); } @@ -268,16 +268,16 @@ && isTargetSameHistory() // advances and not when creating a new safe commit. In any case this is a best-effort thing since future recoveries can // always fall back to file-based ones, and only really presents a problem if this primary fails before things have settled // down. 
- startingSeqNo = Long.parseLong(safeCommitRef.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1L; + startingSeqNo = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1L; logger.trace("performing file-based recovery followed by history replay starting at [{}]", startingSeqNo); try { final int estimateNumOps = estimateNumberOfHistoryOperations(startingSeqNo); final Releasable releaseStore = acquireStore(shard.store()); resources.add(releaseStore); - sendFileStep.whenComplete(r -> IOUtils.close(safeCommitRef, releaseStore), e -> { + sendFileStep.whenComplete(r -> IOUtils.close(wrappedSafeCommit, releaseStore), e -> { try { - IOUtils.close(safeCommitRef, releaseStore); + IOUtils.close(wrappedSafeCommit, releaseStore); } catch (final IOException ex) { logger.warn("releasing snapshot caused exception", ex); } @@ -307,7 +307,7 @@ && isTargetSameHistory() deleteRetentionLeaseStep.whenComplete(ignored -> { assert Transports.assertNotTransportThread(RecoverySourceHandler.this + "[phase1]"); - phase1(safeCommitRef.get(), startingSeqNo, () -> estimateNumOps, sendFileStep); + phase1(wrappedSafeCommit.get(), startingSeqNo, () -> estimateNumOps, sendFileStep); }, onFailure); } catch (final Exception e) { @@ -467,12 +467,12 @@ private Releasable acquireStore(Store store) { * with the file systems due to interrupt (see {@link org.apache.lucene.store.NIOFSDirectory} javadocs for more detail). * This method acquires a safe commit and wraps it to make sure that it will be released using the generic thread pool. 
*/ - private Engine.IndexCommitRef acquireSafeCommit(IndexShard shard) { - final Engine.IndexCommitRef commitRef = shard.acquireSafeIndexCommit(); + private GatedCloseable acquireSafeCommit(IndexShard shard) { + final GatedCloseable wrappedSafeCommit = shard.acquireSafeIndexCommit(); final AtomicBoolean closed = new AtomicBoolean(false); - return new Engine.IndexCommitRef(commitRef.get(), () -> { + return new GatedCloseable<>(wrappedSafeCommit.get(), () -> { if (closed.compareAndSet(false, true)) { - runWithGenericThreadPool(commitRef::close); + runWithGenericThreadPool(wrappedSafeCommit::close); } }); } diff --git a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java index 06b17c679cbd5..b6c0b63efe3d3 100644 --- a/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/opensearch/snapshots/SnapshotShardsService.java @@ -50,6 +50,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.component.AbstractLifecycleComponent; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; import org.opensearch.core.internal.io.IOUtils; @@ -368,25 +369,25 @@ private void snapshot( } final Repository repository = repositoriesService.repository(snapshot.getRepository()); - Engine.IndexCommitRef snapshotRef = null; + GatedCloseable wrappedSnapshot = null; try { // we flush first to make sure we get the latest writes snapshotted - snapshotRef = indexShard.acquireLastIndexCommit(true); - final IndexCommit snapshotIndexCommit = snapshotRef.get(); + wrappedSnapshot = indexShard.acquireLastIndexCommit(true); + final IndexCommit snapshotIndexCommit = wrappedSnapshot.get(); repository.snapshotShard( indexShard.store(), indexShard.mapperService(), snapshot.getSnapshotId(), 
indexId, - snapshotRef.get(), + wrappedSnapshot.get(), getShardStateId(indexShard, snapshotIndexCommit), snapshotStatus, version, userMetadata, - ActionListener.runBefore(listener, snapshotRef::close) + ActionListener.runBefore(listener, wrappedSnapshot::close) ); } catch (Exception e) { - IOUtils.close(snapshotRef); + IOUtils.close(wrappedSnapshot); throw e; } } catch (Exception e) { diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 745508135c6a1..5f98a05840562 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -82,6 +82,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.SetOnce; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -101,6 +103,7 @@ import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.logging.Loggers; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; @@ -154,8 +157,6 @@ import org.opensearch.test.IndexSettingsModule; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.ThreadPool; -import org.hamcrest.MatcherAssert; -import org.hamcrest.Matchers; import java.io.Closeable; import java.io.IOException; @@ -196,15 +197,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.shuffle; -import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_RESET; -import static 
org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; -import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; -import static org.opensearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; -import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.sameInstance; import static org.hamcrest.Matchers.contains; @@ -230,6 +222,15 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_RESET; +import static org.opensearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; +import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; +import static org.opensearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; public class InternalEngineTests extends EngineTestCase { @@ -1086,9 +1087,9 @@ public void testSyncTranslogConcurrently() throws Exception { final CheckedRunnable checker = () -> { assertThat(engine.getTranslogStats().getUncommittedOperations(), equalTo(0)); assertThat(engine.getLastSyncedGlobalCheckpoint(), 
equalTo(globalCheckpoint.get())); - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { SequenceNumbers.CommitInfo commitInfo = SequenceNumbers.loadSeqNoInfoFromLuceneCommit( - safeCommit.get().getUserData().entrySet() + wrappedSafeCommit.get().getUserData().entrySet() ); assertThat(commitInfo.localCheckpoint, equalTo(engine.getProcessedLocalCheckpoint())); } @@ -1504,8 +1505,8 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { globalCheckpoint.set(randomLongBetween(0, localCheckpoint)); engine.syncTranslog(); final long safeCommitCheckpoint; - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - safeCommitCheckpoint = Long.parseLong(safeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + safeCommitCheckpoint = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); @@ -1594,8 +1595,10 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc globalCheckpoint.set(randomLongBetween(0, engine.getPersistedLocalCheckpoint())); engine.syncTranslog(); final long minSeqNoToRetain; - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - long safeCommitLocalCheckpoint = Long.parseLong(safeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + long safeCommitLocalCheckpoint = Long.parseLong( + wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY) + ); minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps, safeCommitLocalCheckpoint + 1); } 
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); @@ -2613,7 +2616,7 @@ public void testSeqNoAndCheckpoints() throws IOException, InterruptedException { // this test writes documents to the engine while concurrently flushing/commit // and ensuring that the commit points contain the correct sequence number data public void testConcurrentWritesAndCommits() throws Exception { - List commits = new ArrayList<>(); + List> commits = new ArrayList<>(); try ( Store store = createStore(); InternalEngine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null)) @@ -2668,8 +2671,8 @@ public void testConcurrentWritesAndCommits() throws Exception { // now, verify all the commits have the correct docs according to the user commit data long prevLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; long prevMaxSeqNo = SequenceNumbers.NO_OPS_PERFORMED; - for (Engine.IndexCommitRef commitRef : commits) { - final IndexCommit commit = commitRef.get(); + for (GatedCloseable wrappedCommit : commits) { + final IndexCommit commit = wrappedCommit.get(); Map userData = commit.getUserData(); long localCheckpoint = userData.containsKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ? 
Long.parseLong(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) @@ -5617,7 +5620,7 @@ public void testAcquireIndexCommit() throws Exception { IOUtils.close(engine, store); store = createStore(); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final Engine.IndexCommitRef snapshot; + final GatedCloseable wrappedSnapshot; final boolean closeSnapshotBeforeEngine = randomBoolean(); try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) { int numDocs = between(1, 20); @@ -5630,9 +5633,9 @@ public void testAcquireIndexCommit() throws Exception { final boolean flushFirst = randomBoolean(); final boolean safeCommit = randomBoolean(); if (safeCommit) { - snapshot = engine.acquireSafeIndexCommit(); + wrappedSnapshot = engine.acquireSafeIndexCommit(); } else { - snapshot = engine.acquireLastIndexCommit(flushFirst); + wrappedSnapshot = engine.acquireLastIndexCommit(flushFirst); } int moreDocs = between(1, 20); for (int i = 0; i < moreDocs; i++) { @@ -5641,13 +5644,13 @@ public void testAcquireIndexCommit() throws Exception { globalCheckpoint.set(numDocs + moreDocs - 1); engine.flush(); // check that we can still read the commit that we captured - try (IndexReader reader = DirectoryReader.open(snapshot.get())) { + try (IndexReader reader = DirectoryReader.open(wrappedSnapshot.get())) { assertThat(reader.numDocs(), equalTo(flushFirst && safeCommit == false ? 
numDocs : 0)); } assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(2)); if (closeSnapshotBeforeEngine) { - snapshot.close(); + wrappedSnapshot.close(); // check it's clean up engine.flush(true, true); assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(1)); @@ -5655,7 +5658,7 @@ public void testAcquireIndexCommit() throws Exception { } if (closeSnapshotBeforeEngine == false) { - snapshot.close(); // shouldn't throw AlreadyClosedException + wrappedSnapshot.close(); // shouldn't throw AlreadyClosedException } } @@ -5719,7 +5722,7 @@ public void testCleanupCommitsWhenReleaseSnapshot() throws Exception { } engine.flush(false, randomBoolean()); int numSnapshots = between(1, 10); - final List snapshots = new ArrayList<>(); + final List> snapshots = new ArrayList<>(); for (int i = 0; i < numSnapshots; i++) { snapshots.add(engine.acquireSafeIndexCommit()); // taking snapshots from the safe commit. } @@ -6322,8 +6325,8 @@ public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException { .collect(Collectors.toSet()); assertThat(actualOps, containsInAnyOrder(expectedOps)); } - try (Engine.IndexCommitRef commitRef = engine.acquireSafeIndexCommit()) { - IndexCommit safeCommit = commitRef.get(); + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + IndexCommit safeCommit = wrappedSafeCommit.get(); if (safeCommit.getUserData().containsKey(Engine.MIN_RETAINED_SEQNO)) { lastMinRetainedSeqNo = Long.parseLong(safeCommit.getUserData().get(Engine.MIN_RETAINED_SEQNO)); } diff --git a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java index 772cda9efa56e..e04bf1a4f20f2 100644 --- a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java @@ -33,6 +33,7 @@ package org.opensearch.index.engine; import 
org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.store.LockObtainFailedException; @@ -41,6 +42,7 @@ import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.cluster.routing.ShardRoutingState; import org.opensearch.cluster.routing.TestShardRouting; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; @@ -114,8 +116,8 @@ public void testNoopAfterRegularEngine() throws IOException { final NoOpEngine noOpEngine = new NoOpEngine(noOpConfig(INDEX_SETTINGS, store, primaryTranslogDir, tracker)); assertThat(noOpEngine.getPersistedLocalCheckpoint(), equalTo(localCheckpoint)); assertThat(noOpEngine.getSeqNoStats(100L).getMaxSeqNo(), equalTo(maxSeqNo)); - try (Engine.IndexCommitRef ref = noOpEngine.acquireLastIndexCommit(false)) { - try (IndexReader reader = DirectoryReader.open(ref.get())) { + try (GatedCloseable wrappedCommit = noOpEngine.acquireLastIndexCommit(false)) { + try (IndexReader reader = DirectoryReader.open(wrappedCommit.get())) { assertThat(reader.numDocs(), equalTo(docs)); } } diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index 6485861f175c4..91fb1f9b1ff21 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -34,6 +34,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.TermQuery; @@ -44,6 
+45,7 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; +import org.junit.Assert; import org.opensearch.Assertions; import org.opensearch.OpenSearchException; import org.opensearch.Version; @@ -72,6 +74,7 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.lease.Releasable; @@ -142,7 +145,6 @@ import org.opensearch.test.VersionUtils; import org.opensearch.test.store.MockFSDirectoryFactory; import org.opensearch.threadpool.ThreadPool; -import org.junit.Assert; import java.io.IOException; import java.nio.charset.Charset; @@ -179,12 +181,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; -import static org.opensearch.common.lucene.Lucene.cleanLuceneIndex; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; -import static org.opensearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; @@ -204,6 +200,12 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; import static org.hamcrest.Matchers.sameInstance; +import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; +import static org.opensearch.common.lucene.Lucene.cleanLuceneIndex; +import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static 
org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.opensearch.test.hamcrest.RegexMatcher.matches; /** * Simple unit-test IndexShard related operations. @@ -4126,11 +4128,11 @@ public InternalEngine recoverFromTranslog(TranslogRecoveryRunner translogRecover try { readyToSnapshotLatch.await(); shard.snapshotStoreMetadata(); - try (Engine.IndexCommitRef indexCommitRef = shard.acquireLastIndexCommit(randomBoolean())) { - shard.store().getMetadata(indexCommitRef.get()); + try (GatedCloseable wrappedIndexCommit = shard.acquireLastIndexCommit(randomBoolean())) { + shard.store().getMetadata(wrappedIndexCommit.get()); } - try (Engine.IndexCommitRef indexCommitRef = shard.acquireSafeIndexCommit()) { - shard.store().getMetadata(indexCommitRef.get()); + try (GatedCloseable wrappedSafeCommit = shard.acquireSafeIndexCommit()) { + shard.store().getMetadata(wrappedSafeCommit.get()); } } catch (InterruptedException | IOException e) { throw new AssertionError(e); diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java index 720356cb49588..3890470f966ca 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java @@ -46,6 +46,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.SetOnce; +import org.junit.After; +import org.junit.Before; import org.opensearch.ExceptionsHelper; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -59,6 +61,7 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.concurrent.GatedCloseable; import 
org.opensearch.common.io.FileSystemUtils; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lucene.store.IndexOutputOutputStream; @@ -93,14 +96,12 @@ import org.opensearch.index.translog.Translog; import org.opensearch.test.CorruptionUtils; import org.opensearch.test.DummyShardLock; -import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.IndexSettingsModule; +import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.FixedExecutorBuilder; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; -import org.junit.After; -import org.junit.Before; import java.io.IOException; import java.io.OutputStream; @@ -650,7 +651,7 @@ public void testThrowExceptionOnPrimaryRelocatedBeforePhase1Started() throws IOE when(shard.seqNoStats()).thenReturn(mock(SeqNoStats.class)); when(shard.segmentStats(anyBoolean(), anyBoolean())).thenReturn(mock(SegmentsStats.class)); when(shard.isRelocatedPrimary()).thenReturn(true); - when(shard.acquireSafeIndexCommit()).thenReturn(mock(Engine.IndexCommitRef.class)); + when(shard.acquireSafeIndexCommit()).thenReturn(mock(GatedCloseable.class)); doAnswer(invocation -> { ((ActionListener) invocation.getArguments()[0]).onResponse(() -> {}); return null; diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index 97d3490db4a3d..69f7bef90d78f 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -61,6 +61,8 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.junit.After; +import org.junit.Before; import org.opensearch.Version; import org.opensearch.action.index.IndexRequest; import 
org.opensearch.action.support.replication.ReplicationResponse; @@ -74,6 +76,7 @@ import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.compress.CompressedXContent; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.Settings; @@ -113,12 +116,10 @@ import org.opensearch.indices.breaker.CircuitBreakerService; import org.opensearch.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.DummyShardLock; -import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.IndexSettingsModule; +import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; -import org.junit.After; -import org.junit.Before; import java.io.IOException; import java.nio.charset.Charset; @@ -143,14 +144,14 @@ import static java.util.Collections.emptyList; import static java.util.Collections.shuffle; -import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; -import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; -import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; -import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; +import static org.opensearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; +import static org.opensearch.index.engine.Engine.Operation.Origin.PRIMARY; +import static org.opensearch.index.engine.Engine.Operation.Origin.REPLICA; +import static org.opensearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy; public abstract class 
EngineTestCase extends OpenSearchTestCase { @@ -1388,8 +1389,8 @@ public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine e final long retainedOps = engine.config().getIndexSettings().getSoftDeleteRetentionOperations(); final long seqNoForRecovery; if (engine.config().getIndexSettings().isSoftDeleteEnabled()) { - try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) { - seqNoForRecovery = Long.parseLong(safeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; + try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { + seqNoForRecovery = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; } } else { seqNoForRecovery = engine.getMinRetainedSeqNo(); diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index b388ab8835ac4..09c5dfad486e9 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -32,6 +32,7 @@ package org.opensearch.index.shard; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; import org.apache.lucene.store.Directory; import org.opensearch.Version; import org.opensearch.action.admin.indices.flush.FlushRequest; @@ -52,6 +53,7 @@ import org.opensearch.common.Nullable; import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; +import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; @@ -113,10 +115,10 @@ import java.util.function.Consumer; import java.util.stream.Collectors; -import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; import static 
org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.opensearch.cluster.routing.TestShardRouting.newShardRouting; /** * A base class for unit tests that need to create and shutdown {@link IndexShard} instances easily, @@ -1030,13 +1032,13 @@ protected String snapshotShard(final IndexShard shard, final Snapshot snapshot, ); final PlainActionFuture future = PlainActionFuture.newFuture(); final String shardGen; - try (Engine.IndexCommitRef indexCommitRef = shard.acquireLastIndexCommit(true)) { + try (GatedCloseable wrappedIndexCommit = shard.acquireLastIndexCommit(true)) { repository.snapshotShard( shard.store(), shard.mapperService(), snapshot.getSnapshotId(), indexId, - indexCommitRef.get(), + wrappedIndexCommit.get(), null, snapshotStatus, Version.CURRENT, From b00b3ce2b51abaae1a2b282402b50861d7641793 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Thu, 10 Mar 2022 23:36:08 -0500 Subject: [PATCH 19/46] [Remove] Multiple Types from IndexTemplateMetadata (#2400) Removes multi-type support from IndexTemplateMetadata so that instead of holding a map of multiple types to mappings, it only returns a single mapping for a single type. Also removes type from documentMapper() method to avoid any accidental NullPointerExceptions in the internal mapping retrieval. 
Signed-off-by: Nicholas Walter Knize --- .../GetIndexTemplatesResponseTests.java | 10 +- .../percolator/PercolateQueryBuilder.java | 2 +- .../percolator/CandidateQueryTests.java | 2 +- .../PercolatorFieldMapperTests.java | 90 ++++++------ .../index/mapper/size/SizeMappingTests.java | 16 +-- .../upgrades/FullClusterRestartIT.java | 3 - .../20_missing_field.yml | 2 +- .../admin/indices/create/CreateIndexIT.java | 34 ++--- .../coordination/RareClusterStateIT.java | 4 +- .../document/DocumentActionsIT.java | 3 +- .../gateway/GatewayIndexStateIT.java | 14 +- .../mapper/MultiFieldsIntegrationIT.java | 2 - .../opensearch/index/shard/IndexShardIT.java | 5 +- .../indices/exists/types/TypesExistsIT.java | 128 ------------------ .../mapping/SimpleGetFieldMappingsIT.java | 81 ++++------- .../mapping/UpdateMappingIntegrationIT.java | 18 ++- .../search/fields/SearchFieldsIT.java | 4 +- .../search/query/SearchQueryIT.java | 35 ----- .../CreateIndexClusterStateUpdateRequest.java | 10 +- .../indices/create/CreateIndexRequest.java | 83 ++++++++---- .../get/GetFieldMappingsIndexRequest.java | 34 +++-- .../mapping/get/GetFieldMappingsRequest.java | 23 ++-- .../get/GetFieldMappingsRequestBuilder.java | 10 -- .../mapping/get/GetFieldMappingsResponse.java | 78 +++++------ .../mapping/get/GetMappingsResponse.java | 4 +- .../get/TransportGetFieldMappingsAction.java | 5 +- .../TransportGetFieldMappingsIndexAction.java | 34 +---- .../rollover/MetadataRolloverService.java | 4 +- .../TransportSimulateIndexTemplateAction.java | 12 +- .../metadata/IndexTemplateMetadata.java | 120 +++++++--------- .../metadata/MetadataCreateIndexService.java | 126 ++++++----------- .../MetadataIndexTemplateService.java | 5 + .../index/mapper/DocumentMapper.java | 2 +- .../index/mapper/MapperService.java | 20 +-- .../index/query/QueryShardContext.java | 6 +- .../index/query/TypeQueryBuilder.java | 2 +- .../opensearch/index/shard/IndexShard.java | 12 +- .../index/termvectors/TermVectorsService.java | 2 +- 
.../indices/RestGetFieldMappingAction.java | 27 +--- .../create/CreateIndexRequestTests.java | 2 +- .../get/GetFieldMappingsResponseTests.java | 36 ++--- .../rollover/RolloverRequestTests.java | 5 +- .../opensearch/cluster/ClusterStateTests.java | 14 +- .../metadata/IndexTemplateMetadataTests.java | 4 +- .../MetadataCreateIndexServiceTests.java | 59 ++++---- .../MetadataIndexTemplateServiceTests.java | 22 ++- .../metadata/MetadataMappingServiceTests.java | 14 +- .../metadata/ToAndFromJsonMetadataTests.java | 24 +--- .../index/mapper/AllFieldMapperTests.java | 2 +- .../index/mapper/DocumentParserTests.java | 2 +- .../mapper/FieldFilterMapperPluginTests.java | 15 +- .../index/mapper/MapperServiceTests.java | 26 ---- .../index/mapper/UpdateMappingTests.java | 80 ++++++----- .../index/query/TypeQueryBuilderTests.java | 2 +- .../index/shard/IndexShardTests.java | 46 +------ .../completion/GeoContextMappingTests.java | 24 ++-- 56 files changed, 530 insertions(+), 919 deletions(-) delete mode 100644 server/src/internalClusterTest/java/org/opensearch/indices/exists/types/TypesExistsIT.java diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java index b28da63e4344a..ead5fd4087c0b 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/indices/GetIndexTemplatesResponseTests.java @@ -83,7 +83,7 @@ public void testFromXContent() throws IOException { .test(); } - public void testParsingFromEsResponse() throws IOException { + public void testParsingFromOpenSearchResponse() throws IOException { for (int runs = 0; runs < 20; runs++) { org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse esResponse = new 
org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse(new ArrayList<>()); @@ -131,8 +131,7 @@ public void testParsingFromEsResponse() throws IOException { assertThat(result.order(), equalTo(esIMD.order())); assertThat(result.version(), equalTo(esIMD.version())); - assertThat(esIMD.mappings().size(), equalTo(1)); - BytesReference mappingSource = esIMD.mappings().valuesIt().next().uncompressed(); + BytesReference mappingSource = esIMD.mappings().uncompressed(); Map expectedMapping = XContentHelper.convertToMap(mappingSource, true, xContentBuilder.contentType()) .v2(); assertThat(result.mappings().sourceAsMap(), equalTo(expectedMapping.get("_doc"))); @@ -224,7 +223,10 @@ static void toXContent(GetIndexTemplatesResponse response, XContentBuilder build serverTemplateBuilder.order(clientITMD.order()); serverTemplateBuilder.version(clientITMD.version()); if (clientITMD.mappings() != null) { - serverTemplateBuilder.putMapping(MapperService.SINGLE_MAPPING_NAME, clientITMD.mappings().source()); + // The client-side mappings never include a wrapping type, but server-side mappings + // for index templates still do so we need to wrap things here + String mappings = "{\"" + MapperService.SINGLE_MAPPING_NAME + "\": " + clientITMD.mappings().source().string() + "}"; + serverTemplateBuilder.putMapping(MapperService.SINGLE_MAPPING_NAME, mappings); } serverIndexTemplates.add(serverTemplateBuilder.build()); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index 672d4dd15a254..75a7757ba8a5a 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -584,7 +584,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { ); } } - docMapper = mapperService.documentMapper(type); + 
docMapper = mapperService.documentMapper(); for (BytesReference document : documents) { docs.add(docMapper.parse(new SourceToParse(context.index().getName(), type, "_temp_id", document, documentXContentType))); } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java index 871351ad5b2c2..4058548f052f8 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java @@ -209,7 +209,7 @@ public void init() throws Exception { .endObject() ); mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); - fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField); + fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper().mappers().getMapper(queryField); fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType(); queries = new ArrayList<>(); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index 42826d2a368ad..691c3b648cd6a 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -161,7 +161,6 @@ public void init() throws Exception { String mapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("doc") .startObject("properties") .startObject("field") .field("type", "text") @@ -204,9 +203,8 @@ public void init() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - mapperService.merge("doc", new CompressedXContent(mapper), 
MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); } private void addQueryFieldMappings() throws Exception { @@ -214,16 +212,18 @@ private void addQueryFieldMappings() throws Exception { String percolatorMapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("doc") .startObject("properties") .startObject(fieldName) .field("type", "percolator") .endObject() .endObject() .endObject() - .endObject() ); - mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); + mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(percolatorMapper), + MapperService.MergeReason.MAPPING_UPDATE + ); fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fieldType(fieldName); } @@ -235,7 +235,7 @@ public void testExtractTerms() throws Exception { TermQuery termQuery2 = new TermQuery(new Term("field", "term2")); bq.add(termQuery2, Occur.SHOULD); - DocumentMapper documentMapper = mapperService.documentMapper("doc"); + DocumentMapper documentMapper = mapperService.documentMapper(); PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -295,7 +295,7 @@ public void testExtractRanges() throws Exception { Query rangeQuery2 = mapperService.fieldType("number_field1").rangeQuery(15, 20, true, true, null, null, null, context); bq.add(rangeQuery2, Occur.MUST); - DocumentMapper documentMapper = mapperService.documentMapper("doc"); + DocumentMapper documentMapper = mapperService.documentMapper(); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) .numberOfShards(1) @@ -354,7 
+354,7 @@ public void testExtractRanges() throws Exception { public void testExtractTermsAndRanges_failed() throws Exception { addQueryFieldMappings(); TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true); - DocumentMapper documentMapper = mapperService.documentMapper("doc"); + DocumentMapper documentMapper = mapperService.documentMapper(); PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -380,7 +380,7 @@ public void testExtractTermsAndRanges_failed() throws Exception { public void testExtractTermsAndRanges_partial() throws Exception { addQueryFieldMappings(); PhraseQuery phraseQuery = new PhraseQuery("field", "term"); - DocumentMapper documentMapper = mapperService.documentMapper("doc"); + DocumentMapper documentMapper = mapperService.documentMapper(); PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); IndexMetadata build = IndexMetadata.builder("") .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -549,11 +549,11 @@ public void testExtractTermsAndRanges_numberFields() throws Exception { public void testPercolatorFieldMapper() throws Exception { addQueryFieldMappings(); QueryBuilder queryBuilder = termQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -570,11 +570,11 @@ public void testPercolatorFieldMapper() throws Exception { // add an query for which we don't extract terms from queryBuilder = 
rangeQuery("field").from("a").to("z"); - doc = mapperService.documentMapper("doc") + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -592,7 +592,7 @@ public void testPercolatorFieldMapper() throws Exception { .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -617,11 +617,11 @@ public void testStoringQueries() throws Exception { // (it can't use shard data for rewriting purposes, because percolator queries run on MemoryIndex) for (QueryBuilder query : queries) { - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, query).endObject()), XContentType.JSON @@ -636,11 +636,11 @@ public void testQueryWithRewrite() throws Exception { addQueryFieldMappings(); client().prepareIndex("remote").setId("1").setSource("field", "value").get(); QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "1", "field")); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -661,11 +661,11 @@ public void testQueryWithRewrite() throws Exception { public void testPercolatorFieldMapperUnMappedField() throws Exception { addQueryFieldMappings(); MapperParsingException exception = 
expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper("doc") + mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field(fieldName, termQuery("unmapped_field", "value")).endObject() @@ -680,11 +680,11 @@ public void testPercolatorFieldMapperUnMappedField() throws Exception { public void testPercolatorFieldMapper_noQuery() throws Exception { addQueryFieldMappings(); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON @@ -693,11 +693,11 @@ public void testPercolatorFieldMapper_noQuery() throws Exception { assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); try { - mapperService.documentMapper("doc") + mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject()), XContentType.JSON @@ -716,7 +716,6 @@ public void testAllowNoAdditionalSettings() throws Exception { String percolatorMapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("doc") .startObject("properties") .startObject(fieldName) .field("type", "percolator") @@ -724,18 +723,21 @@ public void testAllowNoAdditionalSettings() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new 
CompressedXContent(percolatorMapper), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); } // multiple percolator fields are allowed in the mapping, but only one field can be used at index time. public void testMultiplePercolatorFields() throws Exception { - String typeName = "doc"; + String typeName = MapperService.SINGLE_MAPPING_NAME; String percolatorMapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() @@ -754,7 +756,7 @@ public void testMultiplePercolatorFields() throws Exception { mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName) + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", @@ -776,7 +778,7 @@ public void testMultiplePercolatorFields() throws Exception { // percolator field can be nested under an object field, but only one query can be specified per document public void testNestedPercolatorField() throws Exception { - String typeName = "doc"; + String typeName = MapperService.SINGLE_MAPPING_NAME; String percolatorMapper = Strings.toString( XContentFactory.jsonBuilder() .startObject() @@ -797,7 +799,7 @@ public void testNestedPercolatorField() throws Exception { mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); QueryBuilder queryBuilder = matchQuery("field", "value"); - ParsedDocument doc = mapperService.documentMapper(typeName) + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", @@ -817,7 +819,7 @@ public void testNestedPercolatorField() throws Exception { BytesRef queryBuilderAsBytes = queryBuilderField.binaryValue(); assertQueryBuilder(queryBuilderAsBytes, queryBuilder); - doc = 
mapperService.documentMapper(typeName) + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", @@ -840,7 +842,7 @@ public void testNestedPercolatorField() throws Exception { assertQueryBuilder(queryBuilderAsBytes, queryBuilder); MapperParsingException e = expectThrows(MapperParsingException.class, () -> { - mapperService.documentMapper(typeName) + mapperService.documentMapper() .parse( new SourceToParse( "test", @@ -948,11 +950,11 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { query.endObject(); query.endObject(); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -996,11 +998,11 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { query.endObject(); query.endObject(); - doc = mapperService.documentMapper("doc") + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -1091,11 +1093,11 @@ public void testDuplicatedClauses() throws Exception { QueryBuilder qb = boolQuery().must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))); - ParsedDocument doc = mapperService.documentMapper("doc") + ParsedDocument doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), XContentType.JSON @@ -1117,11 +1119,11 @@ public void testDuplicatedClauses() throws Exception { .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))) .must(boolQuery().must(termQuery("field", 
"value3")).must(termQuery("field", "value4"))) .must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); - doc = mapperService.documentMapper("doc") + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), XContentType.JSON @@ -1146,11 +1148,11 @@ public void testDuplicatedClauses() throws Exception { .should(boolQuery().should(termQuery("field", "value2")).should(termQuery("field", "value3"))) .should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4"))) .should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); - doc = mapperService.documentMapper("doc") + doc = mapperService.documentMapper() .parse( new SourceToParse( "test", - "doc", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), XContentType.JSON diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java index e39439c1a3b4f..c1fb3d8083151 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java @@ -63,10 +63,10 @@ protected Collection> getPlugins() { public void testSizeEnabled() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); - DocumentMapper docMapper = service.mapperService().documentMapper("type"); + DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = 
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON)); boolean stored = false; boolean points = false; @@ -80,27 +80,27 @@ public void testSizeEnabled() throws Exception { public void testSizeDisabled() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false"); - DocumentMapper docMapper = service.mapperService().documentMapper("type"); + DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } public void testSizeNotSet() throws Exception { - IndexService service = createIndex("test", Settings.EMPTY, "type"); - DocumentMapper docMapper = service.mapperService().documentMapper("type"); + IndexService service = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME); + DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", "type", "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } public void 
testThatDisablingWorksWhenMerging() throws Exception { IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); - DocumentMapper docMapper = service.mapperService().documentMapper("type"); + DocumentMapper docMapper = service.mapperService().documentMapper(); assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true)); String disabledMapping = Strings.toString( diff --git a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java index a67c5581cba92..a8302fdd6bc76 100644 --- a/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/opensearch/upgrades/FullClusterRestartIT.java @@ -664,9 +664,6 @@ public void testEmptyShard() throws IOException { // before timing out .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster - if (randomBoolean()) { - settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1"); - } createIndex(index, settings.build()); } ensureGreen(index); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml index be6b1c3bb6d49..a65908b238013 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml @@ -1,5 +1,5 @@ --- -"Return empty object if field doesn't exist, but type and index do": +"Return empty object if field doesn't exist, but index does": - do: indices.create: index: test_index diff --git 
a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java index 311767d82ac6c..476bd72ee3ca3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java @@ -52,19 +52,18 @@ import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.IndexService; import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.RangeQueryBuilder; import org.opensearch.indices.IndicesService; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import java.util.HashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_WAIT_FOR_ACTIVE_SHARDS; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBlocked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertRequestBuilderThrows; @@ -109,28 +108,6 @@ public void testCreationDateGenerated() { assertThat(index.getCreationDate(), allOf(lessThanOrEqualTo(timeAfterRequest), greaterThanOrEqualTo(timeBeforeRequest))); } - public void testDoubleAddMapping() throws Exception { - try { - prepareCreate("test").addMapping("type1", "date", "type=date").addMapping("type1", "num", "type=integer"); - fail("did not hit expected exception"); - } catch (IllegalStateException ise) { - // expected - } - try { - 
prepareCreate("test").addMapping("type1", new HashMap()).addMapping("type1", new HashMap()); - fail("did not hit expected exception"); - } catch (IllegalStateException ise) { - // expected - } - try { - prepareCreate("test").addMapping("type1", jsonBuilder().startObject().endObject()) - .addMapping("type1", jsonBuilder().startObject().endObject()); - fail("did not hit expected exception"); - } catch (IllegalStateException ise) { - // expected - } - } - public void testNonNestedMappings() throws Exception { assertAcked( prepareCreate("test").addMapping( @@ -168,11 +145,16 @@ public void testMappingParamAndNestedMismatch() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, () -> prepareCreate("test").addMapping( - "type1", + MapperService.SINGLE_MAPPING_NAME, XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject() ).get() ); - assertThat(e.getMessage(), startsWith("Failed to parse mapping [type1]: Root mapping definition has unsupported parameters")); + assertThat( + e.getMessage(), + startsWith( + "Failed to parse mapping [" + MapperService.SINGLE_MAPPING_NAME + "]: Root mapping definition has unsupported parameters" + ) + ); } public void testEmptyMappings() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java index 9e3a693d9bdc4..3060b5c23fe75 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java @@ -365,7 +365,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { final IndexService indexService = indicesService.indexServiceSafe(index); assertNotNull(indexService); final MapperService mapperService = indexService.mapperService(); - DocumentMapper 
mapper = mapperService.documentMapper(MapperService.SINGLE_MAPPING_NAME); + DocumentMapper mapper = mapperService.documentMapper(); assertNotNull(mapper); assertNotNull(mapper.mappers().getMapper("field")); }); @@ -389,7 +389,7 @@ public void testDelayedMappingPropagationOnReplica() throws Exception { final IndexService indexService = indicesService.indexServiceSafe(index); assertNotNull(indexService); final MapperService mapperService = indexService.mapperService(); - DocumentMapper mapper = mapperService.documentMapper(MapperService.SINGLE_MAPPING_NAME); + DocumentMapper mapper = mapperService.documentMapper(); assertNotNull(mapper); assertNotNull(mapper.mappers().getMapper("field2")); }); diff --git a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java index f285d8a6f291f..f3693be3b7227 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java @@ -47,6 +47,7 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; @@ -182,7 +183,7 @@ public void testIndexActions() throws Exception { // test successful SearchResponse countResponse = client().prepareSearch("test") .setSize(0) - .setQuery(termQuery("_type", "type1")) + .setQuery(termQuery("_type", MapperService.SINGLE_MAPPING_NAME)) .execute() .actionGet(); assertNoFailures(countResponse); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java index 1779fe025887a..6fe22e2a8fde4 100644 
--- a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java @@ -60,6 +60,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.indices.IndexClosedException; import org.opensearch.indices.ShardLimitValidator; import org.opensearch.test.OpenSearchIntegTestCase; @@ -108,14 +109,7 @@ public void testMappingMetadataParsed() throws Exception { .prepareCreate("test") .addMapping( "type1", - XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_routing") - .field("required", true) - .endObject() - .endObject() - .endObject() + XContentFactory.jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject() ) .execute() .actionGet(); @@ -130,7 +124,7 @@ public void testMappingMetadataParsed() throws Exception { .metadata() .index("test") .getMappings() - .get("type1"); + .get(MapperService.SINGLE_MAPPING_NAME); assertThat(mappingMd.routing().required(), equalTo(true)); logger.info("--> restarting nodes..."); @@ -149,7 +143,7 @@ public void testMappingMetadataParsed() throws Exception { .metadata() .index("test") .getMappings() - .get("type1"); + .get(MapperService.SINGLE_MAPPING_NAME); assertThat(mappingMd.routing().required(), equalTo(true)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java index c9f3ddbc9e8b1..37fa8cdd11a8b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java @@ -208,7 +208,6 @@ private 
XContentBuilder createTypeSource() throws IOException { private XContentBuilder createPutMappingSource() throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("my-type") .startObject("properties") .startObject("title") .field("type", "text") @@ -220,7 +219,6 @@ private XContentBuilder createPutMappingSource() throws IOException { .endObject() .endObject() .endObject() - .endObject() .endObject(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java index 11667fca8fce1..4c200720a3af6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java @@ -74,6 +74,7 @@ import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.NoOpEngine; import org.opensearch.index.flush.FlushStats; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SourceToParse; import org.opensearch.index.seqno.RetentionLeaseSyncer; import org.opensearch.index.seqno.SequenceNumbers; @@ -445,7 +446,7 @@ public void testMaybeRollTranslogGeneration() throws Exception { .put("index.number_of_shards", 1) .put("index.translog.generation_threshold_size", generationThreshold + "b") .build(); - createIndex("test", settings, "test"); + createIndex("test", settings, MapperService.SINGLE_MAPPING_NAME); ensureGreen("test"); final IndicesService indicesService = getInstanceFromNode(IndicesService.class); final IndexService test = indicesService.indexService(resolveIndex("test")); @@ -459,7 +460,7 @@ public void testMaybeRollTranslogGeneration() throws Exception { final Engine.IndexResult result = shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse("test", "test", "1", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("test", 
MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/exists/types/TypesExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/exists/types/TypesExistsIT.java deleted file mode 100644 index 5d219159e1b5f..0000000000000 --- a/server/src/internalClusterTest/java/org/opensearch/indices/exists/types/TypesExistsIT.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.indices.exists.types; - -import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.action.admin.indices.exists.types.TypesExistsResponse; -import org.opensearch.client.Client; -import org.opensearch.cluster.metadata.IndexMetadata; -import org.opensearch.index.IndexNotFoundException; -import org.opensearch.plugins.Plugin; -import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_READ; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_BLOCKS_WRITE; -import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_READ_ONLY; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBlocked; -import static org.hamcrest.Matchers.equalTo; - -public class TypesExistsIT extends OpenSearchIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(InternalSettingsPlugin.class); - } - - public void testSimple() throws Exception { - Client client = client(); - CreateIndexResponse response1 = client.admin() - .indices() - .prepareCreate("test1") - .addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject()) - .execute() - .actionGet(); - CreateIndexResponse response2 = client.admin() - .indices() - .prepareCreate("test2") - .addMapping("type2", jsonBuilder().startObject().startObject("type2").endObject().endObject()) - .execute() - .actionGet(); - client.admin().indices().prepareAliases().addAlias("test1", "alias1").execute().actionGet(); - assertAcked(response1); - assertAcked(response2); - - TypesExistsResponse 
response = client.admin().indices().prepareTypesExists("test1").setTypes("type1").execute().actionGet(); - assertThat(response.isExists(), equalTo(true)); - response = client.admin().indices().prepareTypesExists("test1").setTypes("type2").execute().actionGet(); - assertThat(response.isExists(), equalTo(false)); - try { - client.admin().indices().prepareTypesExists("notExist").setTypes("type1").execute().actionGet(); - fail("Exception should have been thrown"); - } catch (IndexNotFoundException e) {} - try { - client.admin().indices().prepareTypesExists("notExist").setTypes("type0").execute().actionGet(); - fail("Exception should have been thrown"); - } catch (IndexNotFoundException e) {} - response = client.admin().indices().prepareTypesExists("alias1").setTypes("type1").execute().actionGet(); - assertThat(response.isExists(), equalTo(true)); - response = client.admin().indices().prepareTypesExists("*").setTypes("type1").execute().actionGet(); - assertThat(response.isExists(), equalTo(false)); - response = client.admin().indices().prepareTypesExists("test1", "test2").setTypes("type1").execute().actionGet(); - assertThat(response.isExists(), equalTo(false)); - response = client.admin().indices().prepareTypesExists("test1", "test2").setTypes("type2").execute().actionGet(); - assertThat(response.isExists(), equalTo(false)); - } - - public void testTypesExistsWithBlocks() throws IOException { - assertAcked(prepareCreate("ro").addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject())); - ensureGreen("ro"); - - // Request is not blocked - for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { - try { - enableIndexBlock("ro", block); - assertThat( - client().admin().indices().prepareTypesExists("ro").setTypes("type1").execute().actionGet().isExists(), - equalTo(true) - ); - } finally { - disableIndexBlock("ro", block); - } - } - - // Request is blocked - try { - enableIndexBlock("ro", 
IndexMetadata.SETTING_BLOCKS_METADATA); - assertBlocked(client().admin().indices().prepareTypesExists("ro").setTypes("type1")); - } finally { - disableIndexBlock("ro", IndexMetadata.SETTING_BLOCKS_METADATA); - } - } -} diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java index da0f88276f2fa..92a65f43361d6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -77,7 +77,7 @@ public void testGetMappingsWhereThereAreNone() { assertThat(response.mappings().size(), equalTo(1)); assertThat(response.mappings().get("index").size(), equalTo(0)); - assertThat(response.fieldMappings("index", "type", "field"), nullValue()); + assertThat(response.fieldMappings("index", "field"), nullValue()); } private XContentBuilder getMappingForType(String type) throws IOException { @@ -112,48 +112,26 @@ public void testGetFieldMappings() throws Exception { GetFieldMappingsResponse response = client().admin() .indices() .prepareGetFieldMappings("indexa") - .setTypes("typeA") .setFields("field1", "obj.subfield") .get(); - assertThat(response.fieldMappings("indexa", "typeA", "field1").fullName(), equalTo("field1")); - assertThat(response.fieldMappings("indexa", "typeA", "field1").sourceAsMap(), hasKey("field1")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); + assertThat(response.fieldMappings("indexa", "field1").fullName(), equalTo("field1")); + assertThat(response.fieldMappings("indexa", "field1").sourceAsMap(), hasKey("field1")); + 
assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield")); + assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield")); // Get mappings by name - response = client().admin().indices().prepareGetFieldMappings("indexa").setTypes("typeA").setFields("field1", "obj.subfield").get(); - assertThat(response.fieldMappings("indexa", "typeA", "field1").fullName(), equalTo("field1")); - assertThat(response.fieldMappings("indexa", "typeA", "field1").sourceAsMap(), hasKey("field1")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexa", "typeB", "field1"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); + response = client().admin().indices().prepareGetFieldMappings("indexa").setFields("field1", "obj.subfield").get(); + assertThat(response.fieldMappings("indexa", "field1").fullName(), equalTo("field1")); + assertThat(response.fieldMappings("indexa", "field1").sourceAsMap(), hasKey("field1")); + assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield")); + assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield")); // get mappings by name across multiple indices - response = client().admin().indices().prepareGetFieldMappings().setTypes("typeA").setFields("obj.subfield").get(); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexa", "typeB", "obj.subfield"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield"), nullValue()); - - // get mappings by 
name across multiple types - response = client().admin().indices().prepareGetFieldMappings("indexa").setFields("obj.subfield").get(); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "field1"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); - - // get mappings by name across multiple types & indices response = client().admin().indices().prepareGetFieldMappings().setFields("obj.subfield").get(); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexa", "typeA", "field1"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); - assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield").fullName(), equalTo("obj.subfield")); - assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield").sourceAsMap(), hasKey("subfield")); - assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue()); + assertThat(response.fieldMappings("indexa", "obj.subfield").fullName(), equalTo("obj.subfield")); + assertThat(response.fieldMappings("indexa", "obj.subfield").sourceAsMap(), hasKey("subfield")); + assertThat(response.fieldMappings("indexb", "obj.subfield").fullName(), equalTo("obj.subfield")); + assertThat(response.fieldMappings("indexb", "obj.subfield").sourceAsMap(), hasKey("subfield")); } @SuppressWarnings("unchecked") @@ -169,25 +147,16 @@ public void testSimpleGetFieldMappingsWithDefaults() throws Exception { .includeDefaults(true) .get(); + assertThat((Map) 
response.fieldMappings("test", "num").sourceAsMap().get("num"), hasEntry("index", Boolean.TRUE)); + assertThat((Map) response.fieldMappings("test", "num").sourceAsMap().get("num"), hasEntry("type", "long")); assertThat( - (Map) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), + (Map) response.fieldMappings("test", "field1").sourceAsMap().get("field1"), hasEntry("index", Boolean.TRUE) ); - assertThat((Map) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("type", "long")); + assertThat((Map) response.fieldMappings("test", "field1").sourceAsMap().get("field1"), hasEntry("type", "text")); + assertThat((Map) response.fieldMappings("test", "field2").sourceAsMap().get("field2"), hasEntry("type", "text")); assertThat( - (Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), - hasEntry("index", Boolean.TRUE) - ); - assertThat( - (Map) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), - hasEntry("type", "text") - ); - assertThat( - (Map) response.fieldMappings("test", "type", "field2").sourceAsMap().get("field2"), - hasEntry("type", "text") - ); - assertThat( - (Map) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), + (Map) response.fieldMappings("test", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("type", "keyword") ); } @@ -198,12 +167,12 @@ public void testGetFieldMappingsWithFieldAlias() throws Exception { GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().setFields("alias", "field1").get(); - FieldMappingMetadata aliasMapping = response.fieldMappings("test", "type", "alias"); + FieldMappingMetadata aliasMapping = response.fieldMappings("test", "alias"); assertThat(aliasMapping.fullName(), equalTo("alias")); assertThat(aliasMapping.sourceAsMap(), hasKey("alias")); assertThat((Map) aliasMapping.sourceAsMap().get("alias"), hasEntry("type", "alias")); - FieldMappingMetadata 
field1Mapping = response.fieldMappings("test", "type", "field1"); + FieldMappingMetadata field1Mapping = response.fieldMappings("test", "field1"); assertThat(field1Mapping.fullName(), equalTo("field1")); assertThat(field1Mapping.sourceAsMap(), hasKey("field1")); } @@ -216,7 +185,6 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { GetFieldMappingsResponse response = client().admin() .indices() .prepareGetFieldMappings("index") - .setTypes("type") .setFields("field1", "obj.subfield") .get(); XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint(); @@ -229,7 +197,7 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { params.put("pretty", "false"); - response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); + response = client().admin().indices().prepareGetFieldMappings("index").setFields("field1", "obj.subfield").get(); responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); responseStrings = Strings.toString(responseBuilder); @@ -249,10 +217,9 @@ public void testGetFieldMappingsWithBlocks() throws Exception { GetFieldMappingsResponse response = client().admin() .indices() .prepareGetFieldMappings("test") - .setTypes("_doc") .setFields("field1", "obj.subfield") .get(); - assertThat(response.fieldMappings("test", "_doc", "field1").fullName(), equalTo("field1")); + assertThat(response.fieldMappings("test", "field1").fullName(), equalTo("field1")); } finally { disableIndexBlock("test", block); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java index 32584a9e33b52..a325bbc62f8a8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java 
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -205,7 +205,10 @@ public void testUpdateMappingWithConflicts() { client().admin() .indices() .preparePutMapping("test") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}", XContentType.JSON) + .setSource( + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"integer\"}}}}", + XContentType.JSON + ) .execute() .actionGet(); fail("Expected MergeMappingException"); @@ -225,7 +228,10 @@ public void testUpdateMappingWithNormsConflicts() { client().admin() .indices() .preparePutMapping("test") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}", XContentType.JSON) + .setSource( + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": true }}}}", + XContentType.JSON + ) .execute() .actionGet(); fail("Expected MergeMappingException"); @@ -242,7 +248,11 @@ public void testUpdateMappingNoChanges() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0)) - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .addMapping( + MapperService.SINGLE_MAPPING_NAME, + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", + XContentType.JSON + ) .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -250,7 +260,7 @@ public void testUpdateMappingNoChanges() { AcknowledgedResponse putMappingResponse = client().admin() .indices() .preparePutMapping("test") - .setSource("{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setSource("{\"properties\":{\"body\":{\"type\":\"text\"}}}", XContentType.JSON) .execute() .actionGet(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index d5cd358612a60..72c60e98ec328 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -486,7 +486,7 @@ public void testIdBasedScriptFields() throws Exception { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); assertThat(fields, equalTo(singleton("type"))); - assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo("type1")); + assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME)); } response = client().prepareSearch() @@ -504,7 +504,7 @@ public void testIdBasedScriptFields() throws Exception { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); assertThat(fields, equalTo(newHashSet("type", "id"))); - assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo("type1")); + assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME)); assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i))); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index 69a8fa138d1d6..db87269c8ceae 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -1987,41 +1987,6 @@ public void 
testRangeQueryRangeFields_24744() throws Exception { assertHitCount(searchResponse, 1); } - public void testRangeQueryTypeField_31476() throws Exception { - assertAcked(prepareCreate("test").addMapping("foo", "field", "type=keyword")); - - client().prepareIndex("test").setId("1").setSource("field", "value").get(); - refresh(); - - RangeQueryBuilder range = new RangeQueryBuilder("_type").from("ape").to("zebra"); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 1); - - range = new RangeQueryBuilder("_type").from("monkey").to("zebra"); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 0); - - range = new RangeQueryBuilder("_type").from("ape").to("donkey"); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 0); - - range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(false); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 0); - - range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(true); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 1); - - range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(false); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 0); - - range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(true); - searchResponse = client().prepareSearch("test").setQuery(range).get(); - assertHitCount(searchResponse, 1); - } - public void testNestedQueryWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java 
b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java index 27625028887f9..5ca6fb4226b64 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexClusterStateUpdateRequest.java @@ -41,9 +41,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.index.Index; -import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.Set; /** @@ -61,7 +59,7 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ private Settings settings = Settings.Builder.EMPTY_SETTINGS; - private final Map mappings = new HashMap<>(); + private String mappings = "{}"; private final Set aliases = new HashSet<>(); @@ -80,8 +78,8 @@ public CreateIndexClusterStateUpdateRequest settings(Settings settings) { return this; } - public CreateIndexClusterStateUpdateRequest mappings(Map mappings) { - this.mappings.putAll(mappings); + public CreateIndexClusterStateUpdateRequest mappings(String mappings) { + this.mappings = mappings; return this; } @@ -122,7 +120,7 @@ public Settings settings() { return settings; } - public Map mappings() { + public String mappings() { return mappings; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index b8a3b284273ae..dd8fcdec1ddf8 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -35,6 +35,7 @@ import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchGenerationException; import org.opensearch.OpenSearchParseException; +import org.opensearch.Version; import 
org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.IndicesRequest; import org.opensearch.action.admin.indices.alias.Alias; @@ -46,7 +47,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.settings.Settings; @@ -58,9 +58,10 @@ import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; -import java.util.HashMap; +import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Objects; @@ -92,7 +93,7 @@ public class CreateIndexRequest extends AcknowledgedRequest private Settings settings = EMPTY_SETTINGS; - private final Map mappings = new HashMap<>(); + private String mappings = "{}"; private final Set aliases = new HashSet<>(); @@ -103,11 +104,21 @@ public CreateIndexRequest(StreamInput in) throws IOException { cause = in.readString(); index = in.readString(); settings = readSettingsFromStream(in); - int size = in.readVInt(); - for (int i = 0; i < size; i++) { - final String type = in.readString(); - String source = in.readString(); - mappings.put(type, source); + if (in.getVersion().before(Version.V_2_0_0)) { + int size = in.readVInt(); + if (size == 1) { + String type = in.readString(); + if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + throw new IllegalArgumentException( + "Expected to receive mapping type of [" + MapperService.SINGLE_MAPPING_NAME + "] but got [" + type + "]" + ); + } + mappings = in.readString(); + } else if (size != 0) { + throw new IllegalStateException("Expected to read 0 or 1 mappings, but received " + size); + } + } 
else { + mappings = in.readString(); } int aliasesSize = in.readVInt(); for (int i = 0; i < aliasesSize; i++) { @@ -221,6 +232,19 @@ public CreateIndexRequest settings(Map source) { return this; } + /** + * Set the mapping for this index + * + * The mapping should be in the form of a JSON string, with an outer _doc key + *

+     *     .mapping("{\"_doc\":{\"properties\": ... }}")
+     * 
+ */ + public CreateIndexRequest mapping(String mapping) { + this.mappings = mapping; + return this; + } + /** * Adds mapping that will be added when the index gets created. * @@ -249,14 +273,6 @@ private CreateIndexRequest mapping(String type, BytesReference source, XContentT return mapping(type, mappingAsMap); } - /** - * The cause for this index creation. - */ - public CreateIndexRequest cause(String cause) { - this.cause = cause; - return this; - } - /** * Adds mapping that will be added when the index gets created. * @@ -278,18 +294,17 @@ public CreateIndexRequest mapping(String type, XContentBuilder source) { */ @Deprecated public CreateIndexRequest mapping(String type, Map source) { - if (mappings.containsKey(type)) { - throw new IllegalStateException("mappings for type \"" + type + "\" were already defined"); - } // wrap it in a type map if its not if (source.size() != 1 || !source.containsKey(type)) { - source = MapBuilder.newMapBuilder().put(type, source).map(); + source = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, source); + } else if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { + // if it has a different type name, then unwrap and rewrap with _doc + source = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, source.get(type)); } try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); - mappings.put(type, Strings.toString(builder)); - return this; + return mapping(Strings.toString(builder)); } catch (IOException e) { throw new OpenSearchGenerationException("Failed to generate [" + source + "]", e); } @@ -306,6 +321,14 @@ public CreateIndexRequest mapping(String type, Object... source) { return this; } + /** + * The cause for this index creation. 
+ */ + public CreateIndexRequest cause(String cause) { + this.cause = cause; + return this; + } + /** * Sets the aliases that will be associated with the index when it gets created */ @@ -421,7 +444,7 @@ public CreateIndexRequest source(Map source, DeprecationHandler depre return this; } - public Map mappings() { + public String mappings() { return this.mappings; } @@ -467,10 +490,16 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(cause); out.writeString(index); writeSettingsToStream(settings, out); - out.writeVInt(mappings.size()); - for (Map.Entry entry : mappings.entrySet()) { - out.writeString(entry.getKey()); - out.writeString(entry.getValue()); + if (out.getVersion().before(Version.V_2_0_0)) { + if ("{}".equals(mappings)) { + out.writeVInt(0); + } else { + out.writeVInt(1); + out.writeString(MapperService.SINGLE_MAPPING_NAME); + out.writeString(mappings); + } + } else { + out.writeString(mappings); } out.writeVInt(aliases.size()); for (Alias alias : aliases) { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java index be13313cbe9e7..961662ecdcf7e 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsIndexRequest.java @@ -32,10 +32,12 @@ package org.opensearch.action.admin.indices.mapping.get; +import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.OriginalIndices; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.single.shard.SingleShardRequest; +import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -43,26 +45,26 @@ 
public class GetFieldMappingsIndexRequest extends SingleShardRequest { - private final boolean probablySingleFieldRequest; private final boolean includeDefaults; private final String[] fields; - private final String[] types; - private OriginalIndices originalIndices; + private final OriginalIndices originalIndices; GetFieldMappingsIndexRequest(StreamInput in) throws IOException { super(in); - types = in.readStringArray(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readStringArray(); // removed types array + } fields = in.readStringArray(); includeDefaults = in.readBoolean(); - probablySingleFieldRequest = in.readBoolean(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readBoolean(); // removed probablySingleField boolean + } originalIndices = OriginalIndices.readOriginalIndices(in); } - GetFieldMappingsIndexRequest(GetFieldMappingsRequest other, String index, boolean probablySingleFieldRequest) { - this.probablySingleFieldRequest = probablySingleFieldRequest; + GetFieldMappingsIndexRequest(GetFieldMappingsRequest other, String index) { this.includeDefaults = other.includeDefaults(); - this.types = other.types(); this.fields = other.fields(); assert index != null; this.index(index); @@ -74,18 +76,10 @@ public ActionRequestValidationException validate() { return null; } - public String[] types() { - return types; - } - public String[] fields() { return fields; } - public boolean probablySingleFieldRequest() { - return probablySingleFieldRequest; - } - public boolean includeDefaults() { return includeDefaults; } @@ -103,10 +97,14 @@ public IndicesOptions indicesOptions() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeStringArray(types); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeStringArray(Strings.EMPTY_ARRAY); + } out.writeStringArray(fields); out.writeBoolean(includeDefaults); - out.writeBoolean(probablySingleFieldRequest); + if (out.getVersion().before(Version.V_2_0_0)) { + 
out.writeBoolean(false); + } OriginalIndices.writeOriginalIndices(originalIndices, out); } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java index 8ed9de2427948..e6a2ad3187250 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequest.java @@ -32,6 +32,7 @@ package org.opensearch.action.admin.indices.mapping.get; +import org.opensearch.Version; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.IndicesRequest; @@ -41,6 +42,7 @@ import org.opensearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.util.Arrays; /** * Request the mappings of specific fields @@ -57,7 +59,6 @@ public class GetFieldMappingsRequest extends ActionRequest implements IndicesReq private boolean includeDefaults = false; private String[] indices = Strings.EMPTY_ARRAY; - private String[] types = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen(); @@ -66,7 +67,12 @@ public GetFieldMappingsRequest() {} public GetFieldMappingsRequest(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); - types = in.readStringArray(); + if (in.getVersion().before(Version.V_2_0_0)) { + String[] types = in.readStringArray(); + if (types != Strings.EMPTY_ARRAY) { + throw new IllegalArgumentException("Expected empty type array but received [" + Arrays.toString(types) + "]"); + } + } indicesOptions = IndicesOptions.readIndicesOptions(in); local = in.readBoolean(); fields = in.readStringArray(); @@ -92,11 +98,6 @@ public GetFieldMappingsRequest indices(String... indices) { return this; } - public GetFieldMappingsRequest types(String... 
types) { - this.types = types; - return this; - } - public GetFieldMappingsRequest indicesOptions(IndicesOptions indicesOptions) { this.indicesOptions = indicesOptions; return this; @@ -107,10 +108,6 @@ public String[] indices() { return indices; } - public String[] types() { - return types; - } - @Override public IndicesOptions indicesOptions() { return indicesOptions; @@ -150,7 +147,9 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeStringArray(indices); - out.writeStringArray(types); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeStringArray(Strings.EMPTY_ARRAY); + } indicesOptions.writeIndicesOptions(out); out.writeBoolean(local); out.writeStringArray(fields); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java index 19eb0afc18262..4a8c624e7e06e 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsRequestBuilder.java @@ -54,16 +54,6 @@ public GetFieldMappingsRequestBuilder addIndices(String... indices) { return this; } - public GetFieldMappingsRequestBuilder setTypes(String... types) { - request.types(types); - return this; - } - - public GetFieldMappingsRequestBuilder addTypes(String... 
types) { - request.types(ArrayUtils.concat(request.types(), types)); - return this; - } - public GetFieldMappingsRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) { request.indicesOptions(indicesOptions); return this; diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java index 713c842e07dad..12024ef455a32 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponse.java @@ -32,9 +32,9 @@ package org.opensearch.action.admin.indices.mapping.get; +import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.common.ParseField; -import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -47,6 +47,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.Mapper; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.io.InputStream; @@ -97,38 +98,37 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte }, MAPPINGS, ObjectParser.ValueType.OBJECT); } - // todo remove middle `type` level - private final Map>> mappings; + private final Map> mappings; - GetFieldMappingsResponse(Map>> mappings) { + GetFieldMappingsResponse(Map> mappings) { this.mappings = mappings; } GetFieldMappingsResponse(StreamInput in) throws IOException { super(in); int size = in.readVInt(); - Map>> indexMapBuilder = new HashMap<>(size); + Map> indexMapBuilder = new HashMap<>(size); for (int i = 0; i < size; i++) { String index = in.readString(); - int 
typesSize = in.readVInt(); - Map> typeMapBuilder = new HashMap<>(typesSize); - for (int j = 0; j < typesSize; j++) { - String type = in.readString(); - int fieldSize = in.readVInt(); - Map fieldMapBuilder = new HashMap<>(fieldSize); - for (int k = 0; k < fieldSize; k++) { - fieldMapBuilder.put(in.readString(), new FieldMappingMetadata(in.readString(), in.readBytesReference())); + if (in.getVersion().before(Version.V_2_0_0)) { + int typesSize = in.readVInt(); + if (typesSize != 1) { + throw new IllegalStateException("Expected single type but received [" + typesSize + "]"); } - typeMapBuilder.put(type, unmodifiableMap(fieldMapBuilder)); + in.readString(); // type } - indexMapBuilder.put(index, unmodifiableMap(typeMapBuilder)); + int fieldSize = in.readVInt(); + Map fieldMapBuilder = new HashMap<>(fieldSize); + for (int k = 0; k < fieldSize; k++) { + fieldMapBuilder.put(in.readString(), new FieldMappingMetadata(in.readString(), in.readBytesReference())); + } + indexMapBuilder.put(index, unmodifiableMap(fieldMapBuilder)); } mappings = unmodifiableMap(indexMapBuilder); - } /** returns the retrieved field mapping. The return map keys are index, type, field (as specified in the request). */ - public Map>> mappings() { + public Map> mappings() { return mappings; } @@ -138,32 +138,23 @@ public Map>> mappings() { * @param field field name as specified in the {@link GetFieldMappingsRequest} * @return FieldMappingMetadata for the requested field or null if not found. 
*/ - public FieldMappingMetadata fieldMappings(String index, String type, String field) { - Map> indexMapping = mappings.get(index); + public FieldMappingMetadata fieldMappings(String index, String field) { + Map indexMapping = mappings.get(index); if (indexMapping == null) { return null; } - Map typeMapping = indexMapping.get(type); - if (typeMapping == null) { - return null; - } - return typeMapping.get(field); + return indexMapping.get(field); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - for (Map.Entry>> indexEntry : mappings.entrySet()) { + for (Map.Entry> indexEntry : mappings.entrySet()) { builder.startObject(indexEntry.getKey()); builder.startObject(MAPPINGS.getPreferredName()); - Map mappings = null; - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - assert mappings == null; - mappings = typeEntry.getValue(); - } if (mappings != null) { - addFieldMappingsToBuilder(builder, params, mappings); + addFieldMappingsToBuilder(builder, params, indexEntry.getValue()); } builder.endObject(); @@ -183,7 +174,6 @@ private void addFieldMappingsToBuilder(XContentBuilder builder, Params params, M } public static class FieldMappingMetadata implements ToXContentFragment { - public static final FieldMappingMetadata NULL = new FieldMappingMetadata("", BytesArray.EMPTY); private static final ParseField FULL_NAME = new ParseField("full_name"); private static final ParseField MAPPING = new ParseField("mapping"); @@ -220,10 +210,6 @@ public Map sourceAsMap() { return XContentHelper.convertToMap(source, true, XContentType.JSON).v2(); } - public boolean isNull() { - return NULL.fullName().equals(fullName) && NULL.source.length() == source.length(); - } - // pkg-private for testing BytesReference getSource() { return source; @@ -268,18 +254,18 @@ public int hashCode() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(mappings.size()); - for 
(Map.Entry>> indexEntry : mappings.entrySet()) { + for (Map.Entry> indexEntry : mappings.entrySet()) { out.writeString(indexEntry.getKey()); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeVInt(1); + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeVInt(indexEntry.getValue().size()); - for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { - out.writeString(typeEntry.getKey()); - out.writeVInt(typeEntry.getValue().size()); - for (Map.Entry fieldEntry : typeEntry.getValue().entrySet()) { - out.writeString(fieldEntry.getKey()); - FieldMappingMetadata fieldMapping = fieldEntry.getValue(); - out.writeString(fieldMapping.fullName()); - out.writeBytesReference(fieldMapping.source); - } + for (Map.Entry fieldEntry : indexEntry.getValue().entrySet()) { + out.writeString(fieldEntry.getKey()); + FieldMappingMetadata fieldMapping = fieldEntry.getValue(); + out.writeString(fieldMapping.fullName()); + out.writeBytesReference(fieldMapping.source); } } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java index d203a5e6a45fe..3be8e75be7290 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/GetMappingsResponse.java @@ -112,13 +112,13 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { for (final ObjectObjectCursor indexEntry : getMappings()) { + builder.startObject(indexEntry.key); if (indexEntry.value != null) { - builder.startObject(indexEntry.key); builder.field(MAPPINGS.getPreferredName(), indexEntry.value.sourceAsMap()); - builder.endObject(); } else { builder.startObject(MAPPINGS.getPreferredName()).endObject(); } + builder.endObject(); } return 
builder; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index afc905bcac2e4..bdb5222a6dcba 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -81,9 +81,8 @@ protected void doExecute(Task task, GetFieldMappingsRequest request, final Actio if (concreteIndices.length == 0) { listener.onResponse(new GetFieldMappingsResponse(emptyMap())); } else { - boolean probablySingleFieldRequest = concreteIndices.length == 1 && request.types().length == 1 && request.fields().length == 1; for (final String index : concreteIndices) { - GetFieldMappingsIndexRequest shardRequest = new GetFieldMappingsIndexRequest(request, index, probablySingleFieldRequest); + GetFieldMappingsIndexRequest shardRequest = new GetFieldMappingsIndexRequest(request, index); shardAction.execute(shardRequest, new ActionListener() { @Override public void onResponse(GetFieldMappingsResponse result) { @@ -107,7 +106,7 @@ public void onFailure(Exception e) { } private GetFieldMappingsResponse merge(AtomicReferenceArray indexResponses) { - Map>> mergedResponses = new HashMap<>(); + Map> mergedResponses = new HashMap<>(); for (int i = 0; i < indexResponses.length(); i++) { Object element = indexResponses.get(i); if (element instanceof GetFieldMappingsResponse) { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index c1248b69381d5..ca07475f0deab 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ 
b/server/src/main/java/org/opensearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -56,12 +56,10 @@ import org.opensearch.index.mapper.Mapper; import org.opensearch.index.shard.ShardId; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.TypeMissingException; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; import java.io.IOException; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -124,28 +122,9 @@ protected GetFieldMappingsResponse shardOperation(final GetFieldMappingsIndexReq Predicate metadataFieldPredicate = (f) -> indicesService.isMetadataField(indexCreatedVersion, f); Predicate fieldPredicate = metadataFieldPredicate.or(indicesService.getFieldFilter().apply(shardId.getIndexName())); - DocumentMapper mapper = indexService.mapperService().documentMapper(); - Collection typeIntersection; - if (request.types().length == 0) { - typeIntersection = mapper == null ? Collections.emptySet() : Collections.singleton(mapper.type()); - } else { - typeIntersection = mapper != null && Regex.simpleMatch(request.types(), mapper.type()) - ? 
Collections.singleton(mapper.type()) - : Collections.emptySet(); - if (typeIntersection.isEmpty()) { - throw new TypeMissingException(shardId.getIndex(), request.types()); - } - } - - Map> typeMappings = new HashMap<>(); - for (String type : typeIntersection) { - DocumentMapper documentMapper = indexService.mapperService().documentMapper(type); - Map fieldMapping = findFieldMappingsByType(fieldPredicate, documentMapper, request); - if (!fieldMapping.isEmpty()) { - typeMappings.put(type, fieldMapping); - } - } - return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), Collections.unmodifiableMap(typeMappings))); + DocumentMapper documentMapper = indexService.mapperService().documentMapper(); + Map fieldMapping = findFieldMappings(fieldPredicate, documentMapper, request); + return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), fieldMapping)); } @Override @@ -195,11 +174,14 @@ public Boolean paramAsBoolean(String key, Boolean defaultValue) { } }; - private static Map findFieldMappingsByType( + private static Map findFieldMappings( Predicate fieldPredicate, DocumentMapper documentMapper, GetFieldMappingsIndexRequest request ) { + if (documentMapper == null) { + return Collections.emptyMap(); + } Map fieldMappings = new HashMap<>(); final MappingLookup allFieldMappers = documentMapper.mappers(); for (String field : request.fields()) { @@ -218,8 +200,6 @@ private static Map findFieldMappingsByType( Mapper fieldMapper = allFieldMappers.getMapper(field); if (fieldMapper != null) { addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults()); - } else if (request.probablySingleFieldRequest()) { - fieldMappings.put(field, FieldMappingMetadata.NULL); } } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverService.java index 9e1bff73b7038..19a7b8c95199b 100644 --- 
a/server/src/main/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -390,7 +390,9 @@ static void validate(Metadata metadata, String rolloverTarget, String newIndexNa if (Strings.isNullOrEmpty(newIndexName) == false) { throw new IllegalArgumentException("new index name may not be specified when rolling over a data stream"); } - if ((request.settings().equals(Settings.EMPTY) == false) || (request.aliases().size() > 0) || (request.mappings().size() > 0)) { + if ((request.settings().equals(Settings.EMPTY) == false) + || (request.aliases().size() > 0) + || (request.mappings().equals("{}") == false)) { throw new IllegalArgumentException( "aliases, mappings, and index settings may not be specified when rolling over a data stream" ); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java b/server/src/main/java/org/opensearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java index 5ca0c6e36e1ae..598b5bdbf6d3b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/post/TransportSimulateIndexTemplateAction.java @@ -252,8 +252,8 @@ public static Template resolveTemplate( Map aliasesByName = aliases.stream().collect(Collectors.toMap(AliasMetadata::getAlias, Function.identity())); // empty request mapping as the user can't specify any explicit mappings via the simulate api - List>> mappings = MetadataCreateIndexService.collectV2Mappings( - Collections.emptyMap(), + List> mappings = MetadataCreateIndexService.collectV2Mappings( + "{}", simulatedState, matchingTemplate, xContentRegistry, @@ -264,11 +264,9 @@ public static Template resolveTemplate( indexMetadata, tempIndexService -> { MapperService mapperService = 
tempIndexService.mapperService(); - for (Map> mapping : mappings) { - if (!mapping.isEmpty()) { - assert mapping.size() == 1 : mapping; - Map.Entry> entry = mapping.entrySet().iterator().next(); - mapperService.merge(entry.getKey(), entry.getValue(), MapperService.MergeReason.INDEX_TEMPLATE); + for (Map mapping : mappings) { + if (mapping.isEmpty() == false) { + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MapperService.MergeReason.INDEX_TEMPLATE); } } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java index d08fe3b926c66..810365589ae1f 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexTemplateMetadata.java @@ -51,17 +51,17 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; -import static org.opensearch.cluster.metadata.Metadata.CONTEXT_MODE_PARAM; - public class IndexTemplateMetadata extends AbstractDiffable { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexTemplateMetadata.class); @@ -161,12 +161,15 @@ public Settings settings() { return this.settings; } - public ImmutableOpenMap mappings() { - return this.mappings; + public CompressedXContent mappings() { + if (this.mappings.isEmpty()) { + return null; + } + return this.mappings.iterator().next().value; } - public ImmutableOpenMap getMappings() { - return this.mappings; + public 
CompressedXContent getMappings() { + return this.mappings(); } public ImmutableOpenMap aliases() { @@ -194,7 +197,7 @@ public boolean equals(Object o) { if (!settings.equals(that.settings)) return false; if (!patterns.equals(that.patterns)) return false; - return Objects.equals(version, that.version); + return Objects.equals(aliases, that.aliases) && Objects.equals(version, that.version); } @Override @@ -205,6 +208,7 @@ public int hashCode() { result = 31 * result + patterns.hashCode(); result = 31 * result + settings.hashCode(); result = 31 * result + mappings.hashCode(); + result = 31 * result + aliases.hashCode(); return result; } @@ -248,6 +252,19 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(version); } + @Override + public String toString() { + try { + XContentBuilder builder = JsonXContent.contentBuilder(); + builder.startObject(); + Builder.toXContentWithTypes(this, builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + return Strings.toString(builder); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + public static class Builder { private static final Set VALID_FIELDS = Sets.newHashSet( @@ -286,7 +303,7 @@ public Builder(IndexTemplateMetadata indexTemplateMetadata) { patterns(indexTemplateMetadata.patterns()); settings(indexTemplateMetadata.settings()); - mappings = ImmutableOpenMap.builder(indexTemplateMetadata.mappings()); + mappings = ImmutableOpenMap.builder(indexTemplateMetadata.mappings); aliases = ImmutableOpenMap.builder(indexTemplateMetadata.aliases()); } @@ -356,23 +373,6 @@ public static void toXContentWithTypes( builder.endObject(); } - /** - * Removes the nested type in the xContent representation of {@link IndexTemplateMetadata}. - * - * This method is useful to help bridge the gap between an the internal representation which still uses (the legacy format) a - * nested type in the mapping, and the external representation which does not use a nested type in the mapping. 
- */ - public static void removeType(IndexTemplateMetadata indexTemplateMetadata, XContentBuilder builder) throws IOException { - builder.startObject(); - toInnerXContent( - indexTemplateMetadata, - builder, - new ToXContent.MapParams(Collections.singletonMap("reduce_mappings", "true")), - false - ); - builder.endObject(); - } - /** * Serializes the template to xContent, making sure not to nest mappings under the * type name. @@ -399,10 +399,6 @@ private static void toInnerXContent( ToXContent.Params params, boolean includeTypeName ) throws IOException { - Metadata.XContentContext context = params.param(CONTEXT_MODE_PARAM) != null - ? Metadata.XContentContext.valueOf(params.param(CONTEXT_MODE_PARAM)) - : null; - builder.field("order", indexTemplateMetadata.order()); if (indexTemplateMetadata.version() != null) { builder.field("version", indexTemplateMetadata.version()); @@ -413,50 +409,19 @@ private static void toInnerXContent( indexTemplateMetadata.settings().toXContent(builder, params); builder.endObject(); - if (context == Metadata.XContentContext.API) { - builder.startObject("mappings"); - for (ObjectObjectCursor cursor1 : indexTemplateMetadata.mappings()) { - Map mapping = XContentHelper.convertToMap(cursor1.value.uncompressed(), false).v2(); - if (mapping.size() == 1 && mapping.containsKey(cursor1.key)) { - // the type name is the root value, reduce it - mapping = (Map) mapping.get(cursor1.key); - } - builder.field(cursor1.key); - builder.map(mapping); - } - builder.endObject(); - } else if (params.paramAsBoolean("reduce_mappings", false)) { - // The parameter include_type_name is only ever used in the REST API, where reduce_mappings is - // always set to true. We therefore only check for include_type_name in this branch. 
+ includeTypeName &= (params.paramAsBoolean("reduce_mappings", false) == false); + CompressedXContent m = indexTemplateMetadata.mappings(); + if (m != null) { + Map documentMapping = XContentHelper.convertToMap(m.uncompressed(), true).v2(); if (includeTypeName == false) { - Map documentMapping = null; - for (ObjectObjectCursor cursor : indexTemplateMetadata.mappings()) { - assert documentMapping == null; - Map mapping = XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2(); - documentMapping = reduceMapping(cursor.key, mapping); - } - - if (documentMapping != null) { - builder.field("mappings", documentMapping); - } else { - builder.startObject("mappings").endObject(); - } + documentMapping = reduceMapping(documentMapping); } else { - builder.startObject("mappings"); - for (ObjectObjectCursor cursor : indexTemplateMetadata.mappings()) { - Map mapping = XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2(); - mapping = reduceMapping(cursor.key, mapping); - builder.field(cursor.key); - builder.map(mapping); - } - builder.endObject(); + documentMapping = reduceEmptyMapping(documentMapping); } + builder.field("mappings"); + builder.map(documentMapping); } else { - builder.startArray("mappings"); - for (ObjectObjectCursor cursor : indexTemplateMetadata.mappings()) { - builder.map(XContentHelper.convertToMap(cursor.value.uncompressed(), true).v2()); - } - builder.endArray(); + builder.startObject("mappings").endObject(); } builder.startObject("aliases"); @@ -467,15 +432,22 @@ private static void toInnerXContent( } @SuppressWarnings("unchecked") - private static Map reduceMapping(String type, Map mapping) { - if (mapping.size() == 1 && mapping.containsKey(type)) { - // the type name is the root value, reduce it - return (Map) mapping.get(type); + private static Map reduceEmptyMapping(Map mapping) { + if (mapping.keySet().size() == 1 + && mapping.containsKey(MapperService.SINGLE_MAPPING_NAME) + && ((Map) 
mapping.get(MapperService.SINGLE_MAPPING_NAME)).size() == 0) { + return (Map) mapping.values().iterator().next(); } else { return mapping; } } + @SuppressWarnings("unchecked") + private static Map reduceMapping(Map mapping) { + assert mapping.keySet().size() == 1 : mapping.keySet(); + return (Map) mapping.values().iterator().next(); + } + public static IndexTemplateMetadata fromXContent(XContentParser parser, String templateName) throws IOException { Builder builder = new Builder(templateName); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index 4e2c475e6c4ce..cb76b7217624f 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -32,7 +32,6 @@ package org.opensearch.cluster.metadata; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -114,7 +113,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static java.util.Collections.singletonMap; import static java.util.stream.Collectors.toList; import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING; import static org.opensearch.cluster.metadata.IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING; @@ -451,7 +449,7 @@ private ClusterState applyCreateIndexWithTemporaryService( final boolean silent, final IndexMetadata sourceMetadata, final IndexMetadata temporaryIndexMeta, - final List>> mappings, + final List> mappings, final Function> aliasSupplier, final List templatesApplied, final BiConsumer metadataTransformer @@ -541,20 +539,10 @@ private ClusterState applyCreateIndexRequestWithV1Templates( 
templates.stream().map(IndexTemplateMetadata::name).collect(Collectors.toList()) ); - final Map> mappings = Collections.unmodifiableMap( + final Map mappings = Collections.unmodifiableMap( parseV1Mappings( request.mappings(), - templates.stream() - .map(IndexTemplateMetadata::getMappings) - // Converts the ImmutableOpenMap into a non-terrible HashMap - .map(iom -> { - Map converted = new HashMap<>(iom.size()); - for (ObjectObjectCursor cursor : iom) { - converted.put(cursor.key, cursor.value); - } - return converted; - }) - .collect(toList()), + templates.stream().map(IndexTemplateMetadata::getMappings).collect(toList()), xContentRegistry ) ); @@ -616,7 +604,7 @@ private ClusterState applyCreateIndexRequestWithV2Template( ); } - final List>> mappings = collectV2Mappings( + final List> mappings = collectV2Mappings( request.mappings(), currentState, templateName, @@ -659,29 +647,31 @@ private ClusterState applyCreateIndexRequestWithV2Template( ); } - public static List>> collectV2Mappings( - final Map requestMappings, + public static List> collectV2Mappings( + final String requestMappings, final ClusterState currentState, final String templateName, final NamedXContentRegistry xContentRegistry, final String indexName ) throws Exception { - List>> result = new ArrayList<>(); - List templateMappings = MetadataIndexTemplateService.collectMappings(currentState, templateName, indexName); + return collectV2Mappings(requestMappings, templateMappings, xContentRegistry); + } + + public static List> collectV2Mappings( + final String requestMappings, + final List templateMappings, + final NamedXContentRegistry xContentRegistry + ) throws Exception { + List> result = new ArrayList<>(); + for (CompressedXContent templateMapping : templateMappings) { Map parsedTemplateMapping = MapperService.parseMapping(xContentRegistry, templateMapping.string()); - result.add(singletonMap(MapperService.SINGLE_MAPPING_NAME, parsedTemplateMapping)); + result.add(parsedTemplateMapping); } - if 
(requestMappings.size() > 0) { - assert requestMappings.size() == 1 : "expected request metadata mappings to have 1 type but it had: " + requestMappings; - Map.Entry entry = requestMappings.entrySet().iterator().next(); - - String type = entry.getKey(); - Map parsedMappings = MapperService.parseMapping(xContentRegistry, entry.getValue()); - result.add(singletonMap(type, parsedMappings)); - } + Map parsedRequestMappings = MapperService.parseMapping(xContentRegistry, requestMappings); + result.add(parsedRequestMappings); return result; } @@ -694,7 +684,8 @@ private ClusterState applyCreateIndexRequestWithExistingMetadata( ) throws Exception { logger.info("applying create index request using existing index [{}] metadata", sourceMetadata.getIndex().getName()); - if (request.mappings().size() > 0) { + final Map mappings = MapperService.parseMapping(xContentRegistry, request.mappings()); + if (mappings.isEmpty() == false) { throw new IllegalArgumentException( "mappings are not allowed when creating an index from a source index, " + "all mappings are copied from the source index" ); @@ -719,7 +710,7 @@ private ClusterState applyCreateIndexRequestWithExistingMetadata( silent, sourceMetadata, tmpImd, - Collections.emptyList(), + Collections.singletonList(mappings), indexService -> resolveAndValidateAliases( request.index(), request.aliases(), @@ -745,55 +736,28 @@ private ClusterState applyCreateIndexRequestWithExistingMetadata( * {@link IndexTemplateMetadata#order()}). 
This merging makes no distinction between field * definitions, as may result in an invalid field definition */ - static Map> parseV1Mappings( - Map requestMappings, - List> templateMappings, + static Map parseV1Mappings( + String requestMappings, + List templateMappings, NamedXContentRegistry xContentRegistry ) throws Exception { - Map> mappings = new HashMap<>(); - for (Map.Entry entry : requestMappings.entrySet()) { - Map mapping = MapperService.parseMapping(xContentRegistry, entry.getValue()); - if (mapping.isEmpty()) { - // Someone provided an empty '{}' for mappings, which is okay, but to avoid - // tripping the below assertion, we can safely ignore it - continue; - } - assert mapping.size() == 1 : mapping; - assert entry.getKey().equals(mapping.keySet().iterator().next()) : entry.getKey() + " != " + mapping; - mappings.put(entry.getKey(), mapping); - } - + Map mappings = MapperService.parseMapping(xContentRegistry, requestMappings); // apply templates, merging the mappings into the request mapping if exists - for (Map tMapping : templateMappings) { - for (Map.Entry cursor : tMapping.entrySet()) { - String mappingString = cursor.getValue().string(); - String type = cursor.getKey(); - if (mappings.containsKey(type)) { - XContentHelper.mergeDefaults(mappings.get(type), MapperService.parseMapping(xContentRegistry, mappingString)); - } else if (mappings.size() == 1 && type.equals(MapperService.SINGLE_MAPPING_NAME)) { - // Typeless template with typed mapping - Map templateMapping = MapperService.parseMapping(xContentRegistry, mappingString); - assert templateMapping.size() == 1 : templateMapping; - assert type.equals(templateMapping.keySet().iterator().next()) : type + " != " + templateMapping; - Map.Entry> mappingEntry = mappings.entrySet().iterator().next(); - templateMapping = singletonMap( - mappingEntry.getKey(), // reuse type name from the mapping - templateMapping.values().iterator().next() - ); // but actual mappings from the template - 
XContentHelper.mergeDefaults(mappingEntry.getValue(), templateMapping); - } else if (tMapping.size() == 1 && mappings.containsKey(MapperService.SINGLE_MAPPING_NAME)) { - // Typed template with typeless mapping - Map templateMapping = MapperService.parseMapping(xContentRegistry, mappingString); - assert templateMapping.size() == 1 : templateMapping; - assert type.equals(templateMapping.keySet().iterator().next()) : type + " != " + templateMapping; - Map mapping = mappings.get(MapperService.SINGLE_MAPPING_NAME); - templateMapping = singletonMap( - MapperService.SINGLE_MAPPING_NAME, // make template mapping typeless - templateMapping.values().iterator().next() - ); - XContentHelper.mergeDefaults(mapping, templateMapping); + for (CompressedXContent mapping : templateMappings) { + if (mapping != null) { + Map templateMapping = MapperService.parseMapping(xContentRegistry, mapping.string()); + if (templateMapping.isEmpty()) { + // Someone provided an empty '{}' for mappings, which is okay, but to avoid + // tripping the below assertion, we can safely ignore it + continue; + } + assert templateMapping.size() == 1 : "expected exactly one mapping value, got: " + templateMapping; + // pre-8x templates may have a wrapper type other than _doc, so we re-wrap things here + templateMapping = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, templateMapping.values().iterator().next()); + if (mappings.isEmpty()) { + mappings = templateMapping; } else { - mappings.put(type, MapperService.parseMapping(xContentRegistry, mappingString)); + XContentHelper.mergeDefaults(mappings, templateMapping); } } } @@ -1170,15 +1134,13 @@ private static ClusterBlocks.Builder createClusterBlocksBuilder(ClusterState cur private static void updateIndexMappingsAndBuildSortOrder( IndexService indexService, CreateIndexClusterStateUpdateRequest request, - List>> mappings, + List> mappings, @Nullable IndexMetadata sourceMetadata ) throws IOException { MapperService mapperService = 
indexService.mapperService(); - for (Map> mapping : mappings) { - if (!mapping.isEmpty()) { - assert mapping.size() == 1 : mapping; - Map.Entry> entry = mapping.entrySet().iterator().next(); - mapperService.merge(entry.getKey(), entry.getValue(), MergeReason.INDEX_TEMPLATE); + for (Map mapping : mappings) { + if (mapping.isEmpty() == false) { + mapperService.merge(MapperService.SINGLE_MAPPING_NAME, mapping, MergeReason.INDEX_TEMPLATE); } } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java index eb9508781393f..22cd5c1dbbbe2 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataIndexTemplateService.java @@ -932,6 +932,11 @@ static ClusterState innerPutTemplate( templateBuilder.putAlias(aliasMetadata); } IndexTemplateMetadata template = templateBuilder.build(); + IndexTemplateMetadata existingTemplate = currentState.metadata().templates().get(request.name); + if (template.equals(existingTemplate)) { + // The template is unchanged, therefore there is no need for a cluster state update + return currentState; + } Metadata.Builder builder = Metadata.builder(currentState.metadata()).put(template); diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java index c6724d1b3063e..4e9004a880a57 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java @@ -81,7 +81,7 @@ public Builder(RootObjectMapper.Builder builder, MapperService mapperService) { this.rootObjectMapper = builder.build(builderContext); final String type = rootObjectMapper.name(); - final DocumentMapper existingMapper = mapperService.documentMapper(type); + final 
DocumentMapper existingMapper = mapperService.documentMapper(); final Version indexCreatedVersion = mapperService.getIndexSettings().getIndexVersionCreated(); final Map metadataMapperParsers = mapperService.mapperRegistry.getMetadataMapperParsers( indexCreatedVersion diff --git a/server/src/main/java/org/opensearch/index/mapper/MapperService.java b/server/src/main/java/org/opensearch/index/mapper/MapperService.java index e2a197aa2b4dd..1d4e49a6e6fee 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/opensearch/index/mapper/MapperService.java @@ -306,13 +306,13 @@ public boolean updateMapping(final IndexMetadata currentIndexMetadata, final Ind // refresh mapping can happen when the parsing/merging of the mapping from the metadata doesn't result in the same // mapping, in this case, we send to the master to refresh its own version of the mappings (to conform with the // merge version of it, which it does when refreshing the mappings), and warn log it. - if (documentMapper(mappingType).mappingSource().equals(incomingMappingSource) == false) { + if (documentMapper().mappingSource().equals(incomingMappingSource) == false) { logger.debug( "[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index(), mappingType, incomingMappingSource, - documentMapper(mappingType).mappingSource() + documentMapper().mappingSource() ); requireRefresh = true; @@ -530,16 +530,6 @@ public DocumentMapper documentMapper() { return mapper; } - /** - * Return the {@link DocumentMapper} for the given type. - */ - public DocumentMapper documentMapper(String type) { - if (mapper != null && type.equals(mapper.type())) { - return mapper; - } - return null; - } - /** * Returns {@code true} if the given {@code mappingSource} includes a type * as a top-level object. 
@@ -574,12 +564,12 @@ public String resolveDocumentType(String type) { * Returns the document mapper created, including a mapping update if the * type has been dynamically created. */ - public DocumentMapperForType documentMapperWithAutoCreate(String type) { - DocumentMapper mapper = documentMapper(type); + public DocumentMapperForType documentMapperWithAutoCreate() { + DocumentMapper mapper = documentMapper(); if (mapper != null) { return new DocumentMapperForType(mapper, null); } - mapper = parse(type, null); + mapper = parse(SINGLE_MAPPING_NAME, null); return new DocumentMapperForType(mapper, mapper.mapping()); } diff --git a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java index 4e6077889576d..f67feadde4b41 100644 --- a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java @@ -312,11 +312,11 @@ public Set sourcePath(String fullName) { } /** - * Returns s {@link DocumentMapper} instance for the given type. - * Delegates to {@link MapperService#documentMapper(String)} + * Returns s {@link DocumentMapper} instance. 
+ * Delegates to {@link MapperService#documentMapper()} */ public DocumentMapper documentMapper(String type) { - return mapperService.documentMapper(type); + return mapperService.documentMapper(); } /** diff --git a/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java index f8732586ec50a..d1ffcb394ec06 100644 --- a/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java @@ -137,7 +137,7 @@ public String getWriteableName() { protected Query doToQuery(QueryShardContext context) throws IOException { deprecationLogger.deprecate("type_query", TYPES_DEPRECATION_MESSAGE); // LUCENE 4 UPGRADE document mapper should use bytesref as well? - DocumentMapper documentMapper = context.getMapperService().documentMapper(type); + DocumentMapper documentMapper = context.getMapperService().documentMapper(); if (documentMapper == null) { // no type means no documents return new MatchNoDocsQuery(); diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index cbf5d35327f6f..cd45b9483834b 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -882,7 +882,7 @@ private Engine.IndexResult applyIndexOperation( ); } operation = prepareIndex( - docMapper(resolvedType), + docMapper(), sourceWithResolvedType, seqNo, opPrimaryTerm, @@ -1102,7 +1102,7 @@ private Engine.DeleteResult applyDeleteOperation( // fail if index and delete operations do not use the same type. 
// TODO: clean this up when types are gone try { - Mapping update = docMapper(type).getMapping(); + Mapping update = docMapper().getMapping(); if (update != null) { return new Engine.DeleteResult(update); } @@ -1249,7 +1249,7 @@ public SeqNoStats seqNoStats() { return getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); } - public IndexingStats indexingStats(String... types) { + public IndexingStats indexingStats() { Engine engine = getEngineOrNull(); final boolean throttled; final long throttleTimeInMillis; @@ -3143,8 +3143,8 @@ private static void persistMetadata( } } - private DocumentMapperForType docMapper(String type) { - return mapperService.documentMapperWithAutoCreate(mapperService.resolveDocumentType(type)); + private DocumentMapperForType docMapper() { + return mapperService.documentMapperWithAutoCreate(); } private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { @@ -3874,7 +3874,7 @@ private EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() { return new EngineConfig.TombstoneDocSupplier() { @Override public ParsedDocument newDeleteTombstoneDoc(String type, String id) { - return docMapper(type).getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id); + return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id); } @Override diff --git a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java index 50cd160ecb00d..ecbdd3875f14a 100644 --- a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java @@ -389,7 +389,7 @@ private static ParsedDocument parseDocument( String routing ) { MapperService mapperService = indexShard.mapperService(); - DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(MapperService.SINGLE_MAPPING_NAME); 
+ DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(); ParsedDocument parsedDocument = docMapper.getDocumentMapper() .parse(new SourceToParse(index, MapperService.SINGLE_MAPPING_NAME, "_id_for_tv_api", doc, xContentType, routing)); if (docMapper.getMapping() != null) { diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java index d0610d790999b..c35f417795377 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -94,12 +94,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC .getFieldMappings(getMappingsRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBuilder builder) throws Exception { - Map>> mappingsByIndex = response.mappings(); - - boolean isPossibleSingleFieldRequest = indices.length == 1 && fields.length == 1; - if (isPossibleSingleFieldRequest && isFieldMappingMissingField(mappingsByIndex)) { - return new BytesRestResponse(OK, builder.startObject().endObject()); - } + Map> mappingsByIndex = response.mappings(); RestStatus status = OK; if (mappingsByIndex.isEmpty() && fields.length > 0) { @@ -111,24 +106,4 @@ public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBui }); } - /** - * Helper method to find out if the only included fieldmapping metadata is typed NULL, which means - * that type and index exist, but the field did not - */ - private boolean isFieldMappingMissingField(Map>> mappingsByIndex) { - if (mappingsByIndex.size() != 1) { - return false; - } - - for (Map> value : mappingsByIndex.values()) { - for (Map fieldValue : value.values()) { - for (Map.Entry fieldMappingMetadataEntry : 
fieldValue.entrySet()) { - if (fieldMappingMetadataEntry.getValue().isNull()) { - return true; - } - } - } - } - return false; - } } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java index de69be636c327..472d389a23890 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -66,7 +66,7 @@ public void testSerialization() throws IOException { try (StreamInput in = output.bytes().streamInput()) { CreateIndexRequest serialized = new CreateIndexRequest(in); assertEquals(request.index(), serialized.index()); - assertEquals(mapping, serialized.mappings().get("my_type")); + assertEquals("{\"_doc\":{}}", serialized.mappings()); } } } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 99e4b5a2cca89..512e21cc28469 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -33,6 +33,7 @@ package org.opensearch.action.admin.indices.mapping.get; import org.opensearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; +import org.opensearch.common.Strings; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; @@ -47,22 +48,29 @@ public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCase { public void testManualSerialization() throws IOException { - Map>> mappings = new HashMap<>(); + Map> 
mappings = new HashMap<>(); FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{}")); - mappings.put("index", Collections.singletonMap("type", Collections.singletonMap("field", fieldMappingMetadata))); + mappings.put("index", Collections.singletonMap("field", fieldMappingMetadata)); GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings); try (BytesStreamOutput out = new BytesStreamOutput()) { response.writeTo(out); try (StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes)) { GetFieldMappingsResponse serialized = new GetFieldMappingsResponse(in); - FieldMappingMetadata metadata = serialized.fieldMappings("index", "type", "field"); + FieldMappingMetadata metadata = serialized.fieldMappings("index", "field"); assertNotNull(metadata); assertEquals(new BytesArray("{}"), metadata.getSource()); } } } + public void testNullFieldMappingToXContent() { + Map> mappings = new HashMap<>(); + mappings.put("index", Collections.emptyMap()); + GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings); + assertEquals("{\"index\":{\"mappings\":{}}}", Strings.toString(response)); + } + @Override protected GetFieldMappingsResponse createTestInstance() { return new GetFieldMappingsResponse(randomMapping()); @@ -73,24 +81,18 @@ protected Writeable.Reader instanceReader() { return GetFieldMappingsResponse::new; } - private Map>> randomMapping() { - Map>> mappings = new HashMap<>(); + private Map> randomMapping() { + Map> mappings = new HashMap<>(); int indices = randomInt(10); for (int i = 0; i < indices; i++) { - final Map> doctypesMappings = new HashMap<>(); - int doctypes = randomInt(10); - for (int j = 0; j < doctypes; j++) { - Map fieldMappings = new HashMap<>(); - int fields = randomInt(10); - for (int k = 0; k < fields; k++) { - final String mapping = randomBoolean() ? 
"{\"type\":\"string\"}" : "{\"type\":\"keyword\"}"; - FieldMappingMetadata metadata = new FieldMappingMetadata("my field", new BytesArray(mapping)); - fieldMappings.put("field" + k, metadata); - } - doctypesMappings.put("doctype" + j, fieldMappings); + Map fieldMappings = new HashMap<>(); + int fields = randomInt(10); + for (int k = 0; k < fields; k++) { + final String mapping = randomBoolean() ? "{\"type\":\"string\"}" : "{\"type\":\"keyword\"}"; + FieldMappingMetadata metaData = new FieldMappingMetadata("my field", new BytesArray(mapping)); + fieldMappings.put("field" + k, metaData); } - mappings.put("index" + i, doctypesMappings); } return mappings; } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java index 1e8dc2f031058..0fcc60e2a4087 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -62,6 +62,7 @@ import java.util.Map; import java.util.function.Consumer; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class RolloverRequestTests extends OpenSearchTestCase { @@ -122,7 +123,7 @@ public void testParsingWithIndexSettings() throws Exception { request.fromXContent(createParser(builder)); Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(2)); - assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1)); + assertThat(request.getCreateIndexRequest().mappings(), containsString("not_analyzed")); assertThat(request.getCreateIndexRequest().aliases().size(), equalTo(1)); assertThat(request.getCreateIndexRequest().settings().getAsInt("number_of_shards", 0), equalTo(10)); } @@ -143,7 +144,7 @@ public void testTypelessMappingParsing() throws Exception { 
request.fromXContent(createParser(builder)); CreateIndexRequest createIndexRequest = request.getCreateIndexRequest(); - String mapping = createIndexRequest.mappings().get(MapperService.SINGLE_MAPPING_NAME); + String mapping = createIndexRequest.mappings(); assertNotNull(mapping); Map parsedMapping = XContentHelper.convertToMap(new BytesArray(mapping), false, XContentType.JSON).v2(); diff --git a/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java b/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java index 03db6b22bc8bd..4cc3108d6bf85 100644 --- a/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java +++ b/server/src/test/java/org/opensearch/cluster/ClusterStateTests.java @@ -221,9 +221,7 @@ public void testToXContent() throws IOException { + " }\n" + " },\n" + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" + + " \"key1\" : { }\n" + " },\n" + " \"aliases\" : { }\n" + " }\n" @@ -424,9 +422,7 @@ public void testToXContent_FlatSettingTrue_ReduceMappingFalse() throws IOExcepti + "\"\n" + " },\n" + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" + + " \"key1\" : { }\n" + " },\n" + " \"aliases\" : { }\n" + " }\n" @@ -627,11 +623,7 @@ public void testToXContent_FlatSettingFalse_ReduceMappingTrue() throws IOExcepti + " }\n" + " }\n" + " },\n" - + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" - + " },\n" + + " \"mappings\" : { },\n" + " \"aliases\" : { }\n" + " }\n" + " },\n" diff --git a/server/src/test/java/org/opensearch/cluster/metadata/IndexTemplateMetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/IndexTemplateMetadataTests.java index 203001b215cda..fb5537b5292ba 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/IndexTemplateMetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/IndexTemplateMetadataTests.java @@ -49,14 +49,12 @@ import java.util.Arrays; import 
java.util.Collections; -import static java.util.Collections.singletonMap; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.contains; public class IndexTemplateMetadataTests extends OpenSearchTestCase { public void testIndexTemplateMetadataXContentRoundTrip() throws Exception { - ToXContent.Params params = new ToXContent.MapParams(singletonMap("reduce_mappings", "true")); String template = "{\"index_patterns\" : [ \".test-*\" ],\"order\" : 1000," + "\"settings\" : {\"number_of_shards\" : 1,\"number_of_replicas\" : 0}," @@ -84,7 +82,7 @@ public void testIndexTemplateMetadataXContentRoundTrip() throws Exception { final BytesReference templateBytesRoundTrip; try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { builder.startObject(); - IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, params); + IndexTemplateMetadata.Builder.toXContentWithTypes(indexTemplateMetadata, builder, ToXContent.EMPTY_PARAMS); builder.endObject(); templateBytesRoundTrip = BytesReference.bytes(builder); } diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 5caa9eb212e15..19f6a516ca83a 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -109,7 +109,6 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singleton; import static java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -711,20 +710,18 @@ public void testParseMappingsAppliesDataFromTemplateAndRequest() throws 
Exceptio templateBuilder.putAlias(AliasMetadata.builder("alias1")); templateBuilder.putMapping("type", createMapping("mapping_from_template", "text")); }); - request.mappings(singletonMap("type", createMapping("mapping_from_request", "text").string())); + request.mappings(createMapping("mapping_from_request", "text").string()); - Map> parsedMappings = MetadataCreateIndexService.parseV1Mappings( + Map parsedMappings = MetadataCreateIndexService.parseV1Mappings( request.mappings(), - Collections.singletonList(convertMappings(templateMetadata.getMappings())), + Collections.singletonList(templateMetadata.getMappings()), NamedXContentRegistry.EMPTY ); - assertThat(parsedMappings, hasKey("type")); - Map mappingType = parsedMappings.get("type"); - assertThat(mappingType, hasKey("type")); - Map type = (Map) mappingType.get("type"); - assertThat(type, hasKey("properties")); - Map mappingsProperties = (Map) type.get("properties"); + assertThat(parsedMappings, hasKey(MapperService.SINGLE_MAPPING_NAME)); + Map doc = (Map) parsedMappings.get(MapperService.SINGLE_MAPPING_NAME); + assertThat(doc, hasKey("properties")); + Map mappingsProperties = (Map) doc.get("properties"); assertThat(mappingsProperties, hasKey("mapping_from_request")); assertThat(mappingsProperties, hasKey("mapping_from_template")); } @@ -781,17 +778,17 @@ public void testRequestDataHavePriorityOverTemplateData() throws Exception { IndexTemplateMetadata templateMetadata = addMatchingTemplate( builder -> builder.putAlias(AliasMetadata.builder("alias").searchRouting("fromTemplate").build()) - .putMapping("type", templateMapping) + .putMapping("_doc", templateMapping) .settings(Settings.builder().put("key1", "templateValue")) ); - request.mappings(singletonMap("type", reqMapping.string())); - request.aliases(singleton(new Alias("alias").searchRouting("fromRequest"))); + request.mappings(reqMapping.string()); + request.aliases(Collections.singleton(new Alias("alias").searchRouting("fromRequest"))); 
request.settings(Settings.builder().put("key1", "requestValue").build()); - Map> parsedMappings = MetadataCreateIndexService.parseV1Mappings( + Map parsedMappings = MetadataCreateIndexService.parseV1Mappings( request.mappings(), - Collections.singletonList(convertMappings(templateMetadata.mappings())), + Collections.singletonList(templateMetadata.mappings()), xContentRegistry() ); List resolvedAliases = resolveAndValidateAliases( @@ -816,12 +813,10 @@ public void testRequestDataHavePriorityOverTemplateData() throws Exception { assertThat(resolvedAliases.get(0).getSearchRouting(), equalTo("fromRequest")); assertThat(aggregatedIndexSettings.get("key1"), equalTo("requestValue")); - assertThat(parsedMappings, hasKey("type")); - Map mappingType = parsedMappings.get("type"); - assertThat(mappingType, hasKey("type")); - Map type = (Map) mappingType.get("type"); - assertThat(type, hasKey("properties")); - Map mappingsProperties = (Map) type.get("properties"); + assertThat(parsedMappings, hasKey("_doc")); + Map doc = (Map) parsedMappings.get("_doc"); + assertThat(doc, hasKey("properties")); + Map mappingsProperties = (Map) doc.get("properties"); assertThat(mappingsProperties, hasKey("test")); assertThat((Map) mappingsProperties.get("test"), hasValue("keyword")); } @@ -1046,9 +1041,9 @@ public void testParseMappingsWithTypedTemplateAndTypelessIndexMapping() throws E } }); - Map> mappings = parseV1Mappings( - singletonMap(MapperService.SINGLE_MAPPING_NAME, "{\"_doc\":{}}"), - Collections.singletonList(convertMappings(templateMetadata.mappings())), + Map mappings = parseV1Mappings( + "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{}}", + Collections.singletonList(templateMetadata.mappings()), xContentRegistry() ); assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME)); @@ -1062,12 +1057,8 @@ public void testParseMappingsWithTypedTemplate() throws Exception { ExceptionsHelper.reThrowIfNotNull(e); } }); - Map> mappings = parseV1Mappings( - emptyMap(), - 
Collections.singletonList(convertMappings(templateMetadata.mappings())), - xContentRegistry() - ); - assertThat(mappings, Matchers.hasKey("type")); + Map mappings = parseV1Mappings("", Collections.singletonList(templateMetadata.mappings()), xContentRegistry()); + assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME)); } public void testParseMappingsWithTypelessTemplate() throws Exception { @@ -1078,11 +1069,7 @@ public void testParseMappingsWithTypelessTemplate() throws Exception { ExceptionsHelper.reThrowIfNotNull(e); } }); - Map> mappings = parseV1Mappings( - emptyMap(), - Collections.singletonList(convertMappings(templateMetadata.mappings())), - xContentRegistry() - ); + Map mappings = parseV1Mappings("", Collections.singletonList(templateMetadata.mappings()), xContentRegistry()); assertThat(mappings, Matchers.hasKey(MapperService.SINGLE_MAPPING_NAME)); } @@ -1253,7 +1240,7 @@ private CompressedXContent createMapping(String fieldName, String fieldType) { final String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") + .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(fieldName) .field("type", fieldType) diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index 685a2288a9128..19e73422c5362 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -970,7 +970,6 @@ public void testFindV2InvalidGlobalTemplate() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/57393") public void testResolveConflictingMappings() throws Exception { final MetadataIndexTemplateService service = getMetadataIndexTemplateService(); ClusterState state = 
ClusterState.EMPTY_STATE; @@ -2066,6 +2065,27 @@ public void testUnreferencedDataStreamsWhenAddingTemplate() throws Exception { service.addIndexTemplateV2(stateWithDSAndTemplate, false, "logs", nonDSTemplate); } + public void testLegacyNoopUpdate() { + ClusterState state = ClusterState.EMPTY_STATE; + PutRequest pr = new PutRequest("api", "id"); + pr.patterns(Arrays.asList("foo", "bar")); + if (randomBoolean()) { + pr.settings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 3).build()); + } + if (randomBoolean()) { + pr.mappings(Collections.emptyMap()); + } + if (randomBoolean()) { + pr.aliases(Collections.singleton(new Alias("alias"))); + } + pr.order(randomIntBetween(0, 10)); + state = MetadataIndexTemplateService.innerPutTemplate(state, pr, new IndexTemplateMetadata.Builder("id")); + + assertNotNull(state.metadata().templates().get("id")); + + assertThat(MetadataIndexTemplateService.innerPutTemplate(state, pr, new IndexTemplateMetadata.Builder("id")), equalTo(state)); + } + private static List putTemplate(NamedXContentRegistry xContentRegistry, PutRequest request) { MetadataCreateIndexService createIndexService = new MetadataCreateIndexService( Settings.EMPTY, diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java index b1043dba0a02e..a87ec461e5dc8 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java @@ -39,6 +39,7 @@ import org.opensearch.common.compress.CompressedXContent; import org.opensearch.index.Index; import org.opensearch.index.IndexService; +import org.opensearch.index.mapper.MapperService; import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; import org.opensearch.test.InternalSettingsPlugin; @@ -57,8 +58,11 @@ protected Collection> 
getPlugins() { } public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Exception { - final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type")); - final CompressedXContent currentMapping = indexService.mapperService().documentMapper("type").mappingSource(); + final IndexService indexService = createIndex( + "test", + client().admin().indices().prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME) + ); + final CompressedXContent currentMapping = indexService.mapperService().documentMapper().mappingSource(); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); @@ -74,11 +78,11 @@ public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Ex assertTrue(result.executionResults.values().iterator().next().isSuccess()); // the task really was a mapping update assertThat( - indexService.mapperService().documentMapper("type").mappingSource(), - not(equalTo(result.resultingState.metadata().index("test").getMappings().get("type").source())) + indexService.mapperService().documentMapper().mappingSource(), + not(equalTo(result.resultingState.metadata().index("test").getMappings().get(MapperService.SINGLE_MAPPING_NAME).source())) ); // since we never committed the cluster state update, the in-memory state is unchanged - assertThat(indexService.mapperService().documentMapper("type").mappingSource(), equalTo(currentMapping)); + assertThat(indexService.mapperService().documentMapper().mappingSource(), equalTo(currentMapping)); } public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception { diff --git a/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java index 14996b5ebf453..80ad315c0f613 100644 --- 
a/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java @@ -264,11 +264,9 @@ public void testToXContentGateway_FlatSettingTrue_ReduceMappingFalse() throws IO + Version.CURRENT.id + "\"\n" + " },\n" - + " \"mappings\" : [\n" - + " {\n" - + " \"key1\" : { }\n" - + " }\n" - + " ],\n" + + " \"mappings\" : {\n" + + " \"key1\" : { }\n" + + " },\n" + " \"aliases\" : { }\n" + " }\n" + " },\n" @@ -434,11 +432,7 @@ public void testToXContentGateway_FlatSettingFalse_ReduceMappingTrue() throws IO + " }\n" + " }\n" + " },\n" - + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" - + " },\n" + + " \"mappings\" : { },\n" + " \"aliases\" : { }\n" + " }\n" + " },\n" @@ -500,9 +494,7 @@ public void testToXContentAPI_FlatSettingTrue_ReduceMappingFalse() throws IOExce + "\"\n" + " },\n" + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" + + " \"key1\" : { }\n" + " },\n" + " \"aliases\" : { }\n" + " }\n" @@ -610,11 +602,7 @@ public void testToXContentAPI_FlatSettingFalse_ReduceMappingTrue() throws IOExce + " }\n" + " }\n" + " },\n" - + " \"mappings\" : {\n" - + " \"type\" : {\n" - + " \"key1\" : { }\n" - + " }\n" - + " },\n" + + " \"mappings\" : { },\n" + " \"aliases\" : { }\n" + " }\n" + " },\n" diff --git a/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java index 90112fb74832a..625cfbb81f8bc 100644 --- a/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java @@ -57,7 +57,7 @@ public void testUpdateDefaultSearchAnalyzer() throws Exception { ); String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject()); indexService.mapperService().merge("_doc", new 
CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - assertEquals(mapping, indexService.mapperService().documentMapper("_doc").mapping().toString()); + assertEquals(mapping, indexService.mapperService().documentMapper().mapping().toString()); } } diff --git a/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java index 9b355a8064660..aa0a7f36a793f 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java @@ -363,7 +363,7 @@ ObjectMapper createObjectMapper(MapperService mapperService, String name) { ParseContext context = new ParseContext.InternalParseContext( settings, mapperService.documentMapperParser(), - mapperService.documentMapper("type"), + mapperService.documentMapper(), null, null ); diff --git a/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java index 27e895ee64f90..c0900cc40abff 100644 --- a/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java @@ -93,7 +93,7 @@ public void testGetIndex() { public void testGetFieldMappings() { GetFieldMappingsResponse getFieldMappingsResponse = client().admin().indices().prepareGetFieldMappings().setFields("*").get(); - Map>> mappings = getFieldMappingsResponse.mappings(); + Map> mappings = getFieldMappingsResponse.mappings(); assertEquals(2, mappings.size()); assertFieldMappings(mappings.get("index1"), ALL_FLAT_FIELDS); assertFieldMappings(mappings.get("filtered"), FILTERED_FLAT_FIELDS); @@ -107,6 +107,14 @@ public void testGetFieldMappings() { assertFieldMappings(response.mappings().get("test"), FILTERED_FLAT_FIELDS); } + public void testGetNonExistentFieldMapping() { + 
GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("index1").setFields("non-existent").get(); + Map> mappings = response.mappings(); + assertEquals(1, mappings.size()); + Map fieldmapping = mappings.get("index1"); + assertEquals(0, fieldmapping.size()); + } + public void testFieldCapabilities() { List allFields = new ArrayList<>(ALL_FLAT_FIELDS); allFields.addAll(ALL_OBJECT_FIELDS); @@ -142,11 +150,10 @@ private static void assertFieldCaps(FieldCapabilitiesResponse fieldCapabilitiesR } private static void assertFieldMappings( - Map> mappings, + Map actual, Collection expectedFields ) { - assertEquals(1, mappings.size()); - Map fields = new HashMap<>(mappings.get("_doc")); + Map fields = new HashMap<>(actual); Set builtInMetadataFields = IndicesModule.getBuiltInMetadataFields(); for (String field : builtInMetadataFields) { GetFieldMappingsResponse.FieldMappingMetadata fieldMappingMetadata = fields.remove(field); diff --git a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java index 3543fca856a20..eae52efa391a1 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java @@ -78,32 +78,6 @@ protected Collection> getPlugins() { return Arrays.asList(InternalSettingsPlugin.class, ReloadableFilterPlugin.class); } - public void testTypeNameStartsWithIllegalDot() { - String index = "test-index"; - String type = ".test-type"; - String field = "field"; - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> { client().admin().indices().prepareCreate(index).addMapping(type, field, "type=text").execute().actionGet(); } - ); - assertTrue(e.getMessage(), e.getMessage().contains("mapping type name [.test-type] must not start with a '.'")); - } - - public void testTypeNameTooLong() { - String index = "text-index"; - 
String field = "field"; - String type = new String(new char[256]).replace("\0", "a"); - - MapperException e = expectThrows( - MapperException.class, - () -> { client().admin().indices().prepareCreate(index).addMapping(type, field, "type=text").execute().actionGet(); } - ); - assertTrue( - e.getMessage(), - e.getMessage().contains("mapping type name [" + type + "] is too long; limit is length 255 but was [256]") - ); - } - public void testTypeValidation() { InvalidTypeNameException e = expectThrows(InvalidTypeNameException.class, () -> MapperService.validateTypeName("_type")); assertEquals("mapping type name [_type] can't start with '_' unless it is called [_doc]", e.getMessage()); diff --git a/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java b/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java index d54283f03759f..80fd4edc6ac78 100644 --- a/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java @@ -63,16 +63,13 @@ public void testConflictFieldsMapping(String fieldName) throws Exception { // test store, ... 
all the parameters that are not to be changed just like in other fields XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject(fieldName) .field("enabled", true) .field("store", false) .endObject() - .endObject() .endObject(); XContentBuilder mappingUpdate = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject(fieldName) .field("enabled", true) .field("store", true) @@ -82,65 +79,73 @@ public void testConflictFieldsMapping(String fieldName) throws Exception { .field("type", "text") .endObject() .endObject() - .endObject() .endObject(); testConflictWhileMergingAndMappingUnchanged(mapping, mappingUpdate); } protected void testConflictWhileMergingAndMappingUnchanged(XContentBuilder mapping, XContentBuilder mappingUpdate) throws IOException { - IndexService indexService = createIndex("test", Settings.builder().build(), "type", mapping); - CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + IndexService indexService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping); + CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper().mappingSource(); // simulate like in MetadataMappingService#putMapping try { indexService.mapperService() - .merge("type", new CompressedXContent(BytesReference.bytes(mappingUpdate)), MapperService.MergeReason.MAPPING_UPDATE); + .merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(BytesReference.bytes(mappingUpdate)), + MapperService.MergeReason.MAPPING_UPDATE + ); fail(); } catch (IllegalArgumentException e) { // expected } // make sure simulate flag actually worked - no mappings applied - CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper("type").mappingSource(); + CompressedXContent mappingAfterUpdate = indexService.mapperService().documentMapper().mappingSource(); 
assertThat(mappingAfterUpdate, equalTo(mappingBeforeUpdate)); } public void testConflictSameType() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.builder().build(), "type", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping) + .mapperService(); XContentBuilder update = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "double") .endObject() .endObject() - .endObject() .endObject(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(update)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); e = expectThrows( IllegalArgumentException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(update)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); assertThat( - ((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(), + ((FieldMapper) mapperService.documentMapper().mapping().root().getMapper("foo")).fieldType().typeName(), equalTo("long") ); } 
@@ -148,35 +153,36 @@ public void testConflictSameType() throws Exception { public void testConflictNewType() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.builder().build(), "type", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.builder().build(), MapperService.SINGLE_MAPPING_NAME, mapping) + .mapperService(); XContentBuilder update = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "double") .endObject() .endObject() - .endObject() .endObject(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(update)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(update)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("mapper [foo] cannot be changed from type [long] to [double]")); assertThat( - ((FieldMapper) mapperService.documentMapper("type").mapping().root().getMapper("foo")).fieldType().typeName(), + ((FieldMapper) mapperService.documentMapper().mapping().root().getMapper("foo")).fieldType().typeName(), equalTo("long") ); } @@ -184,25 +190,31 @@ public void testConflictNewType() throws Exception { public void testReuseMetaField() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("_id") .field("type", "text") .endObject() .endObject() - .endObject() .endObject(); MapperService mapperService = createIndex("test", Settings.builder().build()).mapperService(); 
MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(mapping)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e.getMessage(), containsString("Field [_id] is defined more than once")); MapperParsingException e2 = expectThrows( MapperParsingException.class, - () -> mapperService.merge("type", new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE) + () -> mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(mapping)), + MapperService.MergeReason.MAPPING_UPDATE + ) ); assertThat(e2.getMessage(), containsString("Field [_id] is defined more than once")); } @@ -211,47 +223,43 @@ public void testRejectFieldDefinedTwice() throws IOException { String mapping1 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "object") .endObject() .endObject() .endObject() - .endObject() ); String mapping2 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") .endObject() .endObject() .endObject() - .endObject() ); MapperService mapperService1 = createIndex("test1").mapperService(); - mapperService1.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE); + mapperService1.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> mapperService1.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE) + () -> mapperService1.merge(MapperService.SINGLE_MAPPING_NAME, new 
CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE) ); assertThat(e.getMessage(), equalTo("can't merge a non object mapping [foo] with an object mapping")); MapperService mapperService2 = createIndex("test2").mapperService(); - mapperService2.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); + mapperService2.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE); e = expectThrows( IllegalArgumentException.class, - () -> mapperService2.merge("type", new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE) + () -> mapperService2.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping1), MergeReason.MAPPING_UPDATE) ); assertThat(e.getMessage(), equalTo("can't merge a non object mapping [foo] with an object mapping")); } public void testMappingVersion() { - createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type")); + createIndex("test", client().admin().indices().prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME)); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); { final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion(); diff --git a/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java index f4f5662755a54..bf373ac180f04 100644 --- a/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java @@ -50,7 +50,7 @@ protected TypeQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(TypeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - if (createShardContext().getMapperService().documentMapper(queryBuilder.type()) == null) { + if (createShardContext().getMapperService().documentMapper() == 
null) { assertEquals(new MatchNoDocsQuery(), query); } else { assertThat(query, equalTo(Queries.newNonNestedFilter(context.indexVersionCreated()))); diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index 91fb1f9b1ff21..c9179c9531c29 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -79,7 +79,6 @@ import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; -import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.IndexScopedSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; @@ -92,12 +91,10 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexSettings; -import org.opensearch.index.VersionType; import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.CommitStats; import org.opensearch.index.engine.DocIdSeqNoAndSource; import org.opensearch.index.engine.Engine; -import org.opensearch.index.engine.Engine.DeleteResult; import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.engine.EngineConfigFactory; import org.opensearch.index.engine.EngineTestCase; @@ -3358,11 +3355,7 @@ public void testEstimateTotalDocSize() throws Exception { // Do some updates and deletes, then recheck the correlation again. 
for (int i = 0; i < numDoc / 2; i++) { - if (randomBoolean()) { - deleteDoc(indexShard, "doc", Integer.toString(i)); - } else { - indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}"); - } + indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}"); } if (randomBoolean()) { indexShard.flush(new FlushRequest()); @@ -3939,7 +3932,10 @@ public void testOnCloseStats() throws IOException { public void testSupplyTombstoneDoc() throws Exception { IndexShard shard = newStartedShard(); String id = randomRealisticUnicodeOfLengthBetween(1, 10); - ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc("doc", id); + ParsedDocument deleteTombstone = shard.getEngine() + .config() + .getTombstoneDocSupplier() + .newDeleteTombstoneDoc(MapperService.SINGLE_MAPPING_NAME, id); assertThat(deleteTombstone.docs(), hasSize(1)); ParseContext.Document deleteDoc = deleteTombstone.docs().get(0); assertThat( @@ -4294,38 +4290,6 @@ public Settings threadPoolSettings() { return Settings.builder().put(super.threadPoolSettings()).put("thread_pool.estimated_time_interval", "5ms").build(); } - public void testTypelessDelete() throws IOException { - Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .build(); - IndexMetadata metadata = IndexMetadata.builder("index") - .putMapping("some_type", "{ \"properties\": {}}") - .settings(settings) - .primaryTerm(0, 1) - .build(); - IndexShard shard = newShard(new ShardId(metadata.getIndex(), 0), true, "n1", metadata, null); - recoverShardFromStore(shard); - Engine.IndexResult indexResult = indexDoc(shard, "some_type", "id", "{}"); - assertTrue(indexResult.isCreated()); - - DeleteResult deleteResult = shard.applyDeleteOperationOnPrimary( - Versions.MATCH_ANY, - "some_other_type", - "id", - VersionType.INTERNAL, - 
UNASSIGNED_SEQ_NO, - 1 - ); - assertFalse(deleteResult.isFound()); - - deleteResult = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, "_doc", "id", VersionType.INTERNAL, UNASSIGNED_SEQ_NO, 1); - assertTrue(deleteResult.isFound()); - - closeShards(shard); - } - public void testTypelessGet() throws IOException { Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java index b238bee976a96..361a6cd543d65 100644 --- a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java @@ -64,7 +64,6 @@ public class GeoContextMappingTests extends OpenSearchSingleNodeTestCase { public void testIndexingWithNoContexts() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -76,16 +75,15 @@ public void testIndexingWithNoContexts() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fieldType("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( "test", - "type1", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -114,7 +112,6 @@ public void testIndexingWithNoContexts() throws Exception { public void testIndexingWithSimpleContexts() throws Exception { XContentBuilder mapping = 
jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -126,16 +123,15 @@ public void testIndexingWithSimpleContexts() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fieldType("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( "test", - "type1", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -162,7 +158,6 @@ public void testIndexingWithSimpleContexts() throws Exception { public void testIndexingWithContextList() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -174,16 +169,15 @@ public void testIndexingWithContextList() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fieldType("completion"); ParsedDocument parsedDocument = mapperService.documentMapper() .parse( new SourceToParse( "test", - "type1", + MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -214,7 +208,6 @@ public void testIndexingWithContextList() throws Exception { public void testIndexingWithMultipleContexts() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") 
.startObject("properties") .startObject("completion") .field("type", "completion") @@ -230,10 +223,9 @@ public void testIndexingWithMultipleContexts() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); - MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); + MapperService mapperService = createIndex("test", Settings.EMPTY, MapperService.SINGLE_MAPPING_NAME, mapping).mapperService(); MappedFieldType completionFieldType = mapperService.fieldType("completion"); XContentBuilder builder = jsonBuilder().startObject() .startArray("completion") @@ -248,7 +240,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endArray() .endObject(); ParsedDocument parsedDocument = mapperService.documentMapper() - .parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)); + .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } From 6f12fa19e84a9a7510eadf749fe007ba12fdb01e Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Fri, 11 Mar 2022 09:36:37 -0800 Subject: [PATCH 20/46] [Remove] Type mapping end-points from RestMultiSearchTemplateAction (#2433) Signed-off-by: Suraj Singh --- .../script/mustache/RestMultiSearchTemplateAction.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java index fc5a0ff601a00..52a6fb3756c16 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java +++ 
b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestMultiSearchTemplateAction.java @@ -75,10 +75,7 @@ public List routes() { new Route(GET, "/_msearch/template"), new Route(POST, "/_msearch/template"), new Route(GET, "/{index}/_msearch/template"), - new Route(POST, "/{index}/_msearch/template"), - // Deprecated typed endpoints. - new Route(GET, "/{index}/{type}/_msearch/template"), - new Route(POST, "/{index}/{type}/_msearch/template") + new Route(POST, "/{index}/_msearch/template") ) ); } From 95d47502491ef1efa257275604686665f4e33dfb Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Sat, 12 Mar 2022 15:03:00 -0500 Subject: [PATCH 21/46] [Remove] types from translog (#2439) Removes persisting the type in the translog since types are no longer supported. Signed-off-by: Nicholas Walter Knize --- .../action/PainlessExecuteAction.java | 3 +- .../RankFeatureMetaFieldMapperTests.java | 2 +- .../mapper/ScaledFloatFieldMapperTests.java | 9 - .../mapper/ParentJoinFieldMapperTests.java | 27 +- .../percolator/PercolateQueryBuilder.java | 2 +- .../PercolatorFieldMapperTests.java | 17 -- .../documentation/ReindexDocumentationIT.java | 8 +- .../index/mapper/size/SizeMappingTests.java | 6 +- .../20_missing_field.yml | 2 + .../action/termvectors/GetTermVectorsIT.java | 2 - .../opensearch/index/shard/IndexShardIT.java | 4 +- .../action/bulk/TransportShardBulkAction.java | 12 +- .../cluster/metadata/IndexMetadata.java | 14 +- .../metadata/MetadataMappingService.java | 15 +- .../org/opensearch/index/IndexingSlowLog.java | 2 - .../org/opensearch/index/engine/Engine.java | 26 +- .../opensearch/index/engine/EngineConfig.java | 2 +- .../index/engine/InternalEngine.java | 9 +- .../index/engine/LuceneChangesSnapshot.java | 4 +- .../opensearch/index/get/ShardGetService.java | 1 - .../index/mapper/DocumentMapper.java | 6 +- .../index/mapper/DocumentParser.java | 15 -- .../index/mapper/ParsedDocument.java | 8 +- .../index/mapper/SourceToParse.java | 13 +- 
.../index/mapper/TypeFieldMapper.java | 2 +- .../opensearch/index/shard/IndexShard.java | 96 +------ .../index/termvectors/TermVectorsService.java | 2 +- .../opensearch/index/translog/Translog.java | 84 +++---- .../index/translog/TranslogWriter.java | 2 - .../bulk/TransportShardBulkActionTests.java | 3 +- .../get/TransportMultiGetActionTests.java | 2 - .../resync/ResyncReplicationRequestTests.java | 2 +- ...TransportResyncReplicationActionTests.java | 2 +- ...ReplicationAllPermitsAcquisitionTests.java | 2 +- .../TransportMultiTermVectorsActionTests.java | 2 - .../MetadataCreateDataStreamServiceTests.java | 2 +- .../cluster/metadata/MetadataTests.java | 10 +- .../metadata/ToAndFromJsonMetadataTests.java | 3 +- .../index/IndexingSlowLogTests.java | 4 - .../index/engine/InternalEngineTests.java | 103 +++----- .../engine/LuceneChangesSnapshotTests.java | 6 +- .../index/engine/NoOpEngineTests.java | 2 +- .../index/engine/ReadOnlyEngineTests.java | 2 +- .../fielddata/BinaryDVFieldDataTests.java | 8 +- .../mapper/DataStreamFieldMapperTests.java | 5 - .../index/mapper/DocumentParserTests.java | 11 +- .../index/mapper/DynamicMappingTests.java | 2 +- .../mapper/FieldNamesFieldMapperTests.java | 3 - .../GenericStoreDynamicTemplateTests.java | 2 +- .../index/mapper/IdFieldMapperTests.java | 3 +- .../index/mapper/IndexFieldMapperTests.java | 1 - .../index/mapper/IpRangeFieldMapperTests.java | 1 - .../mapper/JavaMultiFieldMergeTests.java | 16 +- .../index/mapper/MultiFieldTests.java | 7 +- .../index/mapper/NestedObjectMapperTests.java | 22 +- .../mapper/NullValueObjectMappingTests.java | 3 - .../index/mapper/ObjectMapperTests.java | 1 - .../mapper/PathMatchDynamicTemplateTests.java | 2 +- .../index/mapper/RoutingFieldMapperTests.java | 2 - .../index/mapper/SourceFieldMapperTests.java | 8 +- .../mapper/StoredNumericValuesTests.java | 1 - .../index/mapper/TypeFieldMapperTests.java | 4 +- .../IndexLevelReplicationTests.java | 17 +- .../RecoveryDuringReplicationTests.java | 11 +- 
.../index/shard/IndexShardTests.java | 72 +++--- .../shard/IndexingOperationListenerTests.java | 2 +- .../shard/PrimaryReplicaSyncerTests.java | 4 +- .../index/shard/RefreshListenersTests.java | 12 +- .../RemoveCorruptedShardDataCommandTests.java | 2 +- .../index/shard/ShardGetServiceTests.java | 13 +- .../index/translog/TranslogTests.java | 234 ++++++------------ .../IndexingMemoryControllerTests.java | 2 +- .../PeerRecoveryTargetServiceTests.java | 2 +- .../recovery/RecoverySourceHandlerTests.java | 5 +- .../indices/recovery/RecoveryTests.java | 18 +- .../CategoryContextMappingTests.java | 12 +- .../completion/GeoContextMappingTests.java | 5 +- .../index/engine/EngineTestCase.java | 47 +--- .../index/engine/TranslogHandler.java | 34 +-- .../index/mapper/MapperServiceTestCase.java | 4 +- ...enSearchIndexLevelReplicationTestCase.java | 17 +- .../index/shard/IndexShardTestCase.java | 30 +-- 82 files changed, 334 insertions(+), 851 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java index 4999d5d444673..be26e69ec22d1 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/action/PainlessExecuteAction.java @@ -593,10 +593,9 @@ private static Response prepareRamIndex( try (Directory directory = new ByteBuffersDirectory()) { try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(defaultAnalyzer))) { String index = indexService.index().getName(); - String type = indexService.mapperService().documentMapper().type(); BytesReference document = request.contextSetup.document; XContentType xContentType = request.contextSetup.xContentType; - SourceToParse sourceToParse = new SourceToParse(index, type, "_id", document, xContentType); + SourceToParse sourceToParse = new 
SourceToParse(index, "_id", document, xContentType); ParsedDocument parsedDocument = indexService.mapperService().documentMapper().parse(sourceToParse); indexWriter.addDocuments(parsedDocument.docs()); try (IndexReader indexReader = DirectoryReader.open(indexWriter)) { diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java index 46e71096ba307..3161e7462d2a0 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeatureMetaFieldMapperTests.java @@ -91,7 +91,7 @@ public void testDocumentParsingFailsOnMetaField() throws Exception { BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject()); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON)) + () -> mapper.parse(new SourceToParse("test", "1", bytes, XContentType.JSON)) ); assertTrue( e.getCause().getMessage().contains("Field [" + rfMetaField + "] is a metadata field and cannot be added inside a document.") diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java index b3db286d39dac..3de322b286183 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -134,7 +134,6 @@ public void testNotIndexed() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", 
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), XContentType.JSON @@ -156,7 +155,6 @@ public void testNoDocValues() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), XContentType.JSON @@ -178,7 +176,6 @@ public void testStore() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", 123).endObject()), XContentType.JSON @@ -202,7 +199,6 @@ public void testCoerce() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()), XContentType.JSON @@ -222,7 +218,6 @@ public void testCoerce() throws Exception { ThrowingRunnable runnable = () -> mapper2.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "123").endObject()), XContentType.JSON @@ -246,7 +241,6 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains ThrowingRunnable runnable = () -> mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON @@ -261,7 +255,6 @@ private void doTestIgnoreMalformed(String value, String exceptionMessageContains ParsedDocument doc = mapper2.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", value).endObject()), XContentType.JSON @@ -277,7 +270,6 @@ public void testNullValue() throws IOException { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", 
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()), XContentType.JSON @@ -291,7 +283,6 @@ public void testNullValue() throws IOException { doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("field").endObject()), XContentType.JSON diff --git a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java index 628345a625d1b..a9ac151dd3806 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/mapper/ParentJoinFieldMapperTests.java @@ -80,13 +80,7 @@ public void testSingleLevel() throws Exception { // Doc without join ParsedDocument doc = docMapper.parse( - new SourceToParse( - "test", - "type", - "0", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), - XContentType.JSON - ) + new SourceToParse("test", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON) ); assertNull(doc.rootDoc().getBinaryValue("join_field")); @@ -94,7 +88,6 @@ public void testSingleLevel() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()), XContentType.JSON @@ -107,7 +100,6 @@ public void testSingleLevel() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -131,7 +123,6 @@ public void testSingleLevel() throws Exception { () -> docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()), XContentType.JSON @@ -161,7 +152,6 @@ 
public void testParentIdSpecifiedAsNumber() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -181,7 +171,6 @@ public void testParentIdSpecifiedAsNumber() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -222,13 +211,7 @@ public void testMultipleLevels() throws Exception { // Doc without join ParsedDocument doc = docMapper.parse( - new SourceToParse( - "test", - "type", - "0", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), - XContentType.JSON - ) + new SourceToParse("test", "0", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON) ); assertNull(doc.rootDoc().getBinaryValue("join_field")); @@ -236,7 +219,6 @@ public void testMultipleLevels() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "parent").endObject()), XContentType.JSON @@ -249,7 +231,6 @@ public void testMultipleLevels() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -274,7 +255,6 @@ public void testMultipleLevels() throws Exception { () -> docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "child").endObject()), XContentType.JSON, @@ -290,7 +270,6 @@ public void testMultipleLevels() throws Exception { () -> docMapper.parse( new SourceToParse( "test", - "type", "2", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -311,7 +290,6 @@ public void testMultipleLevels() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "3", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -335,7 +313,6 @@ public void 
testMultipleLevels() throws Exception { () -> docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("join_field", "unknown").endObject()), XContentType.JSON diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index 75a7757ba8a5a..87f08e2ff50fc 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -586,7 +586,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { } docMapper = mapperService.documentMapper(); for (BytesReference document : documents) { - docs.add(docMapper.parse(new SourceToParse(context.index().getName(), type, "_temp_id", document, documentXContentType))); + docs.add(docMapper.parse(new SourceToParse(context.index().getName(), "_temp_id", document, documentXContentType))); } FieldNameAnalyzer fieldNameAnalyzer = (FieldNameAnalyzer) docMapper.mappers().indexAnalyzer(); diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index 691c3b648cd6a..2c0aa593317b4 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -553,7 +553,6 @@ public void testPercolatorFieldMapper() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -574,7 +573,6 @@ public void testPercolatorFieldMapper() throws Exception { .parse( new SourceToParse( 
"test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -592,7 +590,6 @@ public void testPercolatorFieldMapper() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -621,7 +618,6 @@ public void testStoringQueries() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, query).endObject()), XContentType.JSON @@ -640,7 +636,6 @@ public void testQueryWithRewrite() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, queryBuilder).endObject()), XContentType.JSON @@ -665,7 +660,6 @@ public void testPercolatorFieldMapperUnMappedField() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field(fieldName, termQuery("unmapped_field", "value")).endObject() @@ -684,7 +678,6 @@ public void testPercolatorFieldMapper_noQuery() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON @@ -697,7 +690,6 @@ public void testPercolatorFieldMapper_noQuery() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField(fieldName).endObject()), XContentType.JSON @@ -760,7 +752,6 @@ public void testMultiplePercolatorFields() throws Exception { .parse( new SourceToParse( "test", - typeName, "1", 
BytesReference.bytes( jsonBuilder().startObject().field("query_field1", queryBuilder).field("query_field2", queryBuilder).endObject() @@ -803,7 +794,6 @@ public void testNestedPercolatorField() throws Exception { .parse( new SourceToParse( "test", - typeName, "1", BytesReference.bytes( jsonBuilder().startObject().startObject("object_field").field("query_field", queryBuilder).endObject().endObject() @@ -823,7 +813,6 @@ public void testNestedPercolatorField() throws Exception { .parse( new SourceToParse( "test", - typeName, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -846,7 +835,6 @@ public void testNestedPercolatorField() throws Exception { .parse( new SourceToParse( "test", - typeName, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -954,7 +942,6 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -1002,7 +989,6 @@ public void testImplicitlySetDefaultScriptLang() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -1097,7 +1083,6 @@ public void testDuplicatedClauses() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), XContentType.JSON @@ -1123,7 +1108,6 @@ public void testDuplicatedClauses() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), XContentType.JSON @@ -1152,7 +1136,6 @@ public void testDuplicatedClauses() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, qb).endObject()), 
XContentType.JSON diff --git a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java index 08bc18442b760..6d313e06263b3 100644 --- a/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/opensearch/client/documentation/ReindexDocumentationIT.java @@ -342,16 +342,16 @@ public static class BlockingOperationListener implements IndexingOperationListen @Override public Engine.Index preIndex(ShardId shardId, Engine.Index index) { - return preCheck(index, index.type()); + return preCheck(index); } @Override public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) { - return preCheck(delete, delete.type()); + return preCheck(delete); } - private T preCheck(T operation, String type) { - if (("_doc".equals(type) == false) || (operation.origin() != Engine.Operation.Origin.PRIMARY)) { + private T preCheck(T operation) { + if ((operation.origin() != Engine.Operation.Origin.PRIMARY)) { return operation; } diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java index c1fb3d8083151..4e4648a87fbfc 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingTests.java @@ -66,7 +66,7 @@ public void testSizeEnabled() throws Exception { DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new 
SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON)); boolean stored = false; boolean points = false; @@ -83,7 +83,7 @@ public void testSizeDisabled() throws Exception { DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } @@ -93,7 +93,7 @@ public void testSizeNotSet() throws Exception { DocumentMapper docMapper = service.mapperService().documentMapper(); BytesReference source = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()); - ParsedDocument doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", source, XContentType.JSON)); + ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", source, XContentType.JSON)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml index a65908b238013..2f15334f882a9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml @@ -14,5 +14,7 @@ indices.get_field_mapping: index: test_index fields: not_existent + ignore: 404 # ignore 404 failures for now + # see: 
https://github.com/opensearch-project/OpenSearch/issues/2440 - match: { 'test_index.mappings': {}} diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java index d7017122d221c..d28dcbb924f95 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java @@ -81,14 +81,12 @@ protected Collection> nodePlugins() { public void testNoSuchDoc() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") .field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject() .endObject(); assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java index 4c200720a3af6..0e915577dc467 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java @@ -375,7 +375,7 @@ public void testMaybeFlush() throws Exception { shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse("test", "_doc", "1", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -460,7 +460,7 @@ public void testMaybeRollTranslogGeneration() throws Exception { final Engine.IndexResult result = shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new 
SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray("{}"), XContentType.JSON), + new SourceToParse("test", "1", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, diff --git a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java index f3ab9673a0201..cc9f20b7aa256 100644 --- a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java @@ -340,7 +340,6 @@ static boolean executeBulkItemRequest( final DeleteRequest request = context.getRequestToExecute(); result = primary.applyDeleteOperationOnPrimary( version, - MapperService.SINGLE_MAPPING_NAME, request.id(), request.versionType(), request.ifSeqNo(), @@ -351,14 +350,7 @@ static boolean executeBulkItemRequest( result = primary.applyIndexOperationOnPrimary( version, request.versionType(), - new SourceToParse( - request.index(), - MapperService.SINGLE_MAPPING_NAME, - request.id(), - request.source(), - request.getContentType(), - request.routing() - ), + new SourceToParse(request.index(), request.id(), request.source(), request.getContentType(), request.routing()), request.ifSeqNo(), request.ifPrimaryTerm(), request.getAutoGeneratedTimestamp(), @@ -601,7 +593,6 @@ private static Engine.Result performOpOnReplica( final ShardId shardId = replica.shardId(); final SourceToParse sourceToParse = new SourceToParse( shardId.getIndexName(), - MapperService.SINGLE_MAPPING_NAME, indexRequest.id(), indexRequest.source(), indexRequest.getContentType(), @@ -622,7 +613,6 @@ private static Engine.Result performOpOnReplica( primaryResponse.getSeqNo(), primaryResponse.getPrimaryTerm(), primaryResponse.getVersion(), - MapperService.SINGLE_MAPPING_NAME, deleteRequest.id() ); break; diff --git 
a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java index c02358d47b066..a7f351a918ae5 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java @@ -67,6 +67,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.gateway.MetadataStateFormat; import org.opensearch.index.Index; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.ShardId; import org.opensearch.rest.RestStatus; @@ -1159,12 +1160,17 @@ public Builder settings(Settings settings) { return this; } - public MappingMetadata mapping(String type) { - return mappings.get(type); + public MappingMetadata mapping() { + return mappings.get(MapperService.SINGLE_MAPPING_NAME); } - public Builder putMapping(String type, String source) throws IOException { - putMapping(new MappingMetadata(type, XContentHelper.convertToMap(XContentFactory.xContent(source), source, true))); + public Builder putMapping(String source) throws IOException { + putMapping( + new MappingMetadata( + MapperService.SINGLE_MAPPING_NAME, + XContentHelper.convertToMap(XContentFactory.xContent(source), source, true) + ) + ); return this; } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java index 69145bdee72b2..3795961d39143 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataMappingService.java @@ -185,22 +185,11 @@ private boolean refreshIndexMapping(IndexService indexService, IndexMetadata.Bui boolean dirty = false; String index = indexService.index().getName(); try { - List updatedTypes = new ArrayList<>(); 
MapperService mapperService = indexService.mapperService(); DocumentMapper mapper = mapperService.documentMapper(); if (mapper != null) { - final String type = mapper.type(); - if (!mapper.mappingSource().equals(builder.mapping(type).source())) { - updatedTypes.add(type); - } - } - - // if a single type is not up-to-date, re-send everything - if (updatedTypes.isEmpty() == false) { - logger.warn("[{}] re-syncing mappings with cluster state because of types [{}]", index, updatedTypes); - dirty = true; - if (mapper != null) { - builder.putMapping(new MappingMetadata(mapper)); + if (mapper.mappingSource().equals(builder.mapping().source()) == false) { + dirty = true; } } } catch (Exception e) { diff --git a/server/src/main/java/org/opensearch/index/IndexingSlowLog.java b/server/src/main/java/org/opensearch/index/IndexingSlowLog.java index fca91983b2d12..b77e7639152fb 100644 --- a/server/src/main/java/org/opensearch/index/IndexingSlowLog.java +++ b/server/src/main/java/org/opensearch/index/IndexingSlowLog.java @@ -226,7 +226,6 @@ private static Map prepareMap( map.put("message", index); map.put("took", TimeValue.timeValueNanos(tookInNanos)); map.put("took_millis", "" + TimeUnit.NANOSECONDS.toMillis(tookInNanos)); - map.put("doc_type", doc.type()); map.put("id", doc.id()); map.put("routing", doc.routing()); @@ -258,7 +257,6 @@ private static String message(Index index, ParsedDocument doc, long tookInNanos, sb.append(index).append(" "); sb.append("took[").append(TimeValue.timeValueNanos(tookInNanos)).append("], "); sb.append("took_millis[").append(TimeUnit.NANOSECONDS.toMillis(tookInNanos)).append("], "); - sb.append("type[").append(doc.type()).append("], "); sb.append("id[").append(doc.id()).append("], "); if (doc.routing() == null) { sb.append("routing[]"); diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index b821b687c5f68..fe026dd3251eb 100644 --- 
a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -1389,8 +1389,6 @@ public long startTime() { return this.startTime; } - public abstract String type(); - abstract String id(); public abstract TYPE operationType(); @@ -1456,11 +1454,6 @@ public ParsedDocument parsedDoc() { return this.doc; } - @Override - public String type() { - return this.doc.type(); - } - @Override public String id() { return this.doc.id(); @@ -1485,7 +1478,7 @@ public BytesReference source() { @Override public int estimatedSizeInBytes() { - return (id().length() + type().length()) * 2 + source().length() + 12; + return id().length() * 2 + source().length() + 12; } /** @@ -1516,13 +1509,11 @@ public long getIfPrimaryTerm() { public static class Delete extends Operation { - private final String type; private final String id; private final long ifSeqNo; private final long ifPrimaryTerm; public Delete( - String type, String id, Term uid, long seqNo, @@ -1540,15 +1531,13 @@ public Delete( assert ifSeqNo == UNASSIGNED_SEQ_NO || ifSeqNo >= 0 : "ifSeqNo [" + ifSeqNo + "] must be non negative or unset"; assert (origin == Origin.PRIMARY) || (ifSeqNo == UNASSIGNED_SEQ_NO && ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM) : "cas operations are only allowed if origin is primary. 
get [" + origin + "]"; - this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); this.ifSeqNo = ifSeqNo; this.ifPrimaryTerm = ifPrimaryTerm; } - public Delete(String type, String id, Term uid, long primaryTerm) { + public Delete(String id, Term uid, long primaryTerm) { this( - type, id, uid, UNASSIGNED_SEQ_NO, @@ -1564,7 +1553,6 @@ public Delete(String type, String id, Term uid, long primaryTerm) { public Delete(Delete template, VersionType versionType) { this( - template.type(), template.id(), template.uid(), template.seqNo(), @@ -1578,11 +1566,6 @@ public Delete(Delete template, VersionType versionType) { ); } - @Override - public String type() { - return this.type; - } - @Override public String id() { return this.id; @@ -1625,11 +1608,6 @@ public Term uid() { throw new UnsupportedOperationException(); } - @Override - public String type() { - throw new UnsupportedOperationException(); - } - @Override public long version() { throw new UnsupportedOperationException(); diff --git a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java index fd02f3049cc8e..d1085b01a3707 100644 --- a/server/src/main/java/org/opensearch/index/engine/EngineConfig.java +++ b/server/src/main/java/org/opensearch/index/engine/EngineConfig.java @@ -466,7 +466,7 @@ public interface TombstoneDocSupplier { /** * Creates a tombstone document for a delete operation. */ - ParsedDocument newDeleteTombstoneDoc(String type, String id); + ParsedDocument newDeleteTombstoneDoc(String id); /** * Creates a tombstone document for a noop operation. 
diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index a264c8e0a55d9..1756bc738cae1 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -1376,15 +1376,13 @@ private boolean assertDocDoesNotExist(final Index index, final boolean allowDele final VersionValue versionValue = versionMap.getVersionForAssert(index.uid().bytes()); if (versionValue != null) { if (versionValue.isDelete() == false || allowDeleted == false) { - throw new AssertionError( - "doc [" + index.type() + "][" + index.id() + "] exists in version map (version " + versionValue + ")" - ); + throw new AssertionError("doc [" + index.id() + "] exists in version map (version " + versionValue + ")"); } } else { try (Searcher searcher = acquireSearcher("assert doc doesn't exist", SearcherScope.INTERNAL)) { final long docsWithId = searcher.count(new TermQuery(index.uid())); if (docsWithId > 0) { - throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists [" + docsWithId + "] times in index"); + throw new AssertionError("doc [" + index.id() + "] exists [" + docsWithId + "] times in index"); } } } @@ -1420,7 +1418,6 @@ public DeleteResult delete(Delete delete) throws IOException { // generate or register sequence number if (delete.origin() == Operation.Origin.PRIMARY) { delete = new Delete( - delete.type(), delete.id(), delete.uid(), generateSeqNoForOperationOnPrimary(delete), @@ -1608,7 +1605,7 @@ private DeletionStrategy planDeletionAsPrimary(Delete delete) throws IOException private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws IOException { assert assertMaxSeqNoOfUpdatesIsAdvanced(delete.uid(), delete.seqNo(), false, false); try { - final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.type(), delete.id()); 
+ final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.id()); assert tombstone.docs().size() == 1 : "Tombstone doc should have single doc [" + tombstone + "]"; tombstone.updateSeqID(delete.seqNo(), delete.primaryTerm()); tombstone.version().setLongValue(plan.versionOfDeletion); diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index 76bb47c64ab4c..fce866b624367 100644 --- a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -288,10 +288,9 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]"; } else { final String id = fields.uid().id(); - final String type = fields.uid().type(); final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); if (isTombstone) { - op = new Translog.Delete(type, id, uid, seqNo, primaryTerm, version); + op = new Translog.Delete(id, uid, seqNo, primaryTerm, version); assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]"; } else { final BytesReference source = fields.source(); @@ -310,7 +309,6 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { // TODO: pass the latest timestamp from engine. 
final long autoGeneratedIdTimestamp = -1; op = new Translog.Index( - type, id, seqNo, primaryTerm, diff --git a/server/src/main/java/org/opensearch/index/get/ShardGetService.java b/server/src/main/java/org/opensearch/index/get/ShardGetService.java index 8cf315e2fffa8..a877b0085816a 100644 --- a/server/src/main/java/org/opensearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/opensearch/index/get/ShardGetService.java @@ -295,7 +295,6 @@ private GetResult innerGetLoadFromStoredFields( assert source != null : "original source in translog must exist"; SourceToParse sourceToParse = new SourceToParse( shardId.getIndexName(), - MapperService.SINGLE_MAPPING_NAME, id, source, XContentHelper.xContentType(source), diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java index 4e9004a880a57..37e740ec33321 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java @@ -252,14 +252,14 @@ public ParsedDocument parse(SourceToParse source) throws MapperParsingException return documentParser.parseDocument(source, mapping.metadataMappers); } - public ParsedDocument createDeleteTombstoneDoc(String index, String type, String id) throws MapperParsingException { - final SourceToParse emptySource = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON); + public ParsedDocument createDeleteTombstoneDoc(String index, String id) throws MapperParsingException { + final SourceToParse emptySource = new SourceToParse(index, id, new BytesArray("{}"), XContentType.JSON); return documentParser.parseDocument(emptySource, deleteTombstoneMetadataFieldMappers).toTombstone(); } public ParsedDocument createNoopTombstoneDoc(String index, String reason) throws MapperParsingException { final String id = ""; // _id won't be used. 
- final SourceToParse sourceToParse = new SourceToParse(index, type, id, new BytesArray("{}"), XContentType.JSON); + final SourceToParse sourceToParse = new SourceToParse(index, id, new BytesArray("{}"), XContentType.JSON); final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, noopTombstoneMetadataFieldMappers).toTombstone(); // Store the reason of a noop as a raw string in the _source field final BytesRef byteRef = new BytesRef(reason); diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java b/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java index 30579f501a50c..bcafddd6d5816 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentParser.java @@ -53,7 +53,6 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; -import java.util.Objects; import static org.opensearch.index.mapper.FieldMapper.IGNORE_MALFORMED_SETTING; @@ -71,8 +70,6 @@ final class DocumentParser { } ParsedDocument parseDocument(SourceToParse source, MetadataFieldMapper[] metadataFieldsMappers) throws MapperParsingException { - validateType(source); - final Mapping mapping = docMapper.mapping(); final ParseContext.InternalParseContext context; final XContentType xContentType = source.getXContentType(); @@ -140,17 +137,6 @@ private static void internalParseDocument( } } - private void validateType(SourceToParse source) { - if (Objects.equals(source.type(), docMapper.type()) == false && MapperService.SINGLE_MAPPING_NAME.equals(source.type()) == false) { // used - // by - // typeless - // APIs - throw new MapperParsingException( - "Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + docMapper.type() + "]" - ); - } - } - private static void validateStart(XContentParser parser) throws IOException { // will result in START_OBJECT XContentParser.Token token = parser.nextToken(); @@ -189,7 +175,6 @@ 
private static ParsedDocument parsedDocument(SourceToParse source, ParseContext. context.version(), context.seqID(), context.sourceToParse().id(), - context.sourceToParse().type(), source.routing(), context.docs(), context.sourceToParse().source(), diff --git a/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java b/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java index 2d3b5fc1bb9dc..6991db3306ea7 100644 --- a/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java +++ b/server/src/main/java/org/opensearch/index/mapper/ParsedDocument.java @@ -47,7 +47,7 @@ public class ParsedDocument { private final Field version; - private final String id, type; + private final String id; private final SeqNoFieldMapper.SequenceIDFields seqID; private final String routing; @@ -63,7 +63,6 @@ public ParsedDocument( Field version, SeqNoFieldMapper.SequenceIDFields seqID, String id, - String type, String routing, List documents, BytesReference source, @@ -73,7 +72,6 @@ public ParsedDocument( this.version = version; this.seqID = seqID; this.id = id; - this.type = type; this.routing = routing; this.documents = documents; this.source = source; @@ -85,10 +83,6 @@ public String id() { return this.id; } - public String type() { - return this.type; - } - public Field version() { return version; } diff --git a/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java b/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java index 37ecf9491e4b3..4aa8d3117bc9c 100644 --- a/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java +++ b/server/src/main/java/org/opensearch/index/mapper/SourceToParse.java @@ -45,17 +45,14 @@ public class SourceToParse { private final String index; - private final String type; - private final String id; private final @Nullable String routing; private final XContentType xContentType; - public SourceToParse(String index, String type, String id, BytesReference source, XContentType 
xContentType, @Nullable String routing) { + public SourceToParse(String index, String id, BytesReference source, XContentType xContentType, @Nullable String routing) { this.index = Objects.requireNonNull(index); - this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); // we always convert back to byte array, since we store it and Field only supports bytes.. // so, we might as well do it here, and improve the performance of working with direct byte arrays @@ -64,8 +61,8 @@ public SourceToParse(String index, String type, String id, BytesReference source this.routing = routing; } - public SourceToParse(String index, String type, String id, BytesReference source, XContentType xContentType) { - this(index, type, id, source, xContentType, null); + public SourceToParse(String index, String id, BytesReference source, XContentType xContentType) { + this(index, id, source, xContentType, null); } public BytesReference source() { @@ -76,10 +73,6 @@ public String index() { return this.index; } - public String type() { - return this.type; - } - public String id() { return this.id; } diff --git a/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java index ce7bdd3682d83..9adb1430b3df0 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java @@ -186,7 +186,7 @@ public void preParse(ParseContext context) { if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) { return; } - context.doc().add(new Field(fieldType().name(), context.sourceToParse().type(), fieldType)); + context.doc().add(new Field(fieldType().name(), MapperService.SINGLE_MAPPING_NAME, fieldType)); if (fieldType().hasDocValues()) { context.doc().add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(MapperService.SINGLE_MAPPING_NAME))); } diff --git 
a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index cd45b9483834b..9aac2c11e2d35 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -122,7 +122,6 @@ import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.DocumentMapperForType; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.MapperParsingException; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.Mapping; import org.opensearch.index.mapper.ParsedDocument; @@ -154,7 +153,6 @@ import org.opensearch.index.warmer.WarmerStats; import org.opensearch.indices.IndexingMemoryController; import org.opensearch.indices.IndicesService; -import org.opensearch.indices.TypeMissingException; import org.opensearch.indices.breaker.CircuitBreakerService; import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.indices.recovery.PeerRecoveryTargetService; @@ -867,23 +865,9 @@ private Engine.IndexResult applyIndexOperation( ensureWriteAllowed(origin); Engine.Index operation; try { - final String resolvedType = mapperService.resolveDocumentType(sourceToParse.type()); - final SourceToParse sourceWithResolvedType; - if (resolvedType.equals(sourceToParse.type())) { - sourceWithResolvedType = sourceToParse; - } else { - sourceWithResolvedType = new SourceToParse( - sourceToParse.index(), - resolvedType, - sourceToParse.id(), - sourceToParse.source(), - sourceToParse.getXContentType(), - sourceToParse.routing() - ); - } operation = prepareIndex( docMapper(), - sourceWithResolvedType, + sourceToParse, seqNo, opPrimaryTerm, version, @@ -953,8 +937,7 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc if (logger.isTraceEnabled()) { // don't use index.source().utf8ToString() here source might not be 
valid UTF-8 logger.trace( - "index [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]", - index.type(), + "index [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]", index.id(), index.seqNo(), routingEntry().allocationId(), @@ -966,9 +949,8 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc result = engine.index(index); if (logger.isTraceEnabled()) { logger.trace( - "index-done [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}] " + "index-done [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}] " + "result-seq# [{}] result-term [{}] failure [{}]", - index.type(), index.id(), index.seqNo(), routingEntry().allocationId(), @@ -984,8 +966,7 @@ private Engine.IndexResult index(Engine engine, Engine.Index index) throws IOExc if (logger.isTraceEnabled()) { logger.trace( new ParameterizedMessage( - "index-fail [{}][{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]", - index.type(), + "index-fail [{}] seq# [{}] allocation-id [{}] primaryTerm [{}] operationPrimaryTerm [{}] origin [{}]", index.id(), index.seqNo(), routingEntry().allocationId(), @@ -1038,7 +1019,6 @@ public Engine.DeleteResult getFailedDeleteResult(Exception e, long version) { public Engine.DeleteResult applyDeleteOperationOnPrimary( long version, - String type, String id, VersionType versionType, long ifSeqNo, @@ -1050,7 +1030,6 @@ public Engine.DeleteResult applyDeleteOperationOnPrimary( UNASSIGNED_SEQ_NO, getOperationPrimaryTerm(), version, - type, id, versionType, ifSeqNo, @@ -1059,14 +1038,12 @@ public Engine.DeleteResult applyDeleteOperationOnPrimary( ); } - public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long opPrimaryTerm, long version, String type, String id) - throws IOException { + public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, 
long opPrimaryTerm, long version, String id) throws IOException { return applyDeleteOperation( getEngine(), seqNo, opPrimaryTerm, version, - type, id, null, UNASSIGNED_SEQ_NO, @@ -1080,7 +1057,6 @@ private Engine.DeleteResult applyDeleteOperation( long seqNo, long opPrimaryTerm, long version, - String type, String id, @Nullable VersionType versionType, long ifSeqNo, @@ -1093,52 +1069,12 @@ private Engine.DeleteResult applyDeleteOperation( + getOperationPrimaryTerm() + "]"; ensureWriteAllowed(origin); - // When there is a single type, the unique identifier is only composed of the _id, - // so there is no way to differentiate foo#1 from bar#1. This is especially an issue - // if a user first deletes foo#1 and then indexes bar#1: since we do not encode the - // _type in the uid it might look like we are reindexing the same document, which - // would fail if bar#1 is indexed with a lower version than foo#1 was deleted with. - // In order to work around this issue, we make deletions create types. This way, we - // fail if index and delete operations do not use the same type. - // TODO: clean this up when types are gone - try { - Mapping update = docMapper().getMapping(); - if (update != null) { - return new Engine.DeleteResult(update); - } - } catch (MapperParsingException | IllegalArgumentException | TypeMissingException e) { - return new Engine.DeleteResult(e, version, getOperationPrimaryTerm(), seqNo, false); - } - if (mapperService.resolveDocumentType(type).equals(mapperService.documentMapper().type()) == false) { - // We should never get there due to the fact that we generate mapping updates on deletes, - // but we still prefer to have a hard exception here as we would otherwise delete a - // document in the wrong type. 
- throw new IllegalStateException( - "Deleting document from type [" - + mapperService.resolveDocumentType(type) - + "] while current type is [" - + mapperService.documentMapper().type() - + "]" - ); - } final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); - final Engine.Delete delete = prepareDelete( - type, - id, - uid, - seqNo, - opPrimaryTerm, - version, - versionType, - origin, - ifSeqNo, - ifPrimaryTerm - ); + final Engine.Delete delete = prepareDelete(id, uid, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm); return delete(engine, delete); } private Engine.Delete prepareDelete( - String type, String id, Term uid, long seqNo, @@ -1150,19 +1086,7 @@ private Engine.Delete prepareDelete( long ifPrimaryTerm ) { long startTime = System.nanoTime(); - return new Engine.Delete( - mapperService.resolveDocumentType(type), - id, - uid, - seqNo, - primaryTerm, - version, - versionType, - origin, - startTime, - ifSeqNo, - ifPrimaryTerm - ); + return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm); } private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws IOException { @@ -1813,7 +1737,6 @@ private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation o origin, new SourceToParse( shardId.getIndexName(), - index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source()), @@ -1828,7 +1751,6 @@ private Engine.Result applyTranslogOperation(Engine engine, Translog.Operation o delete.seqNo(), delete.primaryTerm(), delete.version(), - delete.type(), delete.id(), versionType, UNASSIGNED_SEQ_NO, @@ -3873,8 +3795,8 @@ private EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() { : null; return new EngineConfig.TombstoneDocSupplier() { @Override - public ParsedDocument newDeleteTombstoneDoc(String type, String id) { - return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), type, id); + 
public ParsedDocument newDeleteTombstoneDoc(String id) { + return docMapper().getDocumentMapper().createDeleteTombstoneDoc(shardId.getIndexName(), id); } @Override diff --git a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java index ecbdd3875f14a..3467a86c86c86 100644 --- a/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/opensearch/index/termvectors/TermVectorsService.java @@ -391,7 +391,7 @@ private static ParsedDocument parseDocument( MapperService mapperService = indexShard.mapperService(); DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(); ParsedDocument parsedDocument = docMapper.getDocumentMapper() - .parse(new SourceToParse(index, MapperService.SINGLE_MAPPING_NAME, "_id_for_tv_api", doc, xContentType, routing)); + .parse(new SourceToParse(index, "_id_for_tv_api", doc, xContentType, routing)); if (docMapper.getMapping() != null) { parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping()); } diff --git a/server/src/main/java/org/opensearch/index/translog/Translog.java b/server/src/main/java/org/opensearch/index/translog/Translog.java index dc7a2bb331808..e04dd6681705e 100644 --- a/server/src/main/java/org/opensearch/index/translog/Translog.java +++ b/server/src/main/java/org/opensearch/index/translog/Translog.java @@ -35,7 +35,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.index.Term; import org.apache.lucene.store.AlreadyClosedException; -import org.opensearch.LegacyESVersion; +import org.opensearch.Version; import org.opensearch.common.Nullable; import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; @@ -54,6 +54,7 @@ import org.opensearch.index.VersionType; import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.MissingHistoryOperationsException; +import 
org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.AbstractIndexShardComponent; import org.opensearch.index.shard.IndexShardComponent; @@ -1192,11 +1193,10 @@ public static class Index implements Operation { public static final int FORMAT_6_0 = 8; // since 6.0.0 public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; - public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; - + public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE; private final String id; private final long autoGeneratedIdTimestamp; - private final String type; private final long seqNo; private final long primaryTerm; private final long version; @@ -1207,7 +1207,10 @@ private Index(final StreamInput in) throws IOException { final int format = in.readVInt(); // SERIALIZATION_FORMAT assert format >= FORMAT_6_0 : "format was: " + format; id = in.readString(); - type = in.readString(); + if (format < FORMAT_NO_DOC_TYPE) { + in.readString(); + // can't assert that this is _doc because pre 2.0 indexes can have any name for a type + } source = in.readBytesReference(); routing = in.readOptionalString(); if (format < FORMAT_NO_PARENT) { @@ -1224,7 +1227,6 @@ private Index(final StreamInput in) throws IOException { public Index(Engine.Index index, Engine.IndexResult indexResult) { this.id = index.id(); - this.type = index.type(); this.source = index.source(); this.routing = index.routing(); this.seqNo = indexResult.getSeqNo(); @@ -1233,21 +1235,11 @@ public Index(Engine.Index index, Engine.IndexResult indexResult) { this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp(); } - public Index(String type, String id, long seqNo, long primaryTerm, byte[] source) { - this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1); + 
public Index(String id, long seqNo, long primaryTerm, byte[] source) { + this(id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1); } - public Index( - String type, - String id, - long seqNo, - long primaryTerm, - long version, - byte[] source, - String routing, - long autoGeneratedIdTimestamp - ) { - this.type = type; + public Index(String id, long seqNo, long primaryTerm, long version, byte[] source, String routing, long autoGeneratedIdTimestamp) { this.id = id; this.source = new BytesArray(source); this.seqNo = seqNo; @@ -1264,12 +1256,10 @@ public Type opType() { @Override public long estimateSize() { - return (2 * id.length()) + (2 * type.length()) + source.length() + (routing != null ? 2 * routing.length() : 0) + (4 - * Long.BYTES); // timestamp, seq_no, primary_term, and version - } - - public String type() { - return this.type; + return (2 * id.length()) + source.length() + (routing != null ? 2 * routing.length() : 0) + (4 * Long.BYTES); // timestamp, + // seq_no, + // primary_term, + // and version } public String id() { @@ -1304,10 +1294,12 @@ public Source getSource() { } private void write(final StreamOutput out) throws IOException { - final int format = out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0; + final int format = out.getVersion().onOrAfter(Version.V_2_0_0) ? 
SERIALIZATION_FORMAT : FORMAT_NO_VERSION_TYPE; out.writeVInt(format); out.writeString(id); - out.writeString(type); + if (format < FORMAT_NO_DOC_TYPE) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeBytesReference(source); out.writeOptionalString(routing); if (format < FORMAT_NO_PARENT) { @@ -1337,7 +1329,6 @@ public boolean equals(Object o) { || seqNo != index.seqNo || primaryTerm != index.primaryTerm || id.equals(index.id) == false - || type.equals(index.type) == false || autoGeneratedIdTimestamp != index.autoGeneratedIdTimestamp || source.equals(index.source) == false) { return false; @@ -1352,7 +1343,6 @@ public boolean equals(Object o) { @Override public int hashCode() { int result = id.hashCode(); - result = 31 * result + type.hashCode(); result = 31 * result + Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); @@ -1368,9 +1358,6 @@ public String toString() { + "id='" + id + '\'' - + ", type='" - + type - + '\'' + ", seqNo=" + seqNo + ", primaryTerm=" @@ -1393,9 +1380,10 @@ public static class Delete implements Operation { private static final int FORMAT_6_0 = 4; // 6.0 - * public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; - public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; + public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE; - private final String type, id; + private final String id; private final Term uid; private final long seqNo; private final long primaryTerm; @@ -1404,7 +1392,10 @@ public static class Delete implements Operation { private Delete(final StreamInput in) throws IOException { final int format = in.readVInt();// SERIALIZATION_FORMAT assert format >= FORMAT_6_0 : "format was: " + format; - type = in.readString(); + if (format < FORMAT_NO_DOC_TYPE) { + 
in.readString(); + // Can't assert that this is _doc because pre 2.0 indexes can have any name for a type + } id = in.readString(); uid = new Term(in.readString(), in.readBytesRef()); this.version = in.readLong(); @@ -1416,16 +1407,15 @@ private Delete(final StreamInput in) throws IOException { } public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) { - this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); + this(delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); } /** utility for testing */ - public Delete(String type, String id, long seqNo, long primaryTerm, Term uid) { - this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY); + public Delete(String id, long seqNo, long primaryTerm, Term uid) { + this(id, uid, seqNo, primaryTerm, Versions.MATCH_ANY); } - public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version) { - this.type = Objects.requireNonNull(type); + public Delete(String id, Term uid, long seqNo, long primaryTerm, long version) { this.id = Objects.requireNonNull(id); this.uid = uid; this.seqNo = seqNo; @@ -1440,12 +1430,8 @@ public Type opType() { @Override public long estimateSize() { - return (id.length() * 2) + (type.length() * 2) + ((uid.field().length() * 2) + (uid.text().length()) * 2) + (type.length() * 2) - + (3 * Long.BYTES); // seq_no, primary_term, and version; - } - - public String type() { - return type; + return (id.length() * 2) + ((uid.field().length() * 2) + (uid.text().length()) * 2) + (3 * Long.BYTES); // seq_no, primary_term, + // and version; } public String id() { @@ -1476,9 +1462,11 @@ public Source getSource() { } private void write(final StreamOutput out) throws IOException { - final int format = out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0) ? SERIALIZATION_FORMAT : FORMAT_6_0; + final int format = out.getVersion().onOrAfter(Version.V_2_0_0) ? 
SERIALIZATION_FORMAT : FORMAT_NO_VERSION_TYPE; out.writeVInt(format); - out.writeString(type); + if (format < FORMAT_NO_DOC_TYPE) { + out.writeString(MapperService.SINGLE_MAPPING_NAME); + } out.writeString(id); out.writeString(uid.field()); out.writeBytesRef(uid.bytes()); diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java b/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java index 37c1d5d698408..66241f7b6847f 100644 --- a/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/opensearch/index/translog/TranslogWriter.java @@ -283,7 +283,6 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc final Translog.Index o1 = (Translog.Index) prvOp; final Translog.Index o2 = (Translog.Index) newOp; sameOp = Objects.equals(o1.id(), o2.id()) - && Objects.equals(o1.type(), o2.type()) && Objects.equals(o1.source(), o2.source()) && Objects.equals(o1.routing(), o2.routing()) && o1.primaryTerm() == o2.primaryTerm() @@ -293,7 +292,6 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc final Translog.Delete o1 = (Translog.Delete) newOp; final Translog.Delete o2 = (Translog.Delete) prvOp; sameOp = Objects.equals(o1.id(), o2.id()) - && Objects.equals(o1.type(), o2.type()) && o1.primaryTerm() == o2.primaryTerm() && o1.seqNo() == o2.seqNo() && o1.version() == o2.version(); diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java index a812dd2888e5d..b1fa20307a12b 100644 --- a/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/TransportShardBulkActionTests.java @@ -109,7 +109,6 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { private IndexMetadata indexMetadata() throws IOException { return 
IndexMetadata.builder("index") .putMapping( - "_doc", "{\"properties\":{\"foo\":{\"type\":\"text\",\"fields\":" + "{\"keyword\":{\"type\":\"keyword\",\"ignore_above\":256}}}}}" ) .settings(idxSettings) @@ -711,7 +710,7 @@ public void testUpdateWithDelete() throws Exception { final long resultSeqNo = 13; Engine.DeleteResult deleteResult = new FakeDeleteResult(1, 1, resultSeqNo, found, resultLocation); IndexShard shard = mock(IndexShard.class); - when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult); + when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyLong())).thenReturn(deleteResult); when(shard.indexSettings()).thenReturn(indexSettings); when(shard.shardId()).thenReturn(shardId); diff --git a/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java b/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java index 09bab1af7fc43..1184b05461025 100644 --- a/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java +++ b/server/src/test/java/org/opensearch/action/get/TransportMultiGetActionTests.java @@ -127,7 +127,6 @@ public TaskManager getTaskManager() { .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID()) ) .putMapping( - "_doc", XContentHelper.convertToJson( BytesReference.bytes( XContentFactory.jsonBuilder() @@ -153,7 +152,6 @@ public TaskManager getTaskManager() { .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID()) ) .putMapping( - "_doc", XContentHelper.convertToJson( BytesReference.bytes( XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java b/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java index f8e76b5e85b61..a078966e3aa80 100644 --- a/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java +++ 
b/server/src/test/java/org/opensearch/action/resync/ResyncReplicationRequestTests.java @@ -48,7 +48,7 @@ public class ResyncReplicationRequestTests extends OpenSearchTestCase { public void testSerialization() throws IOException { final byte[] bytes = "{}".getBytes(Charset.forName("UTF-8")); - final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1); + final Translog.Index index = new Translog.Index("id", 0, randomNonNegativeLong(), randomNonNegativeLong(), bytes, null, -1); final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, 100, new Translog.Operation[] { index }); diff --git a/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java b/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java index 2b6b913b080ec..a544bad4cd9e6 100644 --- a/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java +++ b/server/src/test/java/org/opensearch/action/resync/TransportResyncReplicationActionTests.java @@ -206,7 +206,7 @@ public void testResyncDoesNotBlockOnPrimaryAction() throws Exception { shardId, 42L, 100, - new Translog.Operation[] { new Translog.Index("type", "id", 0, primaryTerm, 0L, bytes, null, -1) } + new Translog.Operation[] { new Translog.Index("id", 0, primaryTerm, 0L, bytes, null, -1) } ); final PlainActionFuture listener = new PlainActionFuture<>(); diff --git a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java index 29c8204af02b6..b034b335bd9a3 100644 --- a/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java +++ 
b/server/src/test/java/org/opensearch/action/support/replication/TransportReplicationAllPermitsAcquisitionTests.java @@ -170,7 +170,7 @@ public void setUp() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder(shardId.getIndexName()) .settings(indexSettings) .primaryTerm(shardId.id(), primary.getOperationPrimaryTerm()) - .putMapping("_doc", "{ \"properties\": { \"value\": { \"type\": \"short\"}}}") + .putMapping("{ \"properties\": { \"value\": { \"type\": \"short\"}}}") .build(); state.metadata(Metadata.builder().put(indexMetadata, false).generateClusterUuidIfNeeded()); diff --git a/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java b/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java index f4f2d9b470a90..b62050a1b8050 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/opensearch/action/termvectors/TransportMultiTermVectorsActionTests.java @@ -128,7 +128,6 @@ public TaskManager getTaskManager() { .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID()) ) .putMapping( - "_doc", XContentHelper.convertToJson( BytesReference.bytes( XContentFactory.jsonBuilder() @@ -154,7 +153,6 @@ public TaskManager getTaskManager() { .put(IndexMetadata.SETTING_INDEX_UUID, index1.getUUID()) ) .putMapping( - "_doc", XContentHelper.convertToJson( BytesReference.bytes( XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java index a60946f8befaa..ddaea6edbfd90 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java @@ -240,7 +240,7 @@ private static 
MetadataCreateIndexService getMetadataCreateIndexService() throws .put(request.settings()) .build() ) - .putMapping("_doc", generateMapping("@timestamp")) + .putMapping(generateMapping("@timestamp")) .numberOfShards(1) .numberOfReplicas(1) .build(), diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java index 64716794bde2b..4e7502ada661f 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataTests.java @@ -644,7 +644,7 @@ public void testFindMappings() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", FIND_MAPPINGS_TEST_ITEM) + .putMapping(FIND_MAPPINGS_TEST_ITEM) ) .put( IndexMetadata.builder("index2") @@ -654,7 +654,7 @@ public void testFindMappings() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", FIND_MAPPINGS_TEST_ITEM) + .putMapping(FIND_MAPPINGS_TEST_ITEM) ) .build(); @@ -739,7 +739,7 @@ public void testFindMappingsWithFilters() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", mapping) + .putMapping(mapping) ) .put( IndexMetadata.builder("index2") @@ -749,7 +749,7 @@ public void testFindMappingsWithFilters() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", mapping) + .putMapping(mapping) ) .put( IndexMetadata.builder("index3") @@ -759,7 +759,7 @@ public void testFindMappingsWithFilters() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .putMapping("_doc", mapping) + .putMapping(mapping) ) .build(); diff --git 
a/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java b/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java index 80ad315c0f613..253018d7f569f 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/ToAndFromJsonMetadataTests.java @@ -112,8 +112,7 @@ public void testSimpleJsonFromAndTo() throws IOException { .creationDate(2L) .numberOfShards(1) .numberOfReplicas(2) - .putMapping("mapping1", MAPPING_SOURCE1) - .putMapping("mapping2", MAPPING_SOURCE2) + .putMapping(MAPPING_SOURCE1) .putAlias(newAliasMetadataBuilder("alias1").filter(ALIAS_FILTER1)) .putAlias(newAliasMetadataBuilder("alias3").writeIndex(randomBoolean() ? null : randomBoolean())) .putAlias(newAliasMetadataBuilder("alias4").filter(ALIAS_FILTER2)) diff --git a/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java index facb443422b31..38c8491d79150 100644 --- a/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/opensearch/index/IndexingSlowLogTests.java @@ -223,7 +223,6 @@ public void testSlowLogMessageHasJsonFields() throws IOException { new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", - "test", "routingValue", null, source, @@ -237,7 +236,6 @@ public void testSlowLogMessageHasJsonFields() throws IOException { assertThat(p.getValueFor("message"), equalTo("[foo/123]")); assertThat(p.getValueFor("took"), equalTo("10nanos")); assertThat(p.getValueFor("took_millis"), equalTo("0")); - assertThat(p.getValueFor("doc_type"), equalTo("test")); assertThat(p.getValueFor("id"), equalTo("id")); assertThat(p.getValueFor("routing"), equalTo("routingValue")); assertThat(p.getValueFor("source"), is(emptyOrNullString())); @@ -253,7 +251,6 @@ public void 
testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", - "test", null, null, source, @@ -284,7 +281,6 @@ public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { new NumericDocValuesField("version", 1), SeqNoFieldMapper.SequenceIDFields.emptySeqID(), "id", - "test", null, null, source, diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 5f98a05840562..aeba4b1b2f0e7 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -124,16 +124,11 @@ import org.opensearch.index.VersionType; import org.opensearch.index.codec.CodecService; import org.opensearch.index.fieldvisitor.FieldsVisitor; -import org.opensearch.index.mapper.ContentPath; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.Mapper.BuilderContext; import org.opensearch.index.mapper.MapperService; -import org.opensearch.index.mapper.Mapping; -import org.opensearch.index.mapper.MetadataFieldMapper; import org.opensearch.index.mapper.ParseContext; import org.opensearch.index.mapper.ParseContext.Document; import org.opensearch.index.mapper.ParsedDocument; -import org.opensearch.index.mapper.RootObjectMapper; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.mapper.Uid; @@ -195,7 +190,6 @@ import java.util.stream.Collectors; import java.util.stream.LongStream; -import static java.util.Collections.emptyMap; import static java.util.Collections.shuffle; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.sameInstance; @@ -304,7 +298,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { if 
(operation.origin() == PRIMARY) { assertFalse("safe access should NOT be required last indexing round was only append only", engine.isSafeAccessRequired()); } - engine.delete(new Engine.Delete(operation.type(), operation.id(), operation.uid(), primaryTerm.get())); + engine.delete(new Engine.Delete(operation.id(), operation.uid(), primaryTerm.get())); assertTrue("safe access should be required", engine.isSafeAccessRequired()); engine.refresh("test"); assertTrue("safe access should be required", engine.isSafeAccessRequired()); @@ -478,7 +472,7 @@ public void testSegments() throws Exception { liveDocsFirstSegment.remove(idToUpdate); ParsedDocument doc = testParsedDocument(idToUpdate, null, testDocument(), B_1, null); if (randomBoolean()) { - engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), newUid(doc), primaryTerm.get())); deletes++; } else { engine.index(indexForDoc(doc)); @@ -609,7 +603,6 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { initialEngine.index(operation); } else { final Engine.Delete operation = new Engine.Delete( - "test", "1", newUid(doc), UNASSIGNED_SEQ_NO, @@ -879,7 +872,7 @@ public void testSimpleOperations() throws Exception { searchResult.close(); // now delete - engine.delete(new Engine.Delete("test", "1", newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get())); // its not deleted yet searchResult = engine.acquireSearcher("test"); @@ -1026,7 +1019,7 @@ public void testSearchResultRelease() throws Exception { // don't release the search result yet... 
// delete, refresh and do a new search, it should not be there - engine.delete(new Engine.Delete("test", "1", newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get())); engine.refresh("test"); Engine.Searcher updateSearchResult = engine.acquireSearcher("test"); MatcherAssert.assertThat(updateSearchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0)); @@ -1471,7 +1464,7 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final MapperService mapperService = createMapperService("test"); + final MapperService mapperService = createMapperService(); final Set liveDocs = new HashSet<>(); try ( Store store = createStore(); @@ -1488,7 +1481,7 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null); if (randomBoolean()) { - engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); liveDocs.remove(doc.id()); } if (randomBoolean()) { @@ -1550,7 +1543,7 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final MapperService mapperService = createMapperService("test"); + final MapperService mapperService = 
createMapperService(); final boolean omitSourceAllTheTime = randomBoolean(); final Set liveDocs = new HashSet<>(); final Set liveDocsWithSource = new HashSet<>(); @@ -1574,7 +1567,7 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime; ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null, useRecoverySource); if (randomBoolean()) { - engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); liveDocs.remove(doc.id()); liveDocsWithSource.remove(doc.id()); } @@ -1826,7 +1819,6 @@ public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, Interrup } else { Engine.Delete delete = (Engine.Delete) operation; return new Engine.Delete( - delete.type(), delete.id(), delete.uid(), newSeqNo, @@ -1930,7 +1922,6 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion 0 ); BiFunction delWithVersion = (version, delete) -> new Engine.Delete( - delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, @@ -1957,7 +1948,6 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion term ); TriFunction delWithSeq = (seqNo, term, delete) -> new Engine.Delete( - delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, @@ -1984,7 +1974,6 @@ private int assertOpsOnPrimary(List ops, long currentOpVersion index.getIfPrimaryTerm() ); Function deleteWithCurrentTerm = delete -> new Engine.Delete( - delete.type(), delete.id(), delete.uid(), UNASSIGNED_SEQ_NO, @@ -2371,7 +2360,7 @@ public void testBasicCreatedFlag() throws IOException { indexResult = engine.index(index); assertFalse(indexResult.isCreated()); - engine.delete(new Engine.Delete("doc", "1", newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete("1", newUid(doc), primaryTerm.get())); index = indexForDoc(doc); indexResult = 
engine.index(index); @@ -2503,7 +2492,6 @@ public void testSeqNoAndCheckpoints() throws IOException, InterruptedException { // we have some docs indexed, so delete one of them id = randomFrom(indexedIds); final Engine.Delete delete = new Engine.Delete( - "test", id, newUid(id), UNASSIGNED_SEQ_NO, @@ -2817,7 +2805,6 @@ public void testEnableGcDeletes() throws Exception { // Delete document we just added: engine.delete( new Engine.Delete( - "test", "1", newUid(doc), UNASSIGNED_SEQ_NO, @@ -2845,7 +2832,6 @@ public void testEnableGcDeletes() throws Exception { // Delete non-existent document engine.delete( new Engine.Delete( - "test", "2", newUid("2"), UNASSIGNED_SEQ_NO, @@ -3234,15 +3220,6 @@ public void testSkipTranslogReplay() throws IOException { } } - private Mapping dynamicUpdate() { - BuilderContext context = new BuilderContext( - Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build(), - new ContentPath() - ); - final RootObjectMapper root = new RootObjectMapper.Builder("some_type").build(context); - return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], emptyMap()); - } - private Path[] filterExtraFSFiles(Path[] files) { List paths = new ArrayList<>(); for (Path p : files) { @@ -3278,7 +3255,6 @@ public void testTranslogReplay() throws IOException { } assertVisibleCount(engine, numDocs); translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings()); - translogHandler.mappingUpdate = dynamicUpdate(); engine.close(); // we need to reuse the engine config unless the parser.mappingModified won't work @@ -3288,12 +3264,6 @@ public void testTranslogReplay() throws IOException { assertVisibleCount(engine, numDocs, false); assertEquals(numDocs, translogHandler.appliedOperations()); - if (translogHandler.mappingUpdate != null) { - assertEquals(1, translogHandler.getRecoveredTypes().size()); - assertTrue(translogHandler.getRecoveredTypes().containsKey("test")); - } else { - assertEquals(0, 
translogHandler.getRecoveredTypes().size()); - } engine.close(); translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings()); @@ -3358,7 +3328,7 @@ public void testTranslogReplay() throws IOException { assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); } assertEquals(flush ? 1 : 2, translogHandler.appliedOperations()); - engine.delete(new Engine.Delete("test", Integer.toString(randomId), newUid(doc), primaryTerm.get())); + engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get())); if (randomBoolean()) { engine.close(); engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier); @@ -3405,7 +3375,7 @@ public void testRecoverFromForeignTranslog() throws IOException { primaryTerm::get, seqNo -> {} ); - translog.add(new Translog.Index("test", "SomeBogusId", 0, primaryTerm.get(), "{}".getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("SomeBogusId", 0, primaryTerm.get(), "{}".getBytes(Charset.forName("UTF-8")))); assertEquals(generation.translogFileGeneration, translog.currentFileGeneration()); translog.close(); @@ -3689,10 +3659,7 @@ public BytesRef binaryValue() { } // now the engine is closed check we respond correctly expectThrows(AlreadyClosedException.class, () -> engine.index(indexForDoc(doc1))); - expectThrows( - AlreadyClosedException.class, - () -> engine.delete(new Engine.Delete("test", "", newUid(doc1), primaryTerm.get())) - ); + expectThrows(AlreadyClosedException.class, () -> engine.delete(new Engine.Delete("", newUid(doc1), primaryTerm.get()))); expectThrows( AlreadyClosedException.class, () -> engine.noOp( @@ -3714,8 +3681,8 @@ public void testDeleteWithFatalError() throws Exception { try (Store store = createStore()) { EngineConfig.TombstoneDocSupplier tombstoneDocSupplier = new EngineConfig.TombstoneDocSupplier() { @Override - public ParsedDocument newDeleteTombstoneDoc(String type, String id) { - ParsedDocument parsedDocument = 
tombstoneDocSupplier().newDeleteTombstoneDoc(type, id); + public ParsedDocument newDeleteTombstoneDoc(String id) { + ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(id); parsedDocument.rootDoc().add(new StoredField("foo", "bar") { // this is a hack to add a failure during store document which triggers a tragic event // and in turn fails the engine @@ -3736,10 +3703,7 @@ public ParsedDocument newNoopTombstoneDoc(String reason) { try (InternalEngine engine = createEngine(null, null, null, config)) { final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); - expectThrows( - IllegalStateException.class, - () -> engine.delete(new Engine.Delete("test", "1", newUid("1"), primaryTerm.get())) - ); + expectThrows(IllegalStateException.class, () -> engine.delete(new Engine.Delete("1", newUid("1"), primaryTerm.get()))); assertTrue(engine.isClosed.get()); assertSame(tragicException, engine.failedEngine.get()); } @@ -3839,7 +3803,6 @@ public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException Engine.Index operation = appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5)); Engine.Index retry = appendOnlyReplica(doc, true, 1, randomIntBetween(0, 5)); Engine.Delete delete = new Engine.Delete( - operation.type(), operation.id(), operation.uid(), Math.max(retry.seqNo(), operation.seqNo()) + 1, @@ -4000,7 +3963,7 @@ public void testDoubleDeliveryReplica() throws IOException { assertEquals(1, topDocs.totalHits.value); } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - List ops = readAllOperationsInLucene(engine, createMapperService("test")); + List ops = readAllOperationsInLucene(engine, createMapperService()); assertThat(ops.stream().map(o -> o.seqNo()).collect(Collectors.toList()), hasItem(20L)); } } @@ -4597,7 +4560,6 @@ public void testLookupSeqNoByIdInLucene() throws Exception { } else { operations.add( new Engine.Delete( - 
doc.type(), doc.id(), EngineTestCase.newUid(doc), seqNo, @@ -4806,7 +4768,6 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio operations.add(index); } else { final Engine.Delete delete = new Engine.Delete( - "test", "1", uid, sequenceNumberSupplier.getAsLong(), @@ -4868,7 +4829,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio */ public void testVersionConflictIgnoreDeletedDoc() throws IOException { ParsedDocument doc = testParsedDocument("1", null, testDocument(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); - engine.delete(new Engine.Delete("test", "1", newUid("1"), 1)); + engine.delete(new Engine.Delete("1", newUid("1"), 1)); for (long seqNo : new long[] { 0, 1, randomNonNegativeLong() }) { assertDeletedVersionConflict( engine.index( @@ -4893,7 +4854,6 @@ public void testVersionConflictIgnoreDeletedDoc() throws IOException { assertDeletedVersionConflict( engine.delete( new Engine.Delete( - "test", "1", newUid("1"), UNASSIGNED_SEQ_NO, @@ -4973,7 +4933,7 @@ protected long doGenerateSeqNoForOperation(Operation operation) { assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get())); assertThat(noOp.reason(), equalTo(reason)); if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - MapperService mapperService = createMapperService("test"); + MapperService mapperService = createMapperService(); List operationsFromLucene = readAllOperationsInLucene(noOpEngine, mapperService); assertThat(operationsFromLucene, hasSize(maxSeqNo + 2 - localCheckpoint)); // fills n gap and 2 manual noop. 
for (int i = 0; i < operationsFromLucene.size(); i++) { @@ -5050,7 +5010,7 @@ public void testRandomOperations() throws Exception { } } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - List operations = readAllOperationsInLucene(engine, createMapperService("test")); + List operations = readAllOperationsInLucene(engine, createMapperService()); assertThat(operations, hasSize(numOps)); } } @@ -5207,7 +5167,7 @@ public void testRestoreLocalHistoryFromTranslog() throws IOException { equalTo(0) ); } - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test")); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService()); } } @@ -5409,7 +5369,6 @@ public void testSeqNoGenerator() throws IOException { ) { final String id = "id"; final Field uidField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE); - final String type = "type"; final Field versionField = new NumericDocValuesField("_version", 0); final SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); final ParseContext.Document document = new ParseContext.Document(); @@ -5423,7 +5382,6 @@ public void testSeqNoGenerator() throws IOException { versionField, seqID, id, - type, "routing", Collections.singletonList(document), source, @@ -5450,7 +5408,6 @@ public void testSeqNoGenerator() throws IOException { assertThat(seqNoGenerator.get(), equalTo(seqNo + 1)); final Engine.Delete delete = new Engine.Delete( - type, id, new Term("_id", parsedDocument.id()), UNASSIGNED_SEQ_NO, @@ -5577,7 +5534,7 @@ public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException, Engine.Index operation = appendOnlyPrimary(doc, false, 1); engine.index(operation); if (rarely()) { - engine.delete(new Engine.Delete(operation.type(), operation.id(), operation.uid(), primaryTerm.get())); + engine.delete(new Engine.Delete(operation.id(), operation.uid(), primaryTerm.get())); numDeletes.incrementAndGet(); } else { 
doc = testParsedDocument( @@ -5915,7 +5872,7 @@ public void testStressUpdateSameDocWhileGettingIt() throws IOException, Interrup ); // first index an append only document and then delete it. such that we have it in the tombstones engine.index(doc); - engine.delete(new Engine.Delete(doc.type(), doc.id(), doc.uid(), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), doc.uid(), primaryTerm.get())); // now index more append only docs and refresh so we re-enabel the optimization for unsafe version map ParsedDocument document1 = testParsedDocument(Integer.toString(1), null, testDocumentWithTextField(), SOURCE, null); @@ -6163,7 +6120,7 @@ public void testHistoryBasedOnSource() throws Exception { engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); } } - MapperService mapperService = createMapperService("test"); + MapperService mapperService = createMapperService(); List luceneOps = readAllOperationsBasedOnSource(engine, mapperService); assertThat(luceneOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); } @@ -6230,7 +6187,7 @@ private void assertOperationHistoryInLucene(List operations) t engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); } } - MapperService mapperService = createMapperService("test"); + MapperService mapperService = createMapperService(); List actualOps = readAllOperationsInLucene(engine, mapperService); assertThat(actualOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); @@ -6320,7 +6277,7 @@ public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException { long minRetainSeqNos = engine.getMinRetainedSeqNo(); assertThat(minRetainSeqNos, lessThanOrEqualTo(globalCheckpoint.get() + 1)); Long[] expectedOps = existingSeqNos.stream().filter(seqno -> seqno >= minRetainSeqNos).toArray(Long[]::new); - Set 
actualOps = readAllOperationsInLucene(engine, createMapperService("test")).stream() + Set actualOps = readAllOperationsInLucene(engine, createMapperService()).stream() .map(Translog.Operation::seqNo) .collect(Collectors.toSet()); assertThat(actualOps, containsInAnyOrder(expectedOps)); @@ -6369,7 +6326,7 @@ public void testLastRefreshCheckpoint() throws Exception { } public void testLuceneSnapshotRefreshesOnlyOnce() throws Exception { - final MapperService mapperService = createMapperService("test"); + final MapperService mapperService = createMapperService(); final long maxSeqNo = randomLongBetween(10, 50); final AtomicLong refreshCounter = new AtomicLong(); try ( @@ -6484,7 +6441,7 @@ public void testTrackMaxSeqNoOfUpdatesOrDeletesOnPrimary() throws Exception { ); } } else { - Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get())); + Engine.DeleteResult result = engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); liveDocIds.remove(doc.id()); assertThat( "delete operations on primary must advance max_seq_no_of_updates", @@ -6712,7 +6669,7 @@ public void testPruneAwayDeletedButRetainedIds() throws Exception { index(engine, i); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - engine.delete(new Engine.Delete("_doc", "0", newUid("0"), primaryTerm.get())); + engine.delete(new Engine.Delete("0", newUid("0"), primaryTerm.get())); engine.refresh("test"); // now we have 2 segments since we now added a tombstone plus the old segment with the delete try (Engine.Searcher searcher = engine.acquireSearcher("test")) { @@ -6913,7 +6870,7 @@ private void runTestDeleteFailure(final CheckedBiConsumer new IllegalArgumentException("fatal")); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op)); @@ -7185,7 +7142,7 @@ public void testMaxDocsOnPrimary() throws Exception { 
operations.add(indexForDoc(createParsedDoc(id, null))); } else { id = "not_found"; - operations.add(new Engine.Delete("_doc", id, newUid(id), primaryTerm.get())); + operations.add(new Engine.Delete(id, newUid(id), primaryTerm.get())); } } for (int i = 0; i < numDocs; i++) { diff --git a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java index ff569898b4910..05b6c77cad818 100644 --- a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java +++ b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java @@ -59,7 +59,7 @@ public class LuceneChangesSnapshotTests extends EngineTestCase { @Before public void createMapper() throws Exception { - mapperService = createMapperService("test"); + mapperService = createMapperService(); } @Override @@ -92,7 +92,7 @@ public void testBasics() throws Exception { if (randomBoolean()) { engine.index(indexForDoc(doc)); } else { - engine.delete(new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get())); + engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get())); } if (rarely()) { if (randomBoolean()) { @@ -264,7 +264,7 @@ public void testUpdateAndReadChangesConcurrently() throws Exception { if (randomBoolean()) { op = new Engine.Index(newUid(doc), primaryTerm.get(), doc); } else { - op = new Engine.Delete(doc.type(), doc.id(), newUid(doc.id()), primaryTerm.get()); + op = new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()); } } else { if (randomBoolean()) { diff --git a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java index e04bf1a4f20f2..a015443979527 100644 --- a/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/NoOpEngineTests.java @@ -152,7 +152,7 @@ public 
void testNoOpEngineStats() throws Exception { for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { String delId = Integer.toString(i); - Engine.DeleteResult result = engine.delete(new Engine.Delete("_doc", delId, newUid(delId), primaryTerm.get())); + Engine.DeleteResult result = engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get())); assertTrue(result.isFound()); engine.syncTranslog(); // advance persisted local checkpoint globalCheckpoint.set(engine.getPersistedLocalCheckpoint()); diff --git a/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java b/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java index 609e972b2c026..95a2db9d74c38 100644 --- a/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java @@ -112,7 +112,7 @@ public void testReadOnlyEngine() throws Exception { for (int i = 0; i < numDocs; i++) { if (randomBoolean()) { String delId = Integer.toString(i); - engine.delete(new Engine.Delete("test", delId, newUid(delId), primaryTerm.get())); + engine.delete(new Engine.Delete(delId, newUid(delId), primaryTerm.get())); } if (rarely()) { engine.flush(); diff --git a/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java b/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java index 2854f556bf8d8..071366d7c3345 100644 --- a/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/server/src/test/java/org/opensearch/index/fielddata/BinaryDVFieldDataTests.java @@ -81,16 +81,16 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - ParsedDocument d = mapper.parse(new SourceToParse("test", "test", "1", BytesReference.bytes(doc), XContentType.JSON)); + ParsedDocument d = mapper.parse(new SourceToParse("test", "1", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); 
BytesRef bytes1 = randomBytes(); doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1.bytes, bytes1.offset, bytes1.length).endObject(); - d = mapper.parse(new SourceToParse("test", "test", "2", BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "2", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); doc = XContentFactory.jsonBuilder().startObject().endObject(); - d = mapper.parse(new SourceToParse("test", "test", "3", BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "3", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); // test remove duplicate value @@ -106,7 +106,7 @@ public void testDocValue() throws Exception { doc.endArray(); } doc.endObject(); - d = mapper.parse(new SourceToParse("test", "test", "4", BytesReference.bytes(doc), XContentType.JSON)); + d = mapper.parse(new SourceToParse("test", "4", BytesReference.bytes(doc), XContentType.JSON)); writer.addDocument(d.rootDoc()); IndexFieldData indexFieldData = getForField("field"); diff --git a/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java index 3a10b5c422578..374b7ac9a5271 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DataStreamFieldMapperTests.java @@ -76,7 +76,6 @@ public void testDeeplyNestedCustomTimestampField() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -97,7 +96,6 @@ public void testDeeplyNestedCustomTimestampField() throws Exception { mapper.parse( new SourceToParse( "test", - "_doc", "3", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -127,7 +125,6 @@ private void assertDataStreamFieldMapper(String mapping, 
String timestampFieldNa ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "_doc", "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field(timestampFieldName, "2020-12-06T11:04:05.000Z").endObject() @@ -146,7 +143,6 @@ private void assertDataStreamFieldMapper(String mapping, String timestampFieldNa mapper.parse( new SourceToParse( "test", - "_doc", "2", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field("invalid-field-name", "2020-12-06T11:04:05.000Z").endObject() @@ -165,7 +161,6 @@ private void assertDataStreamFieldMapper(String mapping, String timestampFieldNa mapper.parse( new SourceToParse( "test", - "_doc", "3", BytesReference.bytes( XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java index aa0a7f36a793f..0ad8dc3f138e0 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DocumentParserTests.java @@ -1063,8 +1063,7 @@ public void testParseToJsonAndParse() throws Exception { // reparse it DocumentMapper builtDocMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, builtMapping); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1.json")); - Document doc = builtDocMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) - .rootDoc(); + Document doc = builtDocMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(builtDocMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); assertThat(doc.get(builtDocMapper.mappers().getMapper("name.first").name()), equalTo("fred")); } @@ -1076,8 +1075,7 @@ public void testSimpleParser() throws Exception { assertThat((String) docMapper.meta().get("param1"), equalTo("value1")); 
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1.json")); - Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) - .rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("fred")); } @@ -1086,8 +1084,7 @@ public void testSimpleParserNoTypeNoId() throws Exception { String mapping = copyToStringFromClasspath("/org/opensearch/index/mapper/simple/test-mapping.json"); DocumentMapper docMapper = createDocumentMapper(MapperService.SINGLE_MAPPING_NAME, mapping); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/simple/test1-notype-noid.json")); - Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) - .rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); assertThat(doc.getBinaryValue(docMapper.idFieldMapper().name()), equalTo(Uid.encodeId("1"))); assertThat(doc.get(docMapper.mappers().getMapper("name.first").name()), equalTo("fred")); } @@ -1109,7 +1106,7 @@ public void testNoDocumentSent() throws Exception { BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8)); MapperParsingException e = expectThrows( MapperParsingException.class, - () -> docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) + () -> docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)) ); assertThat(e.getMessage(), equalTo("failed to parse, document is empty")); } diff --git a/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java 
b/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java index f40ffa600ba8c..dee5db4e31253 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DynamicMappingTests.java @@ -366,7 +366,7 @@ private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentB .field("quux", "3.2") // float detected through numeric detection .endObject() ); - ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "_doc", "id", source, builder.contentType())); + ParsedDocument parsedDocument = mapper.parse(new SourceToParse("index", "id", source, builder.contentType())); Mapping update = parsedDocument.dynamicMappingsUpdate(); assertNotNull(update); assertThat(((FieldMapper) update.root().getMapper("foo")).fieldType().typeName(), equalTo("float")); diff --git a/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java index 117d66f50a178..639de9d314641 100644 --- a/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/FieldNamesFieldMapperTests.java @@ -110,7 +110,6 @@ public void testInjectIntoDocDuringParsing() throws Exception { ParsedDocument doc = defaultMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field("a", "100").startObject("b").field("c", 42).endObject().endObject() @@ -148,7 +147,6 @@ public void testExplicitEnabled() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON @@ -179,7 +177,6 @@ public void testDisabled() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", 
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON diff --git a/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java b/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java index cc4626bc89641..9c9c0440231de 100644 --- a/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/GenericStoreDynamicTemplateTests.java @@ -53,7 +53,7 @@ public void testSimple() throws Exception { byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json"); ParsedDocument parsedDoc = mapperService.documentMapper() - .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray(json), XContentType.JSON)); + .parse(new SourceToParse("test", "1", new BytesArray(json), XContentType.JSON)); client().admin() .indices() .preparePutMapping("test") diff --git a/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java index 718e945042218..e897abad405d5 100644 --- a/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/IdFieldMapperTests.java @@ -72,7 +72,6 @@ public void testIncludeInObjectNotAllowed() throws Exception { docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_id", "1").endObject()), XContentType.JSON @@ -91,7 +90,7 @@ public void testDefaults() throws IOException { Settings indexSettings = Settings.EMPTY; MapperService mapperService = createIndex("test", indexSettings).mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); - ParsedDocument document = 
mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON)); + ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON)); IndexableField[] fields = document.rootDoc().getFields(IdFieldMapper.NAME); assertEquals(1, fields.length); assertEquals(IndexOptions.DOCS, fields[0].fieldType().indexOptions()); diff --git a/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java index b27eb54fbfe59..c4225cb576550 100644 --- a/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/IndexFieldMapperTests.java @@ -63,7 +63,6 @@ public void testDefaultDisabledIndexMapper() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON diff --git a/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java index 33306c5842674..07fa602272b3d 100644 --- a/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/IpRangeFieldMapperTests.java @@ -79,7 +79,6 @@ public void testStoreCidr() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", entry.getKey()).endObject()), XContentType.JSON diff --git a/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java b/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java index b5989d93b520d..7e00a463124f1 100644 --- a/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java 
+++ b/server/src/test/java/org/opensearch/index/mapper/JavaMultiFieldMergeTests.java @@ -56,9 +56,7 @@ public void testMergeMultiField() throws Exception { assertThat(mapperService.fieldType("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = mapperService.documentMapper() - .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) - .rootDoc(); + Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); @@ -74,9 +72,7 @@ public void testMergeMultiField() throws Exception { assertThat(mapperService.fieldType("name.not_indexed2"), nullValue()); assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); - doc = mapperService.documentMapper() - .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) - .rootDoc(); + doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); @@ -113,9 +109,7 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { assertThat(mapperService.fieldType("name.indexed"), nullValue()); BytesReference json = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("name", "some name").endObject()); - Document doc = mapperService.documentMapper() - .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) - .rootDoc(); + Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); @@ 
-131,9 +125,7 @@ public void testUpgradeFromMultiFieldTypeToMultiFields() throws Exception { assertThat(mapperService.fieldType("name.not_indexed2"), nullValue()); assertThat(mapperService.fieldType("name.not_indexed3"), nullValue()); - doc = mapperService.documentMapper() - .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) - .rootDoc(); + doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); f = doc.getField("name"); assertThat(f, notNullValue()); f = doc.getField("name.indexed"); diff --git a/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java index 918f5b325d81a..4027cf20baba8 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MultiFieldTests.java @@ -76,9 +76,7 @@ private void testMultiField(String mapping) throws Exception { .merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json")); - Document doc = mapperService.documentMapper() - .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", json, XContentType.JSON)) - .rootDoc(); + Document doc = mapperService.documentMapper().parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); @@ -157,8 +155,7 @@ public void testBuildThenParse() throws Exception { .parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(builtMapping)); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/opensearch/index/mapper/multifield/test-data.json")); - Document doc = docMapper.parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, 
"1", json, XContentType.JSON)) - .rootDoc(); + Document doc = docMapper.parse(new SourceToParse("test", "1", json, XContentType.JSON)).rootDoc(); IndexableField f = doc.getField("name"); assertThat(f.name(), equalTo("name")); diff --git a/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java index 045cc97275eb7..fe3ce5da6c90a 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NestedObjectMapperTests.java @@ -86,7 +86,6 @@ public void testEmptyNested() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").nullField("nested1").endObject()), XContentType.JSON @@ -98,7 +97,6 @@ public void testEmptyNested() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().field("field", "value").startArray("nested").endArray().endObject() @@ -135,7 +133,6 @@ public void testSingleNested() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -161,7 +158,6 @@ public void testSingleNested() throws Exception { doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -230,7 +226,6 @@ public void testMultiNested() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -325,7 +320,6 @@ public void testMultiObjectAndNested1() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -421,7 +415,6 @@ public void 
testMultiObjectAndNested2() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -516,7 +509,6 @@ public void testMultiRootAndNested1() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -611,7 +603,6 @@ public void testMultipleLevelsIncludeRoot1() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -681,7 +672,6 @@ public void testMultipleLevelsIncludeRoot2() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -766,7 +756,6 @@ public void testMultipleLevelsIncludeRootWithMerge() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -822,7 +811,6 @@ public void testNestedArrayStrict() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -975,7 +963,7 @@ public void testLimitNestedDocsDefaultSettings() throws Exception { docBuilder.endArray(); } docBuilder.endObject(); - SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); + SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source1)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" @@ -1020,7 +1008,7 @@ public void testLimitNestedDocs() throws Exception { docBuilder.endArray(); } 
docBuilder.endObject(); - SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); + SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON); ParsedDocument doc = docMapper.parse(source1); assertThat(doc.docs().size(), equalTo(3)); @@ -1037,7 +1025,7 @@ public void testLimitNestedDocs() throws Exception { docBuilder2.endArray(); } docBuilder2.endObject(); - SourceToParse source2 = new SourceToParse("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); + SourceToParse source2 = new SourceToParse("test1", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" @@ -1089,7 +1077,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception { docBuilder.endArray(); } docBuilder.endObject(); - SourceToParse source1 = new SourceToParse("test1", "type", "1", BytesReference.bytes(docBuilder), XContentType.JSON); + SourceToParse source1 = new SourceToParse("test1", "1", BytesReference.bytes(docBuilder), XContentType.JSON); ParsedDocument doc = docMapper.parse(source1); assertThat(doc.docs().size(), equalTo(3)); @@ -1111,7 +1099,7 @@ public void testLimitNestedDocsMultipleNestedFields() throws Exception { } docBuilder2.endObject(); - SourceToParse source2 = new SourceToParse("test1", "type", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); + SourceToParse source2 = new SourceToParse("test1", "2", BytesReference.bytes(docBuilder2), XContentType.JSON); MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2)); assertEquals( "The number of nested documents has exceeded the allowed limit of [" diff --git a/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java 
b/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java index 9085c637ef89e..95c21823bfcae 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NullValueObjectMappingTests.java @@ -65,7 +65,6 @@ public void testNullValueObject() throws IOException { ParsedDocument doc = defaultMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder().startObject().startObject("obj1").endObject().field("value1", "test1").endObject() @@ -79,7 +78,6 @@ public void testNullValueObject() throws IOException { doc = defaultMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().nullField("obj1").field("value1", "test1").endObject()), XContentType.JSON @@ -91,7 +89,6 @@ public void testNullValueObject() throws IOException { doc = defaultMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() diff --git a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java index c82f918e55240..079475d9f3554 100644 --- a/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/ObjectMapperTests.java @@ -59,7 +59,6 @@ public void testDifferentInnerObjectTokenFailure() throws Exception { defaultMapper.parse( new SourceToParse( "test", - "type", "1", new BytesArray( " {\n" diff --git a/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java b/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java index 4976372ceaf23..e98dc399b3b41 100644 --- a/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java +++ 
b/server/src/test/java/org/opensearch/index/mapper/PathMatchDynamicTemplateTests.java @@ -53,7 +53,7 @@ public void testSimple() throws Exception { byte[] json = copyToBytesFromClasspath("/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json"); ParsedDocument parsedDoc = mapperService.documentMapper() - .parse(new SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", new BytesArray(json), XContentType.JSON)); + .parse(new SourceToParse("test", "1", new BytesArray(json), XContentType.JSON)); client().admin() .indices() .preparePutMapping("test") diff --git a/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java index a56521476c2d8..92236ad34013b 100644 --- a/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/RoutingFieldMapperTests.java @@ -53,7 +53,6 @@ public void testRoutingMapper() throws Exception { ParsedDocument doc = docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON, @@ -75,7 +74,6 @@ public void testIncludeInObjectNotAllowed() throws Exception { docMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("_routing", "foo").endObject()), XContentType.JSON diff --git a/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java index e37ef76ce9443..3cb16b452cbf4 100644 --- a/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/SourceFieldMapperTests.java @@ -69,7 +69,6 @@ public void testNoFormat() throws Exception { ParsedDocument doc = documentMapper.parse( new SourceToParse( "test", 
- "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()), XContentType.JSON @@ -82,7 +81,6 @@ public void testNoFormat() throws Exception { doc = documentMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes(XContentFactory.smileBuilder().startObject().field("field", "value").endObject()), XContentType.SMILE @@ -111,7 +109,6 @@ public void testIncludes() throws Exception { ParsedDocument doc = documentMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -156,7 +153,6 @@ public void testExcludes() throws Exception { ParsedDocument doc = documentMapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() @@ -325,8 +321,8 @@ public void testSourceObjectContainsExtraTokens() throws Exception { .parse("type", new CompressedXContent(mapping)); try { - documentMapper.parse(new SourceToParse("test", "type", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object - // (invalid JSON) + documentMapper.parse(new SourceToParse("test", "1", new BytesArray("{}}"), XContentType.JSON)); // extra end object + // (invalid JSON) fail("Expected parse exception"); } catch (MapperParsingException e) { assertNotNull(e.getRootCause()); diff --git a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java index f91120d4cf199..65776001381a0 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java @@ -111,7 +111,6 @@ public void testBytesAndNumericRepresentation() throws Exception { ParsedDocument doc = mapper.parse( new SourceToParse( "test", - "type", "1", BytesReference.bytes( XContentFactory.jsonBuilder() diff --git 
a/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java index d61c25c5ec622..89eee655ca9d4 100644 --- a/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java @@ -73,7 +73,7 @@ public void testDocValuesSingleType() throws Exception { public static void testDocValues(Function createIndex) throws IOException { MapperService mapperService = createIndex.apply("test").mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); - ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON)); + ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON)); Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); @@ -100,7 +100,7 @@ public void testDefaults() throws IOException { Settings indexSettings = Settings.EMPTY; MapperService mapperService = createIndex("test", indexSettings).mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); - ParsedDocument document = mapper.parse(new SourceToParse("index", "type", "id", new BytesArray("{}"), XContentType.JSON)); + ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON)); assertEquals(Collections.emptyList(), Arrays.asList(document.rootDoc().getFields(TypeFieldMapper.NAME))); } } diff --git a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java index 6e2efe56a69d7..a88db8473cae0 100644 --- 
a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java @@ -57,6 +57,7 @@ import org.opensearch.index.engine.InternalEngineTests; import org.opensearch.index.engine.SegmentsStats; import org.opensearch.index.engine.VersionConflictEngineException; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.seqno.SequenceNumbers; @@ -75,7 +76,6 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -315,10 +315,7 @@ public void testCheckpointsAdvance() throws Exception { } public void testConflictingOpsOnReplica() throws Exception { - Map mappings = Collections.singletonMap( - "type", - "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}" - ); + String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"; try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) { shards.startAll(); List replicas = shards.getReplicas(); @@ -345,10 +342,7 @@ public void testConflictingOpsOnReplica() throws Exception { } public void testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exception { - Map mappings = Collections.singletonMap( - "type", - "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}" - ); + String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"; try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) { shards.startAll(); long primaryPrimaryTerm = shards.getPrimary().getPendingPrimaryTerm(); @@ -398,10 +392,7 @@ public void 
testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exce } public void testReplicaOperationWithConcurrentPrimaryPromotion() throws Exception { - Map mappings = Collections.singletonMap( - "type", - "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}" - ); + String mappings = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"; try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(1, mappings))) { shards.startAll(); long primaryPrimaryTerm = shards.getPrimary().getPendingPrimaryTerm(); diff --git a/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java index cccb2f470195b..add2ecd34e3af 100644 --- a/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/opensearch/index/replication/RecoveryDuringReplicationTests.java @@ -75,10 +75,8 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.EnumSet; import java.util.List; -import java.util.Map; import java.util.Optional; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; @@ -159,7 +157,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception { 1, randomNonNegativeLong(), false, - new SourceToParse("index", "type", "replica", new BytesArray("{}"), XContentType.JSON) + new SourceToParse("index", "replica", new BytesArray("{}"), XContentType.JSON) ); shards.promoteReplicaToPrimary(promotedReplica).get(); oldPrimary.close("demoted", randomBoolean()); @@ -173,7 +171,7 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception { promotedReplica.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse("index", "type", "primary", new BytesArray("{}"), XContentType.JSON), + new 
SourceToParse("index", "primary", new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -358,10 +356,7 @@ public void testReplicaRollbackStaleDocumentsInPeerRecovery() throws Exception { } public void testResyncAfterPrimaryPromotion() throws Exception { - Map mappings = Collections.singletonMap( - "type", - "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}" - ); + String mappings = "{ \"_doc\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"; try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetadata(2, mappings))) { shards.startAll(); int initialDocs = randomInt(10); diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index c9179c9531c29..3bc5218e2f61f 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -1861,7 +1861,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { assertEquals(0, postDelete.get()); assertEquals(0, postDeleteException.get()); - deleteDoc(shard, "_doc", "1"); + deleteDoc(shard, "1"); assertEquals(2, preIndex.get()); assertEquals(1, postIndexCreate.get()); @@ -1889,7 +1889,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { assertEquals(1, postDelete.get()); assertEquals(0, postDeleteException.get()); try { - deleteDoc(shard, "_doc", "1"); + deleteDoc(shard, "1"); fail(); } catch (AlreadyClosedException e) { @@ -2184,7 +2184,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { final IndexShard shard = newStartedShard(false); long primaryTerm = shard.getOperationPrimaryTerm(); shard.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete - shard.applyDeleteOperationOnReplica(1, primaryTerm, 2, "_doc", "id"); + 
shard.applyDeleteOperationOnReplica(1, primaryTerm, 2, "id"); shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation shard.applyIndexOperationOnReplica( 0, @@ -2192,7 +2192,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), "id", new BytesArray("{}"), XContentType.JSON) ); shard.applyIndexOperationOnReplica( 3, @@ -2200,7 +2200,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), "id-3", new BytesArray("{}"), XContentType.JSON) ); // Flushing a new commit with local checkpoint=1 allows to skip the translog gen #1 in recovery. 
shard.flush(new FlushRequest().force(true).waitIfOngoing(true)); @@ -2210,7 +2210,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), "id-2", new BytesArray("{}"), XContentType.JSON) ); shard.applyIndexOperationOnReplica( 5, @@ -2218,7 +2218,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), "id-5", new BytesArray("{}"), XContentType.JSON) ); shard.sync(); // advance local checkpoint @@ -2358,13 +2358,7 @@ public void testRecoverFromStoreWithNoOps() throws IOException { // start a replica shard and index the second doc final IndexShard otherShard = newStartedShard(false); updateMappings(otherShard, shard.indexSettings().getIndexMetadata()); - SourceToParse sourceToParse = new SourceToParse( - shard.shardId().getIndexName(), - "_doc", - "1", - new BytesArray("{}"), - XContentType.JSON - ); + SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), "1", new BytesArray("{}"), XContentType.JSON); otherShard.applyIndexOperationOnReplica( 1, otherShard.getOperationPrimaryTerm(), @@ -2498,7 +2492,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "doc-0", new BytesArray("{}"), XContentType.JSON) ); flushShard(shard); shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here. 
@@ -2508,7 +2502,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "doc-1", new BytesArray("{}"), XContentType.JSON) ); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1")); @@ -2520,7 +2514,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "doc-2", new BytesArray("{}"), XContentType.JSON) ); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2")); @@ -2708,7 +2702,7 @@ public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\", \"fielddata\": true }}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\", \"fielddata\": true }}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -2761,7 +2755,7 @@ public void testReaderWrapperWorksWithGlobalOrdinals() throws IOException { public void testIndexingOperationListenersIsInvokedOnRecovery() throws IOException { IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}"); - deleteDoc(shard, "_doc", "0"); + deleteDoc(shard, "0"); indexDoc(shard, "_doc", "1", "{\"foo\" : \"bar\"}"); shard.refresh("test"); @@ -2848,7 +2842,7 @@ public void testTranslogRecoverySyncsTranslog() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + 
.putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -2893,7 +2887,7 @@ public void testRecoverFromTranslog() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, randomLongBetween(1, Long.MAX_VALUE)) .build(); @@ -2905,7 +2899,6 @@ public void testRecoverFromTranslog() throws IOException { if (randomBoolean()) { operations.add( new Translog.Index( - "_doc", "1", 0, primary.getPendingPrimaryTerm(), @@ -2919,7 +2912,6 @@ public void testRecoverFromTranslog() throws IOException { // corrupt entry operations.add( new Translog.Index( - "_doc", "2", 1, primary.getPendingPrimaryTerm(), @@ -2978,7 +2970,7 @@ public void testShardActiveDuringPeerRecovery() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3028,7 +3020,7 @@ public void testRefreshListenersDuringPeerRecovery() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3101,7 +3093,7 @@ public void testRecoverFromLocalShard() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("source") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": 
\"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3238,7 +3230,7 @@ public void testDocStats() throws Exception { ); for (final Integer i : ids) { final String id = Integer.toString(i); - deleteDoc(indexShard, "_doc", id); + deleteDoc(indexShard, id); indexDoc(indexShard, "_doc", id); } // Need to update and sync the global checkpoint and the retention leases for the soft-deletes retention MergePolicy. @@ -3355,7 +3347,11 @@ public void testEstimateTotalDocSize() throws Exception { // Do some updates and deletes, then recheck the correlation again. for (int i = 0; i < numDoc / 2; i++) { - indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}"); + if (randomBoolean()) { + deleteDoc(indexShard, Integer.toString(i)); + } else { + indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}"); + } } if (randomBoolean()) { indexShard.flush(new FlushRequest()); @@ -3705,7 +3701,6 @@ private Result indexOnReplicaWithGaps(final IndexShard indexShard, final int ope } SourceToParse sourceToParse = new SourceToParse( indexShard.shardId().getIndexName(), - "_doc", id, new BytesArray("{}"), XContentType.JSON @@ -3742,7 +3737,7 @@ public void testIsSearchIdle() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3792,7 +3787,7 @@ public void testScheduledRefresh() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ 
-3866,7 +3861,7 @@ public void testRefreshIsNeededWithRefreshListeners() throws IOException, Interr .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -3932,10 +3927,7 @@ public void testOnCloseStats() throws IOException { public void testSupplyTombstoneDoc() throws Exception { IndexShard shard = newStartedShard(); String id = randomRealisticUnicodeOfLengthBetween(1, 10); - ParsedDocument deleteTombstone = shard.getEngine() - .config() - .getTombstoneDocSupplier() - .newDeleteTombstoneDoc(MapperService.SINGLE_MAPPING_NAME, id); + ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc(id); assertThat(deleteTombstone.docs(), hasSize(1)); ParseContext.Document deleteDoc = deleteTombstone.docs().get(0); assertThat( @@ -4166,14 +4158,13 @@ public void testResetEngineWithBrokenTranslog() throws Exception { updateMappings( shard, IndexMetadata.builder(shard.indexSettings.getIndexMetadata()) - .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .build() ); final List operations = Stream.concat( IntStream.range(0, randomIntBetween(0, 10)) .mapToObj( n -> new Translog.Index( - "_doc", "1", 0, shard.getPendingPrimaryTerm(), @@ -4187,7 +4178,6 @@ public void testResetEngineWithBrokenTranslog() throws Exception { IntStream.range(0, randomIntBetween(1, 10)) .mapToObj( n -> new Translog.Index( - "_doc", "1", 0, shard.getPendingPrimaryTerm(), @@ -4297,7 +4287,7 @@ public void testTypelessGet() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("index") - .putMapping("some_type", "{ \"properties\": { 
\"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -4350,7 +4340,7 @@ public void testDoNotTrimCommitsWhenOpenReadOnlyEngine() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId.getIndexName(), "_doc", Long.toString(i), new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId.getIndexName(), Long.toString(i), new BytesArray("{}"), XContentType.JSON) ); shard.updateGlobalCheckpointOnReplica(shard.getLocalCheckpoint(), "test"); if (randomInt(100) < 10) { diff --git a/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java b/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java index 63734831d0964..6ac5f96e11f34 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexingOperationListenerTests.java @@ -161,7 +161,7 @@ public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { logger ); ParsedDocument doc = InternalEngineTests.createParsedDoc("1", null); - Engine.Delete delete = new Engine.Delete("test", "1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong()); + Engine.Delete delete = new Engine.Delete("1", new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong()); Engine.Index index = new Engine.Index(new Term("_id", Uid.encodeId(doc.id())), randomNonNegativeLong(), doc); compositeListener.postDelete(randomShardId, delete, new Engine.DeleteResult(1, 0, SequenceNumbers.UNASSIGNED_SEQ_NO, true)); assertEquals(0, preIndex.get()); diff --git a/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java index 1c3fa908f11da..631fa384de335 100644 --- 
a/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/PrimaryReplicaSyncerTests.java @@ -94,7 +94,7 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), + new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -176,7 +176,7 @@ public void testSyncerOnClosingShard() throws Exception { shard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse(shard.shardId().getIndexName(), "_doc", Integer.toString(i), new BytesArray("{}"), XContentType.JSON), + new SourceToParse(shard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, diff --git a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java index 97fd7fc8f279f..eea316d9a9370 100644 --- a/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RefreshListenersTests.java @@ -440,17 +440,7 @@ private Engine.IndexResult index(String id, String testFieldValue) throws IOExce document.add(seqID.seqNoDocValue); document.add(seqID.primaryTerm); BytesReference source = new BytesArray(new byte[] { 1 }); - ParsedDocument doc = new ParsedDocument( - versionField, - seqID, - id, - "test", - null, - Arrays.asList(document), - source, - XContentType.JSON, - null - ); + ParsedDocument doc = new ParsedDocument(versionField, seqID, id, null, Arrays.asList(document), source, 
XContentType.JSON, null); Engine.Index index = new Engine.Index(new Term("_id", doc.id()), engine.config().getPrimaryTermSupplier().getAsLong(), doc); return engine.index(index); } diff --git a/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java b/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java index c3ce944f51588..1b8809ba04278 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java @@ -142,7 +142,7 @@ public void setup() throws IOException { final IndexMetadata.Builder metadata = IndexMetadata.builder(routing.getIndexName()) .settings(settings) .primaryTerm(0, randomIntBetween(1, 100)) - .putMapping("_doc", "{ \"properties\": {} }"); + .putMapping("{ \"properties\": {} }"); indexMetadata = metadata.build(); clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(Metadata.builder().put(indexMetadata, false).build()).build(); diff --git a/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java index a04be37176389..5dd053574268e 100644 --- a/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/opensearch/index/shard/ShardGetServiceTests.java @@ -59,7 +59,7 @@ public void testGetForUpdate() throws IOException { .build(); IndexMetadata metadata = IndexMetadata.builder("test") - .putMapping("test", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); @@ -74,7 +74,7 @@ public void testGetForUpdate() throws IOException { assertEquals(searcher.getIndexReader().maxDoc(), 1); // we refreshed } - Engine.IndexResult test1 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", 
XContentType.JSON, "foobar"); + Engine.IndexResult test1 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); @@ -89,7 +89,7 @@ public void testGetForUpdate() throws IOException { } // now again from the reader - Engine.IndexResult test2 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); + Engine.IndexResult test2 = indexDoc(primary, "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); @@ -134,7 +134,6 @@ private void runGetFromTranslogWithOptions( IndexMetadata metadata = IndexMetadata.builder("test") .putMapping( - MapperService.SINGLE_MAPPING_NAME, "{ \"properties\": { \"foo\": { \"type\": " + fieldType + ", \"store\": true }, " @@ -158,7 +157,7 @@ private void runGetFromTranslogWithOptions( assertEquals(searcher.getIndexReader().maxDoc(), 1); // we refreshed } - Engine.IndexResult test1 = indexDoc(primary, MapperService.SINGLE_MAPPING_NAME, "1", docToIndex, XContentType.JSON, "foobar"); + Engine.IndexResult test1 = indexDoc(primary, "1", docToIndex, XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); GetResult testGet1 = primary.getService().getForUpdate("1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(new String(testGet1.source() == null ? 
new byte[0] : testGet1.source(), StandardCharsets.UTF_8), expectedResult); @@ -172,7 +171,7 @@ private void runGetFromTranslogWithOptions( assertEquals(searcher.getIndexReader().maxDoc(), 2); } - Engine.IndexResult test2 = indexDoc(primary, MapperService.SINGLE_MAPPING_NAME, "2", docToIndex, XContentType.JSON, "foobar"); + Engine.IndexResult test2 = indexDoc(primary, "2", docToIndex, XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); GetResult testGet2 = primary.getService() .get("2", new String[] { "foo" }, true, 1, VersionType.INTERNAL, FetchSourceContext.FETCH_SOURCE); @@ -204,7 +203,7 @@ public void testTypelessGetForUpdate() throws IOException { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetadata metadata = IndexMetadata.builder("index") - .putMapping("some_type", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .putMapping("{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1) .build(); diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java index 5614e07d7104d..f1eb5666f6b7f 100644 --- a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java @@ -358,13 +358,13 @@ public void testSimpleOperations() throws IOException { assertThat(snapshot, SnapshotMatchers.size(0)); } - addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); try (Translog.Snapshot snapshot = translog.newSnapshot()) { assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); assertThat(snapshot.totalOperations(), equalTo(ops.size())); } - addToTranslogAndList(translog, ops, new Translog.Delete("test", "2", 1, primaryTerm.get(), newUid("2"))); + 
addToTranslogAndList(translog, ops, new Translog.Delete("2", 1, primaryTerm.get(), newUid("2"))); try (Translog.Snapshot snapshot = translog.newSnapshot()) { assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); assertThat(snapshot.totalOperations(), equalTo(ops.size())); @@ -454,34 +454,34 @@ public void testStats() throws IOException { assertThat(stats.estimatedNumberOfOperations(), equalTo(0)); } assertThat((int) firstOperationPosition, greaterThan(CodecUtil.headerLength(TranslogHeader.TRANSLOG_CODEC))); - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(1)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(162L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(157L)); assertThat(stats.getUncommittedOperations(), equalTo(1)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(107L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(102L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } - translog.add(new Translog.Delete("test", "2", 1, primaryTerm.get(), newUid("2"))); + translog.add(new Translog.Delete("2", 1, primaryTerm.get(), newUid("2"))); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(2)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(210L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(200L)); assertThat(stats.getUncommittedOperations(), equalTo(2)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(155L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(145L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } - translog.add(new Translog.Delete("test", "3", 2, primaryTerm.get(), newUid("3"))); + translog.add(new Translog.Delete("3", 2, primaryTerm.get(), newUid("3"))); { final TranslogStats stats = stats(); 
assertThat(stats.estimatedNumberOfOperations(), equalTo(3)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(258L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(243L)); assertThat(stats.getUncommittedOperations(), equalTo(3)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(203L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(188L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -489,9 +489,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(300L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(285L)); assertThat(stats.getUncommittedOperations(), equalTo(4)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(245L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(230L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -499,9 +499,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(355L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(340L)); assertThat(stats.getUncommittedOperations(), equalTo(4)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(300L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(285L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -511,7 +511,7 @@ public void testStats() throws IOException { stats.writeTo(out); final TranslogStats copy = new TranslogStats(out.bytes().streamInput()); assertThat(copy.estimatedNumberOfOperations(), equalTo(4)); - assertThat(copy.getTranslogSizeInBytes(), equalTo(355L)); + assertThat(copy.getTranslogSizeInBytes(), equalTo(340L)); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); @@ -521,9 +521,9 @@ public void testStats() throws 
IOException { Strings.toString(builder), equalTo( "{\"translog\":{\"operations\":4,\"size_in_bytes\":" - + 355 + + 340 + ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":" - + 300 + + 285 + ",\"earliest_last_modified_age\":" + stats.getEarliestLastModifiedAge() + "}}" @@ -537,7 +537,7 @@ public void testStats() throws IOException { long lastModifiedAge = System.currentTimeMillis() - translog.getCurrent().getLastModifiedTime(); final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(355L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(340L)); assertThat(stats.getUncommittedOperations(), equalTo(0)); assertThat(stats.getUncommittedSizeInBytes(), equalTo(firstOperationPosition)); assertThat(stats.getEarliestLastModifiedAge(), greaterThanOrEqualTo(lastModifiedAge)); @@ -553,7 +553,7 @@ public void testUncommittedOperations() throws Exception { int uncommittedOps = 0; int operationsInLastGen = 0; for (int i = 0; i < operations; i++) { - translog.add(new Translog.Index("test", Integer.toString(i), i, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index(Integer.toString(i), i, primaryTerm.get(), new byte[] { 1 })); uncommittedOps++; operationsInLastGen++; if (rarely()) { @@ -634,7 +634,7 @@ public void testBasicSnapshot() throws IOException { assertThat(snapshot, SnapshotMatchers.size(0)); } - addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); try (Translog.Snapshot snapshot = translog.newSnapshot(0, Long.MAX_VALUE)) { assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); @@ -656,9 +656,9 @@ public void testBasicSnapshot() throws IOException { public void testReadLocation() throws IOException { ArrayList ops = new ArrayList<>(); ArrayList locs = new ArrayList<>(); - 
locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 }))); - locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 1 }))); - locs.add(addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 1 }))); + locs.add(addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 }))); + locs.add(addToTranslogAndList(translog, ops, new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 1 }))); + locs.add(addToTranslogAndList(translog, ops, new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 1 }))); int i = 0; for (Translog.Operation op : ops) { assertEquals(op, translog.readOperation(locs.get(i++))); @@ -674,16 +674,16 @@ public void testSnapshotWithNewTranslog() throws IOException { toClose.add(snapshot); assertThat(snapshot, SnapshotMatchers.size(0)); - addToTranslogAndList(translog, ops, new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + addToTranslogAndList(translog, ops, new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); Translog.Snapshot snapshot1 = translog.newSnapshot(); toClose.add(snapshot1); - addToTranslogAndList(translog, ops, new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 2 })); + addToTranslogAndList(translog, ops, new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 2 })); assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0))); translog.rollGeneration(); - addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 3 })); + addToTranslogAndList(translog, ops, new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 3 })); Translog.Snapshot snapshot2 = translog.newSnapshot(); toClose.add(snapshot2); @@ -697,7 +697,7 @@ public void testSnapshotWithNewTranslog() throws IOException { public void testSnapshotOnClosedTranslog() throws 
IOException { assertTrue(Files.exists(translogDir.resolve(Translog.getFilename(1)))); - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); translog.close(); AlreadyClosedException ex = expectThrows(AlreadyClosedException.class, () -> translog.newSnapshot()); assertEquals(ex.getMessage(), "translog is already closed"); @@ -719,13 +719,7 @@ public void testRangeSnapshot() throws Exception { } List ops = new ArrayList<>(seqNos.size()); for (long seqNo : seqNos) { - Translog.Index op = new Translog.Index( - "_doc", - randomAlphaOfLength(10), - seqNo, - primaryTerm.get(), - new byte[] { randomByte() } - ); + Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { randomByte() }); translog.add(op); ops.add(op); } @@ -786,13 +780,7 @@ private Long populateTranslogOps(boolean withMissingOps) throws IOException { Collections.shuffle(seqNos, new Random(100)); List ops = new ArrayList<>(seqNos.size()); for (long seqNo : seqNos) { - Translog.Index op = new Translog.Index( - "_doc", - randomAlphaOfLength(10), - seqNo, - primaryTerm.get(), - new byte[] { randomByte() } - ); + Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { randomByte() }); boolean shouldAdd = !withMissingOps || seqNo % 4 != 0; if (shouldAdd) { translog.add(op); @@ -928,7 +916,6 @@ public void testConcurrentWritesWithVaryingSize() throws Throwable { Translog.Index expIndexOp = (Translog.Index) expectedOp; assertEquals(expIndexOp.id(), indexOp.id()); assertEquals(expIndexOp.routing(), indexOp.routing()); - assertEquals(expIndexOp.type(), indexOp.type()); assertEquals(expIndexOp.source(), indexOp.source()); assertEquals(expIndexOp.version(), indexOp.version()); break; @@ -962,7 +949,7 @@ public void testTranslogCorruption() throws Exception { int translogOperations = randomIntBetween(10, 1000); for 
(int op = 0; op < translogOperations; op++) { String ascii = randomAlphaOfLengthBetween(1, 50); - locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")))); + locations.add(translog.add(new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")))); if (rarely()) { translog.rollGeneration(); @@ -989,7 +976,7 @@ public void testTruncatedTranslogs() throws Exception { int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { String ascii = randomAlphaOfLengthBetween(1, 50); - locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")))); + locations.add(translog.add(new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")))); } translog.sync(); @@ -1034,7 +1021,7 @@ private Term newUid(String id) { public void testVerifyTranslogIsNotDeleted() throws IOException { assertFileIsPresent(translog, 1); - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); try (Translog.Snapshot snapshot = translog.newSnapshot()) { assertThat(snapshot, SnapshotMatchers.size(1)); assertFileIsPresent(translog, 1); @@ -1086,10 +1073,10 @@ public void doRun() throws BrokenBarrierException, InterruptedException, IOExcep switch (type) { case CREATE: case INDEX: - op = new Translog.Index("type", "" + id, id, primaryTerm.get(), new byte[] { (byte) id }); + op = new Translog.Index("" + id, id, primaryTerm.get(), new byte[] { (byte) id }); break; case DELETE: - op = new Translog.Delete("test", Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id))); + op = new Translog.Delete(Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id))); break; case NO_OP: op = new Translog.NoOp(id, 1, Long.toString(id)); @@ -1248,7 +1235,7 @@ public void testSyncUpTo() throws IOException { for (int op = 
0; op < translogOperations; op++) { int seqNo = ++count; final Translog.Location location = translog.add( - new Translog.Index("test", "" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8"))) ); if (randomBoolean()) { assertTrue("at least one operation pending", translog.syncNeeded()); @@ -1257,13 +1244,7 @@ public void testSyncUpTo() throws IOException { assertFalse("the last call to ensureSycned synced all previous ops", translog.syncNeeded()); seqNo = ++count; translog.add( - new Translog.Index( - "test", - "" + op, - seqNo, - primaryTerm.get(), - Integer.toString(seqNo).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index("" + op, seqNo, primaryTerm.get(), Integer.toString(seqNo).getBytes(Charset.forName("UTF-8"))) ); assertTrue("one pending operation", translog.syncNeeded()); assertFalse("this op has been synced before", translog.ensureSynced(location)); // not syncing now @@ -1293,7 +1274,7 @@ public void testSyncUpToStream() throws IOException { translog.rollGeneration(); } final Translog.Location location = translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8"))) ); locations.add(location); } @@ -1325,7 +1306,7 @@ public void testLocationComparison() throws IOException { for (int op = 0; op < translogOperations; op++) { locations.add( translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(++count).getBytes(Charset.forName("UTF-8"))) ) ); if (rarely() && translogOperations > op + 1) { @@ -1364,9 +1345,7 @@ public void testBasicCheckpoint() throws IOException { 
long lastSyncedGlobalCheckpoint = globalCheckpoint.get(); for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); if (randomBoolean()) { globalCheckpoint.set(globalCheckpoint.get() + randomIntBetween(1, 16)); @@ -1380,7 +1359,6 @@ public void testBasicCheckpoint() throws IOException { assertEquals(translogOperations, translog.totalOperations()); translog.add( new Translog.Index( - "test", "" + translogOperations, translogOperations, primaryTerm.get(), @@ -1730,9 +1708,7 @@ public void testBasicRecovery() throws IOException { final boolean commitOften = randomBoolean(); for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); final boolean commit = commitOften ? 
frequently() : rarely(); if (commit && op < translogOperations - 1) { @@ -1791,9 +1767,7 @@ public void testRecoveryUncommitted() throws IOException { final boolean sync = randomBoolean(); for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); if (op == prepareOp) { translogGeneration = translog.getGeneration(); @@ -1878,9 +1852,7 @@ public void testRecoveryUncommittedFileExists() throws IOException { final boolean sync = randomBoolean(); for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); if (op == prepareOp) { translogGeneration = translog.getGeneration(); @@ -1968,7 +1940,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException { Translog.TranslogGeneration translogGeneration = null; final boolean sync = randomBoolean(); for (int op = 0; op < translogOperations; op++) { - translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(StandardCharsets.UTF_8))); + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(StandardCharsets.UTF_8))); if (op == prepareOp) { translogGeneration = translog.getGeneration(); translog.rollGeneration(); @@ -2003,7 +1975,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException { assertThat( translogCorruptedException.getMessage(), endsWith( - "] is corrupted, checkpoint file translog-3.ckp already exists but has corrupted content: expected Checkpoint{offset=3025, 
" + "] is corrupted, checkpoint file translog-3.ckp already exists but has corrupted content: expected Checkpoint{offset=2750, " + "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} " + "but got Checkpoint{offset=0, numOps=0, generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, " + "minTranslogGeneration=0, trimmedAboveSeqNo=-2}" @@ -2050,7 +2022,6 @@ public void testSnapshotFromStreamInput() throws IOException { int translogOperations = randomIntBetween(10, 100); for (int op = 0; op < translogOperations; op++) { Translog.Index test = new Translog.Index( - "test", "" + op, op, primaryTerm.get(), @@ -2073,7 +2044,7 @@ public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() thr for (int op = 0; op < extraDocs; op++) { String ascii = randomAlphaOfLengthBetween(1, 50); - Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get() - op, ascii.getBytes("UTF-8")); + Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get() - op, ascii.getBytes("UTF-8")); translog.add(operation); } @@ -2093,13 +2064,7 @@ public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() thr translog.rollGeneration(); // add a single operation to current with seq# > trimmed seq# but higher primary term - Translog.Index operation = new Translog.Index( - "test", - "" + 1, - 1L, - primaryTerm.get(), - randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8") - ); + Translog.Index operation = new Translog.Index("" + 1, 1L, primaryTerm.get(), randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8")); translog.add(operation); // it is possible to trim after generation rollover @@ -2129,7 +2094,7 @@ public void testSnapshotTrimmedOperations() throws Exception { } // use ongoing primaryTerms - or the same as it was - Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), source.get().getBytes("UTF-8")); + Translog.Index operation 
= new Translog.Index("" + op, op, primaryTerm.get(), source.get().getBytes("UTF-8")); translog.add(operation); inMemoryTranslog.add(operation); allOperations.add(operation); @@ -2213,7 +2178,7 @@ public void testRandomExceptionsOnTrimOperations() throws Exception { Randomness.shuffle(ops); for (int op : ops) { String ascii = randomAlphaOfLengthBetween(1, 50); - Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")); + Translog.Index operation = new Translog.Index("" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8")); failableTLog.add(operation); } @@ -2271,12 +2236,12 @@ public void testLocationHashCodeEquals() throws IOException { for (int op = 0; op < translogOperations; op++) { locations.add( translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ) ); locations2.add( translog2.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ) ); } @@ -2305,9 +2270,7 @@ public void testOpenForeignTranslog() throws IOException { int firstUncommitted = 0; for (int op = 0; op < translogOperations; op++) { locations.add( - translog.add( - new Translog.Index("test", "" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) - ) + translog.add(new Translog.Index("" + op, op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8")))) ); if (randomBoolean()) { translog.rollGeneration(); @@ -2355,10 +2318,10 @@ public void testOpenForeignTranslog() throws IOException { } public void testFailOnClosedWrite() throws IOException { - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), 
Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); translog.close(); try { - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); fail("closed"); } catch (AlreadyClosedException ex) { // all is well @@ -2442,7 +2405,6 @@ public void run() { case CREATE: case INDEX: op = new Translog.Index( - "test", threadId + "_" + opCount, seqNoGenerator.getAndIncrement(), primaryTerm.get(), @@ -2451,7 +2413,6 @@ public void run() { break; case DELETE: op = new Translog.Delete( - "test", threadId + "_" + opCount, new Term("_uid", threadId + "_" + opCount), seqNoGenerator.getAndIncrement(), @@ -2499,7 +2460,6 @@ public void testFailFlush() throws IOException { locations.add( translog.add( new Translog.Index( - "test", "" + opsSynced, opsSynced, primaryTerm.get(), @@ -2529,7 +2489,6 @@ public void testFailFlush() throws IOException { locations.add( translog.add( new Translog.Index( - "test", "" + opsSynced, opsSynced, primaryTerm.get(), @@ -2611,7 +2570,6 @@ public void testTranslogOpsCountIsCorrect() throws IOException { locations.add( translog.add( new Translog.Index( - "test", "" + opsAdded, opsAdded, primaryTerm.get(), @@ -2640,13 +2598,11 @@ public void testTragicEventCanBeAnyException() throws IOException { TranslogConfig config = getTranslogConfig(tempDir); Translog translog = getFailableTranslog(fail, config, false, true, null, createTranslogDeletionPolicy()); LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer boarders regularly - translog.add( - new Translog.Index("test", "1", 0, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))) - ); + translog.add(new Translog.Index("1", 0, 
primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))); fail.failAlways(); try { Translog.Location location = translog.add( - new Translog.Index("test", "2", 1, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))) + new Translog.Index("2", 1, primaryTerm.get(), lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8"))) ); if (randomBoolean()) { translog.ensureSynced(location); @@ -2772,13 +2728,7 @@ public void testRecoveryFromAFutureGenerationCleansUp() throws IOException { int op = 0; for (; op < translogOperations / 2; op++) { translog.add( - new Translog.Index( - "_doc", - Integer.toString(op), - op, - primaryTerm.get(), - Integer.toString(op).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ); if (rarely()) { translog.rollGeneration(); @@ -2788,13 +2738,7 @@ public void testRecoveryFromAFutureGenerationCleansUp() throws IOException { long localCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, op); for (op = translogOperations / 2; op < translogOperations; op++) { translog.add( - new Translog.Index( - "test", - Integer.toString(op), - op, - primaryTerm.get(), - Integer.toString(op).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ); if (rarely()) { translog.rollGeneration(); @@ -2847,13 +2791,7 @@ public void testRecoveryFromFailureOnTrimming() throws IOException { int op = 0; for (; op < translogOperations / 2; op++) { translog.add( - new Translog.Index( - "test", - Integer.toString(op), - op, - primaryTerm.get(), - Integer.toString(op).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ); if (rarely()) { translog.rollGeneration(); @@ 
-2863,13 +2801,7 @@ public void testRecoveryFromFailureOnTrimming() throws IOException { localCheckpoint = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, op); for (op = translogOperations / 2; op < translogOperations; op++) { translog.add( - new Translog.Index( - "test", - Integer.toString(op), - op, - primaryTerm.get(), - Integer.toString(op).getBytes(Charset.forName("UTF-8")) - ) + new Translog.Index(Integer.toString(op), op, primaryTerm.get(), Integer.toString(op).getBytes(Charset.forName("UTF-8"))) ); if (rarely()) { translog.rollGeneration(); @@ -3132,7 +3064,7 @@ public void testFailWhileCreateWriteWithRecoveredTLogs() throws IOException { Path tempDir = createTempDir(); TranslogConfig config = getTranslogConfig(tempDir); Translog translog = createTranslog(config); - translog.add(new Translog.Index("test", "boom", 0, primaryTerm.get(), "boom".getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("boom", 0, primaryTerm.get(), "boom".getBytes(Charset.forName("UTF-8")))); translog.close(); try { new Translog( @@ -3161,7 +3093,7 @@ protected TranslogWriter createWriter( } public void testRecoverWithUnbackedNextGen() throws IOException { - translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); translog.close(); TranslogConfig config = translog.getConfig(); @@ -3176,7 +3108,7 @@ public void testRecoverWithUnbackedNextGen() throws IOException { assertNotNull("operation 1 must be non-null", op); assertEquals("payload mismatch for operation 1", 1, Integer.parseInt(op.getSource().source.utf8ToString())); - tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(2).getBytes(Charset.forName("UTF-8")))); + tlog.add(new Translog.Index("" + 1, 1, primaryTerm.get(), Integer.toString(2).getBytes(Charset.forName("UTF-8")))); } try 
(Translog tlog = openTranslog(config, translog.getTranslogUUID()); Translog.Snapshot snapshot = tlog.newSnapshot()) { @@ -3193,7 +3125,7 @@ public void testRecoverWithUnbackedNextGen() throws IOException { } public void testRecoverWithUnbackedNextGenInIllegalState() throws IOException { - translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8")))); translog.close(); TranslogConfig config = translog.getConfig(); Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME); @@ -3217,7 +3149,7 @@ public void testRecoverWithUnbackedNextGenInIllegalState() throws IOException { } public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException { - translog.add(new Translog.Index("test", "" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8")))); + translog.add(new Translog.Index("" + 0, 0, primaryTerm.get(), Integer.toString(0).getBytes(Charset.forName("UTF-8")))); translog.close(); TranslogConfig config = translog.getConfig(); final String translogUUID = translog.getTranslogUUID(); @@ -3247,7 +3179,7 @@ public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException { assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString())); } } - tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); + tlog.add(new Translog.Index("" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } TranslogException ex = expectThrows( @@ -3293,7 +3225,7 @@ public void testWithRandomException() throws IOException { for (int opsAdded = 0; opsAdded < numOps; opsAdded++) { String doc = lineFileDocs.nextDoc().toString(); failableTLog.add( - new Translog.Index("test", "" + opsAdded, opsAdded, primaryTerm.get(), 
doc.getBytes(Charset.forName("UTF-8"))) + new Translog.Index("" + opsAdded, opsAdded, primaryTerm.get(), doc.getBytes(Charset.forName("UTF-8"))) ); unsynced.add(doc); if (randomBoolean()) { @@ -3464,7 +3396,7 @@ public void testLegacyCheckpointVersion() throws IOException { * Tests that closing views after the translog is fine and we can reopen the translog */ public void testPendingDelete() throws IOException { - translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 0, primaryTerm.get(), new byte[] { 1 })); translog.rollGeneration(); TranslogConfig config = translog.getConfig(); final String translogUUID = translog.getTranslogUUID(); @@ -3478,10 +3410,10 @@ public void testPendingDelete() throws IOException { primaryTerm::get, seqNo -> {} ); - translog.add(new Translog.Index("test", "2", 1, primaryTerm.get(), new byte[] { 2 })); + translog.add(new Translog.Index("2", 1, primaryTerm.get(), new byte[] { 2 })); translog.rollGeneration(); Closeable lock = translog.acquireRetentionLock(); - translog.add(new Translog.Index("test", "3", 2, primaryTerm.get(), new byte[] { 3 })); + translog.add(new Translog.Index("3", 2, primaryTerm.get(), new byte[] { 3 })); translog.close(); IOUtils.close(lock); translog = new Translog( @@ -3515,17 +3447,7 @@ public void testTranslogOpSerialization() throws Exception { document.add(seqID.seqNo); document.add(seqID.seqNoDocValue); document.add(seqID.primaryTerm); - ParsedDocument doc = new ParsedDocument( - versionField, - seqID, - "1", - "type", - null, - Arrays.asList(document), - B_1, - XContentType.JSON, - null - ); + ParsedDocument doc = new ParsedDocument(versionField, seqID, "1", null, Arrays.asList(document), B_1, XContentType.JSON, null); Engine.Index eIndex = new Engine.Index( newUid(doc), @@ -3554,7 +3476,6 @@ public void testTranslogOpSerialization() throws Exception { assertEquals(index, serializedIndex); Engine.Delete eDelete = new Engine.Delete( - 
doc.type(), doc.id(), newUid(doc), randomSeqNum, @@ -3793,7 +3714,6 @@ public void testSnapshotReadOperationInReverse() throws Exception { final int operations = randomIntBetween(1, 100); for (int i = 0; i < operations; i++) { Translog.Index op = new Translog.Index( - "doc", randomAlphaOfLength(10), seqNo.getAndIncrement(), primaryTerm.get(), @@ -3823,7 +3743,7 @@ public void testSnapshotDedupOperations() throws Exception { List batch = LongStream.rangeClosed(0, between(0, 500)).boxed().collect(Collectors.toList()); Randomness.shuffle(batch); for (Long seqNo : batch) { - Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { 1 }); + Translog.Index op = new Translog.Index(randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[] { 1 }); translog.add(op); latestOperations.put(op.seqNo(), op); } @@ -3838,7 +3758,7 @@ public void testSnapshotDedupOperations() throws Exception { public void testCloseSnapshotTwice() throws Exception { int numOps = between(0, 10); for (int i = 0; i < numOps; i++) { - Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), i, primaryTerm.get(), new byte[] { 1 }); + Translog.Index op = new Translog.Index(randomAlphaOfLength(10), i, primaryTerm.get(), new byte[] { 1 }); translog.add(op); if (randomBoolean()) { translog.rollGeneration(); @@ -3912,7 +3832,7 @@ public void testMaxSeqNo() throws Exception { Randomness.shuffle(seqNos); for (long seqNo : seqNos) { if (frequently()) { - translog.add(new Translog.Index("test", "id", seqNo, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("id", seqNo, primaryTerm.get(), new byte[] { 1 })); maxSeqNoPerGeneration.compute( translog.currentFileGeneration(), (key, existing) -> existing == null ? 
seqNo : Math.max(existing, seqNo) @@ -4050,9 +3970,7 @@ public void testSyncConcurrently() throws Exception { int iterations = randomIntBetween(10, 100); for (int i = 0; i < iterations; i++) { List ops = IntStream.range(0, between(1, 10)) - .mapToObj( - n -> new Translog.Index("test", "1", nextSeqNo.incrementAndGet(), primaryTerm.get(), new byte[] { 1 }) - ) + .mapToObj(n -> new Translog.Index("1", nextSeqNo.incrementAndGet(), primaryTerm.get(), new byte[] { 1 })) .collect(Collectors.toList()); try { Translog.Location location = null; @@ -4134,7 +4052,7 @@ void syncBeforeRollGeneration() { } }; try { - translog.add(new Translog.Index("1", "_doc", 1, primaryTerm.get(), new byte[] { 1 })); + translog.add(new Translog.Index("1", 1, primaryTerm.get(), new byte[] { 1 })); failedToSyncCheckpoint.set(true); expectThrows(IOException.class, translog::rollGeneration); final AlreadyClosedException alreadyClosedException = expectThrows(AlreadyClosedException.class, translog::rollGeneration); diff --git a/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java b/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java index 2c32e419b27db..c68ad7eaba82e 100644 --- a/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java +++ b/server/src/test/java/org/opensearch/indices/IndexingMemoryControllerTests.java @@ -367,7 +367,7 @@ public void testThrottling() throws Exception { public void testTranslogRecoveryWorksWithIMC() throws IOException { IndexShard shard = newStartedShard(true); for (int i = 0; i < 100; i++) { - indexDoc(shard, "_doc", Integer.toString(i), "{\"foo\" : \"bar\"}", XContentType.JSON, null); + indexDoc(shard, Integer.toString(i), "{\"foo\" : \"bar\"}", XContentType.JSON, null); } shard.close("simon says", false); AtomicReference shardRef = new AtomicReference<>(); diff --git a/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java 
b/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java index 501253bee27f9..e54f06937cad3 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/PeerRecoveryTargetServiceTests.java @@ -185,7 +185,7 @@ private SeqNoStats populateRandomData(IndexShard shard) throws IOException { shard.getOperationPrimaryTerm(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(shard.shardId().getIndexName(), "_doc", UUIDs.randomBase64UUID(), new BytesArray("{}"), XContentType.JSON) + new SourceToParse(shard.shardId().getIndexName(), UUIDs.randomBase64UUID(), new BytesArray("{}"), XContentType.JSON) ); if (randomInt(100) < 5) { shard.flush(new FlushRequest().waitIfOngoing(true)); diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java index 3890470f966ca..e713ef5d35f67 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java @@ -462,7 +462,6 @@ public void indexTranslogOperations( } private Engine.Index getIndex(final String id) { - final String type = "test"; final ParseContext.Document document = new ParseContext.Document(); document.add(new TextField("test", "test", Field.Store.YES)); final Field idField = new Field("_id", Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE); @@ -478,7 +477,6 @@ private Engine.Index getIndex(final String id) { versionField, seqID, id, - type, null, Arrays.asList(document), source, @@ -1188,10 +1186,9 @@ private static List generateOperations(int numOps) { final long seqNo = randomValueOtherThanMany(n -> seqNos.add(n) == false, OpenSearchTestCase::randomNonNegativeLong); final Translog.Operation op; if (randomBoolean()) { - op = new 
Translog.Index("_doc", "id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1); + op = new Translog.Index("id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1); } else if (randomBoolean()) { op = new Translog.Delete( - "_doc", "id", new Term("_id", Uid.encodeId("id")), seqNo, diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java index 54f4a22f3a577..c714bd0eb85a2 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java @@ -161,7 +161,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { // delete #1 orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete - orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "type", "id"); + orgReplica.applyDeleteOperationOnReplica(1, primaryTerm, 2, "id"); orgReplica.flush(new FlushRequest().force(true)); // isolate delete#1 in its own translog generation and lucene segment // index #0 orgReplica.applyIndexOperationOnReplica( @@ -170,7 +170,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "id", new BytesArray("{}"), XContentType.JSON) ); // index #3 orgReplica.applyIndexOperationOnReplica( @@ -179,7 +179,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "id-3", new BytesArray("{}"), XContentType.JSON) ); // Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1. 
orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true)); @@ -190,7 +190,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "id-2", new BytesArray("{}"), XContentType.JSON) ); orgReplica.sync(); // advance local checkpoint orgReplica.updateGlobalCheckpointOnReplica(3L, "test"); @@ -201,7 +201,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, - new SourceToParse(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON) + new SourceToParse(indexName, "id-5", new BytesArray("{}"), XContentType.JSON) ); if (randomBoolean()) { @@ -310,13 +310,7 @@ public void testPeerRecoverySendSafeCommitInFileBased() throws Exception { Engine.IndexResult result = primaryShard.applyIndexOperationOnPrimary( Versions.MATCH_ANY, VersionType.INTERNAL, - new SourceToParse( - primaryShard.shardId().getIndexName(), - "_doc", - Integer.toString(i), - new BytesArray("{}"), - XContentType.JSON - ), + new SourceToParse(primaryShard.shardId().getIndexName(), Integer.toString(i), new BytesArray("{}"), XContentType.JSON), SequenceNumbers.UNASSIGNED_SEQ_NO, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java index bbc9a0fdbe309..b2c54492b66d7 100644 --- a/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java +++ b/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java @@ -100,7 +100,6 @@ public void testIndexingWithNoContexts() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new 
SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -153,7 +152,6 @@ public void testIndexingWithSimpleContexts() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -201,7 +199,6 @@ public void testIndexingWithSimpleNumberContexts() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -249,7 +246,6 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -307,7 +303,7 @@ public void testIndexingWithSimpleNULLContexts() throws Exception { Exception e = expectThrows( MapperParsingException.class, - () -> defaultMapper.parse(new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON)) + () -> defaultMapper.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON)) ); assertEquals( "contexts must be a string, number or boolean or a list of string, number or boolean, but was [VALUE_NULL]", @@ -341,7 +337,6 @@ public void testIndexingWithContextList() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -387,7 +382,6 @@ public void testIndexingWithMixedTypeContextList() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse( new SourceToParse( "test", - "type1", "1", BytesReference.bytes( jsonBuilder().startObject() @@ -441,7 +435,7 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { Exception e = expectThrows( MapperParsingException.class, - () -> defaultMapper.parse(new SourceToParse("test", "type1", "1", 
BytesReference.bytes(builder), XContentType.JSON)) + () -> defaultMapper.parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON)) ); assertEquals("context array must have string, number or boolean values, but was [VALUE_NULL]", e.getCause().getMessage()); } @@ -486,7 +480,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endArray() .endObject(); ParsedDocument parsedDocument = defaultMapper.parse( - new SourceToParse("test", "type1", "1", BytesReference.bytes(builder), XContentType.JSON) + new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON) ); IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name()); assertContextSuggestFields(fields, 3); diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java index 361a6cd543d65..31cc2e73ff2a3 100644 --- a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java @@ -83,7 +83,6 @@ public void testIndexingWithNoContexts() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -131,7 +130,6 @@ public void testIndexingWithSimpleContexts() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -177,7 +175,6 @@ public void testIndexingWithContextList() throws Exception { .parse( new SourceToParse( "test", - MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes( jsonBuilder().startObject() @@ -240,7 +237,7 @@ public void testIndexingWithMultipleContexts() throws Exception { .endArray() .endObject(); ParsedDocument parsedDocument = mapperService.documentMapper() - .parse(new 
SourceToParse("test", MapperService.SINGLE_MAPPING_NAME, "1", BytesReference.bytes(builder), XContentType.JSON)); + .parse(new SourceToParse("test", "1", BytesReference.bytes(builder), XContentType.JSON)); IndexableField[] fields = parsedDocument.rootDoc().getFields(completionFieldType.name()); assertContextSuggestFields(fields, 3); } diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index 69f7bef90d78f..6c382a09b90f3 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -175,10 +175,6 @@ public abstract class EngineTestCase extends OpenSearchTestCase { // A default primary term is used by engine instances created in this test. protected final PrimaryTermSupplier primaryTerm = new PrimaryTermSupplier(1L); - protected static void assertVisibleCount(Engine engine, int numDocs) throws IOException { - assertVisibleCount(engine, numDocs, true); - } - protected static void assertVisibleCount(Engine engine, int numDocs, boolean refresh) throws IOException { if (refresh) { engine.refresh("test"); @@ -333,14 +329,14 @@ public void tearDown() throws Exception { try { if (engine != null && engine.isClosed.get() == false) { engine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs(); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test")); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService()); assertNoInFlightDocuments(engine); assertMaxSeqNoInCommitUserData(engine); assertAtMostOneLuceneDocumentPerSequenceNumber(engine); } if (replicaEngine != null && replicaEngine.isClosed.get() == false) { replicaEngine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs(); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine, createMapperService("test")); 
+ assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine, createMapperService()); assertNoInFlightDocuments(replicaEngine); assertMaxSeqNoInCommitUserData(replicaEngine); assertAtMostOneLuceneDocumentPerSequenceNumber(replicaEngine); @@ -412,21 +408,11 @@ protected static ParsedDocument testParsedDocument( } else { document.add(new StoredField(SourceFieldMapper.NAME, ref.bytes, ref.offset, ref.length)); } - return new ParsedDocument( - versionField, - seqID, - id, - "test", - routing, - Arrays.asList(document), - source, - XContentType.JSON, - mappingUpdate - ); + return new ParsedDocument(versionField, seqID, id, routing, Arrays.asList(document), source, XContentType.JSON, mappingUpdate); } public static CheckedBiFunction nestedParsedDocFactory() throws Exception { - final MapperService mapperService = createMapperService("type"); + final MapperService mapperService = createMapperService(); final String nestedMapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() @@ -450,7 +436,7 @@ public static CheckedBiFunction ne source.endObject(); } source.endObject(); - return nestedMapper.parse(new SourceToParse("test", "type", docId, BytesReference.bytes(source), XContentType.JSON)); + return nestedMapper.parse(new SourceToParse("test", docId, BytesReference.bytes(source), XContentType.JSON)); }; } @@ -460,7 +446,7 @@ public static CheckedBiFunction ne public static EngineConfig.TombstoneDocSupplier tombstoneDocSupplier() { return new EngineConfig.TombstoneDocSupplier() { @Override - public ParsedDocument newDeleteTombstoneDoc(String type, String id) { + public ParsedDocument newDeleteTombstoneDoc(String id) { final ParseContext.Document doc = new ParseContext.Document(); Field uidField = new Field(IdFieldMapper.NAME, Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE); doc.add(uidField); @@ -476,7 +462,6 @@ public ParsedDocument newDeleteTombstoneDoc(String type, String id) { versionField, seqID, id, - type, null, 
Collections.singletonList(doc), new BytesArray("{}"), @@ -498,17 +483,7 @@ public ParsedDocument newNoopTombstoneDoc(String reason) { doc.add(versionField); BytesRef byteRef = new BytesRef(reason); doc.add(new StoredField(SourceFieldMapper.NAME, byteRef.bytes, byteRef.offset, byteRef.length)); - return new ParsedDocument( - versionField, - seqID, - null, - null, - null, - Collections.singletonList(doc), - null, - XContentType.JSON, - null - ); + return new ParsedDocument(versionField, seqID, null, null, Collections.singletonList(doc), null, XContentType.JSON, null); } }; } @@ -991,7 +966,7 @@ protected Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long } protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) { - return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, null, REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0); + return new Engine.Delete(id, newUid(id), seqNo, 1, version, null, REPLICA, startTime, SequenceNumbers.UNASSIGNED_SEQ_NO, 0); } protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException { @@ -1056,7 +1031,6 @@ public static List generateSingleDocHistory( ); } else { op = new Engine.Delete( - "test", docId, id, forReplica && i >= startWithSeqNo ? 
i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, @@ -1115,7 +1089,6 @@ public List generateHistoryOnReplica( case DELETE: operations.add( new Engine.Delete( - doc.type(), doc.id(), EngineTestCase.newUid(doc), seqNo, @@ -1478,7 +1451,7 @@ public static void assertAtMostOneLuceneDocumentPerSequenceNumber(IndexSettings } } - public static MapperService createMapperService(String type) throws IOException { + public static MapperService createMapperService() throws IOException { IndexMetadata indexMetadata = IndexMetadata.builder("test") .settings( Settings.builder() @@ -1486,7 +1459,7 @@ public static MapperService createMapperService(String type) throws IOException .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) ) - .putMapping(type, "{\"properties\": {}}") + .putMapping("{\"properties\": {}}") .build(); MapperService mapperService = MapperTestUtils.newMapperService( new NamedXContentRegistry(ClusterModule.getNamedXWriteables()), diff --git a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java index afb44caa64987..7dbe2c7381fd8 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java @@ -44,7 +44,6 @@ import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.DocumentMapperForType; import org.opensearch.index.mapper.MapperService; -import org.opensearch.index.mapper.Mapping; import org.opensearch.index.mapper.RootObjectMapper; import org.opensearch.index.mapper.SourceToParse; import org.opensearch.index.seqno.SequenceNumbers; @@ -65,8 +64,6 @@ public class TranslogHandler implements Engine.TranslogRecoveryRunner { private final MapperService mapperService; - public Mapping mappingUpdate = null; - private final Map recoveredTypes = new HashMap<>(); private final AtomicLong 
appliedOperations = new AtomicLong(); @@ -95,21 +92,13 @@ public TranslogHandler(NamedXContentRegistry xContentRegistry, IndexSettings ind private DocumentMapperForType docMapper(String type) { RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder(type); DocumentMapper.Builder b = new DocumentMapper.Builder(rootBuilder, mapperService); - return new DocumentMapperForType(b.build(mapperService), mappingUpdate); + return new DocumentMapperForType(b.build(mapperService), null); } private void applyOperation(Engine engine, Engine.Operation operation) throws IOException { switch (operation.operationType()) { case INDEX: - Engine.Index engineIndex = (Engine.Index) operation; - Mapping update = engineIndex.parsedDoc().dynamicMappingsUpdate(); - if (engineIndex.parsedDoc().dynamicMappingsUpdate() != null) { - recoveredTypes.compute( - engineIndex.type(), - (k, mapping) -> mapping == null ? update : mapping.merge(update, MapperService.MergeReason.MAPPING_RECOVERY) - ); - } - engine.index(engineIndex); + engine.index((Engine.Index) operation); break; case DELETE: engine.delete((Engine.Delete) operation); @@ -122,13 +111,6 @@ private void applyOperation(Engine engine, Engine.Operation operation) throws IO } } - /** - * Returns the recovered types modifying the mapping during the recovery - */ - public Map getRecoveredTypes() { - return recoveredTypes; - } - @Override public int run(Engine engine, Translog.Snapshot snapshot) throws IOException { int opsRecovered = 0; @@ -150,15 +132,8 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O final Translog.Index index = (Translog.Index) operation; final String indexName = mapperService.index().getName(); final Engine.Index engineIndex = IndexShard.prepareIndex( - docMapper(index.type()), - new SourceToParse( - indexName, - index.type(), - index.id(), - index.source(), - XContentHelper.xContentType(index.source()), - index.routing() - ), + docMapper(MapperService.SINGLE_MAPPING_NAME), + 
new SourceToParse(indexName, index.id(), index.source(), XContentHelper.xContentType(index.source()), index.routing()), index.seqNo(), index.primaryTerm(), index.version(), @@ -173,7 +148,6 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O case DELETE: final Translog.Delete delete = (Translog.Delete) operation; final Engine.Delete engineDelete = new Engine.Delete( - delete.type(), delete.id(), delete.uid(), delete.seqNo(), diff --git a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java index fa0309ef165d4..03ac664da1734 100644 --- a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java @@ -183,11 +183,11 @@ protected final SourceToParse source(CheckedConsumer indexMapping = Collections.singletonMap("type", "{ \"type\": {} }"); + protected final String indexMapping = "{ \"" + MapperService.SINGLE_MAPPING_NAME + "\": {} }"; protected ReplicationGroup createGroup(int replicas) throws IOException { return createGroup(replicas, Settings.EMPTY); @@ -143,11 +143,11 @@ protected IndexMetadata buildIndexMetadata(int replicas) throws IOException { return buildIndexMetadata(replicas, indexMapping); } - protected IndexMetadata buildIndexMetadata(int replicas, Map mappings) throws IOException { + protected IndexMetadata buildIndexMetadata(int replicas, String mappings) throws IOException { return buildIndexMetadata(replicas, Settings.EMPTY, mappings); } - protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, Map mappings) throws IOException { + protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, String mappings) throws IOException { Settings settings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) 
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, replicas) @@ -155,10 +155,11 @@ protected IndexMetadata buildIndexMetadata(int replicas, Settings indexSettings, .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000)) .put(indexSettings) .build(); - IndexMetadata.Builder metadata = IndexMetadata.builder(index.getName()).settings(settings).primaryTerm(0, randomIntBetween(1, 100)); - for (Map.Entry typeMapping : mappings.entrySet()) { - metadata.putMapping(typeMapping.getKey(), typeMapping.getValue()); - } + IndexMetadata.Builder metadata = IndexMetadata.builder(index.getName()) + .settings(settings) + .putMapping(mappings) + .primaryTerm(0, randomIntBetween(1, 100)); + return metadata.build(); } diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 09c5dfad486e9..6b18963056450 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -279,7 +279,7 @@ protected IndexShard newShard( IndexMetadata.Builder metadata = IndexMetadata.builder(shardRouting.getIndexName()) .settings(indexSettings) .primaryTerm(0, primaryTerm) - .putMapping("_doc", "{ \"properties\": {} }"); + .putMapping("{ \"properties\": {} }"); return newShard(shardRouting, metadata.build(), null, engineFactory, () -> {}, RetentionLeaseSyncer.EMPTY, listeners); } @@ -877,25 +877,12 @@ protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id) } protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, String source) throws IOException { - return indexDoc(shard, type, id, source, XContentType.JSON, null); + return indexDoc(shard, id, source, XContentType.JSON, null); } - protected Engine.IndexResult indexDoc( - IndexShard shard, - String type, - String id, - String source, - 
XContentType xContentType, - String routing - ) throws IOException { - SourceToParse sourceToParse = new SourceToParse( - shard.shardId().getIndexName(), - type, - id, - new BytesArray(source), - xContentType, - routing - ); + protected Engine.IndexResult indexDoc(IndexShard shard, String id, String source, XContentType xContentType, String routing) + throws IOException { + SourceToParse sourceToParse = new SourceToParse(shard.shardId().getIndexName(), id, new BytesArray(source), xContentType, routing); Engine.IndexResult result; if (shard.routingEntry().primary()) { result = shard.applyIndexOperationOnPrimary( @@ -911,7 +898,7 @@ protected Engine.IndexResult indexDoc( updateMappings( shard, IndexMetadata.builder(shard.indexSettings().getIndexMetadata()) - .putMapping(type, result.getRequiredMappingUpdate().toString()) + .putMapping(result.getRequiredMappingUpdate().toString()) .build() ); result = shard.applyIndexOperationOnPrimary( @@ -956,12 +943,11 @@ protected void updateMappings(IndexShard shard, IndexMetadata indexMetadata) { ); } - protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id) throws IOException { + protected Engine.DeleteResult deleteDoc(IndexShard shard, String id) throws IOException { final Engine.DeleteResult result; if (shard.routingEntry().primary()) { result = shard.applyDeleteOperationOnPrimary( Versions.MATCH_ANY, - type, id, VersionType.INTERNAL, SequenceNumbers.UNASSIGNED_SEQ_NO, @@ -972,7 +958,7 @@ protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id } else { final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1; shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate max_seq_no_of_updates - result = shard.applyDeleteOperationOnReplica(seqNo, shard.getOperationPrimaryTerm(), 0L, type, id); + result = shard.applyDeleteOperationOnReplica(seqNo, shard.getOperationPrimaryTerm(), 0L, id); shard.sync(); // advance local checkpoint } return result; From 
bdcaec5caf4a55afc8a2e5e5f136c2f65d098fd6 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Sun, 13 Mar 2022 13:49:51 -0400 Subject: [PATCH 22/46] [Remove] types from Uid and remaining types/Uid from translog (#2450) Removes types from UID class along with cleaning up all obsolete MapperService dependendices. This includes removing UID from the Translog Delete operation which is no longer needed due to type dependency removal. Signed-off-by: Nicholas Walter Knize --- .../org/opensearch/index/engine/Engine.java | 10 +---- .../index/engine/InternalEngine.java | 10 +---- .../index/engine/LuceneChangesSnapshot.java | 25 +++-------- .../index/engine/ReadOnlyEngine.java | 9 +--- .../index/fieldvisitor/FieldsVisitor.java | 17 ++----- .../java/org/opensearch/index/mapper/Uid.java | 45 ++----------------- .../opensearch/index/shard/IndexShard.java | 11 +++-- .../opensearch/index/translog/Translog.java | 40 ++++++++--------- .../opensearch/search/fetch/FetchPhase.java | 15 +++---- .../fetch/subphase/InnerHitsContext.java | 12 +++-- .../search/fetch/subphase/InnerHitsPhase.java | 4 +- .../index/engine/InternalEngineTests.java | 45 ++++++++----------- .../engine/LuceneChangesSnapshotTests.java | 19 +++----- .../index/translog/TranslogTests.java | 37 ++++++++------- .../recovery/RecoverySourceHandlerTests.java | 8 +--- .../index/engine/EngineTestCase.java | 18 ++++---- .../index/engine/TranslogHandler.java | 5 +-- .../index/shard/IndexShardTestCase.java | 2 +- 18 files changed, 108 insertions(+), 224 deletions(-) diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index fe026dd3251eb..7cf7b3245c0e5 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -72,7 +72,6 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ReleasableLock; import 
org.opensearch.index.VersionType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.Mapping; import org.opensearch.index.mapper.ParseContext.Document; import org.opensearch.index.mapper.ParsedDocument; @@ -736,13 +735,8 @@ public enum SearcherScope { * Creates a new history snapshot from Lucene for reading operations whose seqno in the requesting seqno range (both inclusive). * This feature requires soft-deletes enabled. If soft-deletes are disabled, this method will throw an {@link IllegalStateException}. */ - public abstract Translog.Snapshot newChangesSnapshot( - String source, - MapperService mapperService, - long fromSeqNo, - long toSeqNo, - boolean requiredFullRange - ) throws IOException; + public abstract Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) + throws IOException; public abstract boolean hasCompleteOperationHistory(String reason, long startingSeqNo); diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 1756bc738cae1..438bb0b290b9c 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -91,7 +91,6 @@ import org.opensearch.index.VersionType; import org.opensearch.index.fieldvisitor.IdOnlyFieldVisitor; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.ParseContext; import org.opensearch.index.mapper.ParsedDocument; import org.opensearch.index.mapper.SeqNoFieldMapper; @@ -2773,20 +2772,13 @@ long getNumDocUpdates() { } @Override - public Translog.Snapshot newChangesSnapshot( - String source, - MapperService mapperService, - long fromSeqNo, - long toSeqNo, - boolean requiredFullRange - ) throws IOException { + public Translog.Snapshot newChangesSnapshot(String 
source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { ensureOpen(); refreshIfNeeded(source, toSeqNo); Searcher searcher = acquireSearcher(source, SearcherScope.INTERNAL); try { LuceneChangesSnapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE, fromSeqNo, toSeqNo, diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index fce866b624367..d640cf1468ec3 100644 --- a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocValuesFieldExistsQuery; @@ -51,11 +50,8 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.core.internal.io.IOUtils; import org.opensearch.index.fieldvisitor.FieldsVisitor; -import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; -import org.opensearch.index.mapper.Uid; import org.opensearch.index.translog.Translog; import java.io.Closeable; @@ -77,7 +73,6 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { private final boolean requiredFullRange; private final IndexSearcher indexSearcher; - private final MapperService mapperService; private int docIndex = 0; private final int totalHits; private ScoreDoc[] scoreDocs; @@ -88,20 +83,13 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { * Creates a new "translog" snapshot from 
Lucene for reading operations whose seq# in the specified range. * * @param engineSearcher the internal engine searcher which will be taken over if the snapshot is opened successfully - * @param mapperService the mapper service which will be mainly used to resolve the document's type and uid * @param searchBatchSize the number of documents should be returned by each search * @param fromSeqNo the min requesting seq# - inclusive * @param toSeqNo the maximum requesting seq# - inclusive * @param requiredFullRange if true, the snapshot will strictly check for the existence of operations between fromSeqNo and toSeqNo */ - LuceneChangesSnapshot( - Engine.Searcher engineSearcher, - MapperService mapperService, - int searchBatchSize, - long fromSeqNo, - long toSeqNo, - boolean requiredFullRange - ) throws IOException { + LuceneChangesSnapshot(Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, long toSeqNo, boolean requiredFullRange) + throws IOException { if (fromSeqNo < 0 || toSeqNo < 0 || fromSeqNo > toSeqNo) { throw new IllegalArgumentException("Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + toSeqNo + "]"); } @@ -114,7 +102,6 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { IOUtils.close(engineSearcher); } }; - this.mapperService = mapperService; final long requestingSize = (toSeqNo - fromSeqNo) == Long.MAX_VALUE ? Long.MAX_VALUE : (toSeqNo - fromSeqNo + 1L); this.searchBatchSize = requestingSize < searchBatchSize ? 
Math.toIntExact(requestingSize) : searchBatchSize; this.fromSeqNo = fromSeqNo; @@ -278,19 +265,17 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException { : SourceFieldMapper.NAME; final FieldsVisitor fields = new FieldsVisitor(true, sourceField); leaf.reader().document(segmentDocID, fields); - fields.postProcess(mapperService); final Translog.Operation op; final boolean isTombstone = parallelArray.isTombStone[docIndex]; - if (isTombstone && fields.uid() == null) { + if (isTombstone && fields.id() == null) { op = new Translog.NoOp(seqNo, primaryTerm, fields.source().utf8ToString()); assert version == 1L : "Noop tombstone should have version 1L; actual version [" + version + "]"; assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]"; } else { - final String id = fields.uid().id(); - final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); + final String id = fields.id(); if (isTombstone) { - op = new Translog.Delete(id, uid, seqNo, primaryTerm, version); + op = new Translog.Delete(id, seqNo, primaryTerm, version); assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]"; } else { final BytesReference source = fields.source(); diff --git a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java index 9bbffb7cc19d6..32d6b9b98d169 100644 --- a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java @@ -46,7 +46,6 @@ import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.util.concurrent.ReleasableLock; import org.opensearch.core.internal.io.IOUtils; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.seqno.SequenceNumbers; import 
org.opensearch.index.store.Store; @@ -326,13 +325,7 @@ public Closeable acquireHistoryRetentionLock() { } @Override - public Translog.Snapshot newChangesSnapshot( - String source, - MapperService mapperService, - long fromSeqNo, - long toSeqNo, - boolean requiredFullRange - ) { + public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) { return newEmptySnapshot(); } diff --git a/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java index c5fddb5d26c5b..a51137b4a4f69 100644 --- a/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java +++ b/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java @@ -36,7 +36,6 @@ import org.apache.lucene.util.BytesRef; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; -import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.IgnoredFieldMapper; import org.opensearch.index.mapper.MappedFieldType; @@ -67,7 +66,7 @@ public class FieldsVisitor extends StoredFieldVisitor { private final String sourceFieldName; private final Set requiredFields; protected BytesReference source; - protected String type, id; + protected String id; protected Map> fieldsValues; public FieldsVisitor(boolean loadSource) { @@ -98,10 +97,6 @@ public Status needsField(FieldInfo fieldInfo) { } public void postProcess(MapperService mapperService) { - final DocumentMapper mapper = mapperService.documentMapper(); - if (mapper != null) { - type = mapper.type(); - } for (Map.Entry> entry : fields().entrySet()) { MappedFieldType fieldType = mapperService.fieldType(entry.getKey()); if (fieldType == null) { @@ -167,13 +162,8 @@ public BytesReference source() { return source; } - public Uid uid() { - if (id == null) { - return null; - } else if (type == null) { - 
throw new IllegalStateException("Call postProcess before getting the uid"); - } - return new Uid(type, id); + public String id() { + return id; } public String routing() { @@ -195,7 +185,6 @@ public Map> fields() { public void reset() { if (fieldsValues != null) fieldsValues.clear(); source = null; - type = null; id = null; requiredFields.addAll(BASE_REQUIRED_FIELDS); diff --git a/server/src/main/java/org/opensearch/index/mapper/Uid.java b/server/src/main/java/org/opensearch/index/mapper/Uid.java index 90d54319c026b..fa149b9dcac46 100644 --- a/server/src/main/java/org/opensearch/index/mapper/Uid.java +++ b/server/src/main/java/org/opensearch/index/mapper/Uid.java @@ -43,52 +43,13 @@ public final class Uid { public static final char DELIMITER = '#'; public static final byte DELIMITER_BYTE = 0x23; - private final String type; - - private final String id; - - public Uid(String type, String id) { - this.type = type; - this.id = id; - } - - public String type() { - return type; - } - - public String id() { - return id; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Uid uid = (Uid) o; - - if (id != null ? !id.equals(uid.id) : uid.id != null) return false; - if (type != null ? !type.equals(uid.type) : uid.type != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = type != null ? type.hashCode() : 0; - result = 31 * result + (id != null ? 
id.hashCode() : 0); - return result; - } - - @Override - public String toString() { - return type + "#" + id; - } - private static final int UTF8 = 0xff; private static final int NUMERIC = 0xfe; private static final int BASE64_ESCAPE = 0xfd; + // non-instantiable + private Uid() {} + static boolean isURLBase64WithoutPadding(String id) { // We are not lenient about padding chars ('=') otherwise // 'xxx=' and 'xxx' could be considered the same id diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 9aac2c11e2d35..ad370051c53ac 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -1069,14 +1069,12 @@ private Engine.DeleteResult applyDeleteOperation( + getOperationPrimaryTerm() + "]"; ensureWriteAllowed(origin); - final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); - final Engine.Delete delete = prepareDelete(id, uid, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm); + final Engine.Delete delete = prepareDelete(id, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm); return delete(engine, delete); } - private Engine.Delete prepareDelete( + public static Engine.Delete prepareDelete( String id, - Term uid, long seqNo, long primaryTerm, long version, @@ -1086,6 +1084,7 @@ private Engine.Delete prepareDelete( long ifPrimaryTerm ) { long startTime = System.nanoTime(); + final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm); } @@ -2238,7 +2237,7 @@ public Closeable acquireHistoryRetentionLock() { * The returned snapshot can be retrieved from either Lucene index or translog files. 
*/ public Translog.Snapshot getHistoryOperations(String reason, long startingSeqNo, long endSeqNo) throws IOException { - return getEngine().newChangesSnapshot(reason, mapperService, startingSeqNo, endSeqNo, true); + return getEngine().newChangesSnapshot(reason, startingSeqNo, endSeqNo, true); } /** @@ -2270,7 +2269,7 @@ public long getMinRetainedSeqNo() { * This parameter should be only enabled when the entire requesting range is below the global checkpoint. */ public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { - return getEngine().newChangesSnapshot(source, mapperService, fromSeqNo, toSeqNo, requiredFullRange); + return getEngine().newChangesSnapshot(source, fromSeqNo, toSeqNo, requiredFullRange); } public List segments(boolean verbose) { diff --git a/server/src/main/java/org/opensearch/index/translog/Translog.java b/server/src/main/java/org/opensearch/index/translog/Translog.java index e04dd6681705e..2586599d3ed59 100644 --- a/server/src/main/java/org/opensearch/index/translog/Translog.java +++ b/server/src/main/java/org/opensearch/index/translog/Translog.java @@ -33,7 +33,6 @@ package org.opensearch.index.translog; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.index.Term; import org.apache.lucene.store.AlreadyClosedException; import org.opensearch.Version; import org.opensearch.common.Nullable; @@ -54,7 +53,9 @@ import org.opensearch.index.VersionType; import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.MissingHistoryOperationsException; +import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.MapperService; +import org.opensearch.index.mapper.Uid; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.AbstractIndexShardComponent; import org.opensearch.index.shard.IndexShardComponent; @@ -1384,7 +1385,6 @@ public static class Delete implements 
Operation { public static final int SERIALIZATION_FORMAT = FORMAT_NO_DOC_TYPE; private final String id; - private final Term uid; private final long seqNo; private final long primaryTerm; private final long version; @@ -1397,7 +1397,11 @@ private Delete(final StreamInput in) throws IOException { // Can't assert that this is _doc because pre 2.0 indexes can have any name for a type } id = in.readString(); - uid = new Term(in.readString(), in.readBytesRef()); + if (format < FORMAT_NO_DOC_TYPE) { + final String docType = in.readString(); + assert docType.equals(IdFieldMapper.NAME) : docType + " != " + IdFieldMapper.NAME; + in.readBytesRef(); // uid + } this.version = in.readLong(); if (format < FORMAT_NO_VERSION_TYPE) { in.readByte(); // versionType @@ -1407,17 +1411,16 @@ private Delete(final StreamInput in) throws IOException { } public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) { - this(delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); + this(delete.id(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); } /** utility for testing */ - public Delete(String id, long seqNo, long primaryTerm, Term uid) { - this(id, uid, seqNo, primaryTerm, Versions.MATCH_ANY); + public Delete(String id, long seqNo, long primaryTerm) { + this(id, seqNo, primaryTerm, Versions.MATCH_ANY); } - public Delete(String id, Term uid, long seqNo, long primaryTerm, long version) { + public Delete(String id, long seqNo, long primaryTerm, long version) { this.id = Objects.requireNonNull(id); - this.uid = uid; this.seqNo = seqNo; this.primaryTerm = primaryTerm; this.version = version; @@ -1430,18 +1433,14 @@ public Type opType() { @Override public long estimateSize() { - return (id.length() * 2) + ((uid.field().length() * 2) + (uid.text().length()) * 2) + (3 * Long.BYTES); // seq_no, primary_term, - // and version; + return (id.length() * 2) + (3 * Long.BYTES); // seq_no, primary_term, + // and version; } 
public String id() { return id; } - public Term uid() { - return this.uid; - } - @Override public long seqNo() { return seqNo; @@ -1468,8 +1467,10 @@ private void write(final StreamOutput out) throws IOException { out.writeString(MapperService.SINGLE_MAPPING_NAME); } out.writeString(id); - out.writeString(uid.field()); - out.writeBytesRef(uid.bytes()); + if (format < FORMAT_NO_DOC_TYPE) { + out.writeString(IdFieldMapper.NAME); + out.writeBytesRef(Uid.encodeId(id)); + } out.writeLong(version); if (format < FORMAT_NO_VERSION_TYPE) { out.writeByte(VersionType.EXTERNAL.getValue()); @@ -1489,13 +1490,12 @@ public boolean equals(Object o) { Delete delete = (Delete) o; - return version == delete.version && seqNo == delete.seqNo && primaryTerm == delete.primaryTerm && uid.equals(delete.uid); + return version == delete.version && seqNo == delete.seqNo && primaryTerm == delete.primaryTerm; } @Override public int hashCode() { - int result = uid.hashCode(); - result = 31 * result + Long.hashCode(seqNo); + int result = Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); return result; @@ -1503,7 +1503,7 @@ public int hashCode() { @Override public String toString() { - return "Delete{" + "uid=" + uid + ", seqNo=" + seqNo + ", primaryTerm=" + primaryTerm + ", version=" + version + '}'; + return "Delete{" + "seqNo=" + seqNo + ", primaryTerm=" + primaryTerm + ", version=" + version + '}'; } } diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java index 7b1beaed07c4f..a74497477099a 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java @@ -60,7 +60,6 @@ import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.ObjectMapper; import org.opensearch.index.mapper.SourceFieldMapper; -import 
org.opensearch.index.mapper.Uid; import org.opensearch.search.SearchContextSourcePrinter; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; @@ -335,14 +334,14 @@ private HitContext prepareNonNestedHitContext( } else { SearchHit hit; loadStoredFields(context.mapperService(), fieldReader, fieldsVisitor, subDocId); - Uid uid = fieldsVisitor.uid(); + String id = fieldsVisitor.id(); if (fieldsVisitor.fields().isEmpty() == false) { Map docFields = new HashMap<>(); Map metaFields = new HashMap<>(); fillDocAndMetaFields(context, fieldsVisitor, storedToRequestedFields, docFields, metaFields); - hit = new SearchHit(docId, uid.id(), docFields, metaFields); + hit = new SearchHit(docId, id, docFields, metaFields); } else { - hit = new SearchHit(docId, uid.id(), emptyMap(), emptyMap()); + hit = new SearchHit(docId, id, emptyMap(), emptyMap()); } HitContext hitContext = new HitContext(hit, subReaderContext, subDocId, lookup.source()); @@ -375,7 +374,7 @@ private HitContext prepareNestedHitContext( // because the entire _source is only stored with the root document. 
boolean needSource = sourceRequired(context) || context.highlight() != null; - Uid rootId; + String rootId; Map rootSourceAsMap = null; XContentType rootSourceContentType = null; @@ -383,7 +382,7 @@ private HitContext prepareNestedHitContext( if (context instanceof InnerHitsContext.InnerHitSubContext) { InnerHitsContext.InnerHitSubContext innerHitsContext = (InnerHitsContext.InnerHitSubContext) context; - rootId = innerHitsContext.getRootId(); + rootId = innerHitsContext.getId(); if (needSource) { SourceLookup rootLookup = innerHitsContext.getRootLookup(); @@ -394,7 +393,7 @@ private HitContext prepareNestedHitContext( FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); loadStoredFields(context.mapperService(), storedFieldReader, rootFieldsVisitor, rootDocId); rootFieldsVisitor.postProcess(context.mapperService()); - rootId = rootFieldsVisitor.uid(); + rootId = rootFieldsVisitor.id(); if (needSource) { if (rootFieldsVisitor.source() != null) { @@ -431,7 +430,7 @@ private HitContext prepareNestedHitContext( nestedObjectMapper ); - SearchHit hit = new SearchHit(nestedTopDocId, rootId.id(), nestedIdentity, docFields, metaFields); + SearchHit hit = new SearchHit(nestedTopDocId, rootId, nestedIdentity, docFields, metaFields); HitContext hitContext = new HitContext(hit, subReaderContext, nestedDocId, new SourceLookup()); // Use a clean, fresh SourceLookup // for the nested context diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java index 792fc61db8ed3..d7ac7d21f1922 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java @@ -45,7 +45,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; -import org.opensearch.index.mapper.Uid; 
import org.opensearch.search.SearchHit; import org.opensearch.search.internal.SearchContext; import org.opensearch.search.internal.SubSearchContext; @@ -98,8 +97,7 @@ public abstract static class InnerHitSubContext extends SubSearchContext { private InnerHitsContext childInnerHits; private Weight innerHitQueryWeight; - // TODO: when types are complete removed just use String instead for the id: - private Uid rootId; + private String id; private SourceLookup rootLookup; protected InnerHitSubContext(String name, SearchContext context) { @@ -141,12 +139,12 @@ public SearchContext parentSearchContext() { * * Since this ID is available on the context, inner hits can avoid re-loading the root _id. */ - public Uid getRootId() { - return rootId; + public String getId() { + return id; } - public void setRootId(Uid rootId) { - this.rootId = rootId; + public void setId(String id) { + this.id = id; } /** diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsPhase.java index 2bb610f49215c..56020d334a446 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsPhase.java @@ -36,8 +36,6 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; -import org.opensearch.index.mapper.MapperService; -import org.opensearch.index.mapper.Uid; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; import org.opensearch.search.fetch.FetchContext; @@ -96,7 +94,7 @@ private void hitExecute(Map innerHi docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc; } innerHitsContext.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); - innerHitsContext.setRootId(new Uid(MapperService.SINGLE_MAPPING_NAME, hit.getId())); + innerHitsContext.setId(hit.getId()); 
innerHitsContext.setRootLookup(rootLookup); fetchPhase.execute(innerHitsContext); diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index aeba4b1b2f0e7..af9b913b11d56 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -125,7 +125,6 @@ import org.opensearch.index.codec.CodecService; import org.opensearch.index.fieldvisitor.FieldsVisitor; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.ParseContext; import org.opensearch.index.mapper.ParseContext.Document; import org.opensearch.index.mapper.ParsedDocument; @@ -1464,7 +1463,6 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final MapperService mapperService = createMapperService(); final Set liveDocs = new HashSet<>(); try ( Store store = createStore(); @@ -1502,8 +1500,8 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { safeCommitCheckpoint = Long.parseLong(wrappedSafeCommit.get().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); - Map ops = readAllOperationsInLucene(engine, mapperService).stream() + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); + Map ops = readAllOperationsInLucene(engine).stream() .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity())); for (long 
seqno = 0; seqno <= localCheckpoint; seqno++) { long minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps, safeCommitCheckpoint + 1); @@ -1530,8 +1528,8 @@ public void testForceMergeWithSoftDeletesRetention() throws Exception { engine.syncTranslog(); engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); - assertThat(readAllOperationsInLucene(engine, mapperService), hasSize(liveDocs.size())); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); + assertThat(readAllOperationsInLucene(engine), hasSize(liveDocs.size())); } } @@ -1543,7 +1541,6 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build(); final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata); final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - final MapperService mapperService = createMapperService(); final boolean omitSourceAllTheTime = randomBoolean(); final Set liveDocs = new HashSet<>(); final Set liveDocsWithSource = new HashSet<>(); @@ -1595,8 +1592,8 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps, safeCommitLocalCheckpoint + 1); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); - Map ops = readAllOperationsInLucene(engine, mapperService).stream() + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); + Map ops = readAllOperationsInLucene(engine).stream() .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity())); for (long seqno = 0; seqno <= engine.getPersistedLocalCheckpoint(); seqno++) { String msg = "seq# [" + seqno 
+ "], global checkpoint [" + globalCheckpoint + "], retained-ops [" + retainedExtraOps + "]"; @@ -1642,8 +1639,8 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc engine.syncTranslog(); } engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); - assertThat(readAllOperationsInLucene(engine, mapperService), hasSize(liveDocsWithSource.size())); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); + assertThat(readAllOperationsInLucene(engine), hasSize(liveDocsWithSource.size())); } } @@ -3963,7 +3960,7 @@ public void testDoubleDeliveryReplica() throws IOException { assertEquals(1, topDocs.totalHits.value); } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - List ops = readAllOperationsInLucene(engine, createMapperService()); + List ops = readAllOperationsInLucene(engine); assertThat(ops.stream().map(o -> o.seqNo()).collect(Collectors.toList()), hasItem(20L)); } } @@ -4933,8 +4930,7 @@ protected long doGenerateSeqNoForOperation(Operation operation) { assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get())); assertThat(noOp.reason(), equalTo(reason)); if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - MapperService mapperService = createMapperService(); - List operationsFromLucene = readAllOperationsInLucene(noOpEngine, mapperService); + List operationsFromLucene = readAllOperationsInLucene(noOpEngine); assertThat(operationsFromLucene, hasSize(maxSeqNo + 2 - localCheckpoint)); // fills n gap and 2 manual noop. 
for (int i = 0; i < operationsFromLucene.size(); i++) { assertThat( @@ -4942,7 +4938,7 @@ protected long doGenerateSeqNoForOperation(Operation operation) { equalTo(new Translog.NoOp(localCheckpoint + 1 + i, primaryTerm.get(), "filling gaps")) ); } - assertConsistentHistoryBetweenTranslogAndLuceneIndex(noOpEngine, mapperService); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(noOpEngine); } } finally { IOUtils.close(noOpEngine); @@ -5010,7 +5006,7 @@ public void testRandomOperations() throws Exception { } } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { - List operations = readAllOperationsInLucene(engine, createMapperService()); + List operations = readAllOperationsInLucene(engine); assertThat(operations, hasSize(numOps)); } } @@ -5167,7 +5163,7 @@ public void testRestoreLocalHistoryFromTranslog() throws IOException { equalTo(0) ); } - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService()); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); } } @@ -6120,8 +6116,7 @@ public void testHistoryBasedOnSource() throws Exception { engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); } } - MapperService mapperService = createMapperService(); - List luceneOps = readAllOperationsBasedOnSource(engine, mapperService); + List luceneOps = readAllOperationsBasedOnSource(engine); assertThat(luceneOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); } } @@ -6187,10 +6182,9 @@ private void assertOperationHistoryInLucene(List operations) t engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); } } - MapperService mapperService = createMapperService(); - List actualOps = readAllOperationsInLucene(engine, mapperService); + List actualOps = readAllOperationsInLucene(engine); assertThat(actualOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); - 
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); } } @@ -6277,9 +6271,7 @@ public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException { long minRetainSeqNos = engine.getMinRetainedSeqNo(); assertThat(minRetainSeqNos, lessThanOrEqualTo(globalCheckpoint.get() + 1)); Long[] expectedOps = existingSeqNos.stream().filter(seqno -> seqno >= minRetainSeqNos).toArray(Long[]::new); - Set actualOps = readAllOperationsInLucene(engine, createMapperService()).stream() - .map(Translog.Operation::seqNo) - .collect(Collectors.toSet()); + Set actualOps = readAllOperationsInLucene(engine).stream().map(Translog.Operation::seqNo).collect(Collectors.toSet()); assertThat(actualOps, containsInAnyOrder(expectedOps)); } try (GatedCloseable wrappedSafeCommit = engine.acquireSafeIndexCommit()) { @@ -6326,7 +6318,6 @@ public void testLastRefreshCheckpoint() throws Exception { } public void testLuceneSnapshotRefreshesOnlyOnce() throws Exception { - final MapperService mapperService = createMapperService(); final long maxSeqNo = randomLongBetween(10, 50); final AtomicLong refreshCounter = new AtomicLong(); try ( @@ -6371,7 +6362,7 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { latch.await(); - Translog.Snapshot changes = engine.newChangesSnapshot("test", mapperService, min, max, true); + Translog.Snapshot changes = engine.newChangesSnapshot("test", min, max, true); changes.close(); } }); diff --git a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java index 05b6c77cad818..bd191e235369d 100644 --- a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java +++ b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java @@ -74,14 +74,14 @@ public void testBasics() throws Exception { long fromSeqNo 
= randomNonNegativeLong(); long toSeqNo = randomLongBetween(fromSeqNo, Long.MAX_VALUE); // Empty engine - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { IllegalStateException error = expectThrows(IllegalStateException.class, () -> drainAll(snapshot)); assertThat( error.getMessage(), containsString("Not all operations between from_seqno [" + fromSeqNo + "] and to_seqno [" + toSeqNo + "] found") ); } - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, false)) { assertThat(snapshot, SnapshotMatchers.size(0)); } int numOps = between(1, 100); @@ -111,7 +111,6 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, @@ -128,7 +127,6 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, @@ -151,7 +149,6 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, @@ -167,7 +164,6 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, @@ -188,7 +184,6 @@ public void testBasics() throws Exception { try ( Translog.Snapshot snapshot = new LuceneChangesSnapshot( searcher, - mapperService, between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, @@ 
-204,7 +199,7 @@ public void testBasics() throws Exception { // Get snapshot via engine will auto refresh fromSeqNo = randomLongBetween(0, numOps - 1); toSeqNo = randomLongBetween(fromSeqNo, numOps - 1); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, randomBoolean())) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsSeqNoRange(fromSeqNo, toSeqNo)); } } @@ -235,7 +230,7 @@ public void testSkipNonRootOfNestedDocuments() throws Exception { long maxSeqNo = engine.getLocalCheckpointTracker().getMaxSeqNo(); engine.refresh("test"); Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); - try (Translog.Snapshot snapshot = new LuceneChangesSnapshot(searcher, mapperService, between(1, 100), 0, maxSeqNo, false)) { + try (Translog.Snapshot snapshot = new LuceneChangesSnapshot(searcher, between(1, 100), 0, maxSeqNo, false)) { assertThat(snapshot.totalOperations(), equalTo(seqNoToTerm.size())); Translog.Operation op; while ((op = snapshot.next()) != null) { @@ -311,7 +306,7 @@ void pullOperations(InternalEngine follower) throws IOException { long fromSeqNo = followerCheckpoint + 1; long batchSize = randomLongBetween(0, 100); long toSeqNo = Math.min(fromSeqNo + batchSize, leaderCheckpoint); - try (Translog.Snapshot snapshot = leader.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = leader.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { translogHandler.run(follower, snapshot); } } @@ -327,7 +322,7 @@ public void run() { .getProcessedCheckpoint()) { pullOperations(engine); } - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); // have to verify without source since we are randomly testing without _source List 
docsWithoutSourceOnFollower = getDocIds(engine, true).stream() .map(d -> new DocIdSeqNoAndSource(d.getId(), null, d.getSeqNo(), d.getPrimaryTerm(), d.getVersion())) @@ -357,7 +352,7 @@ private List drainAll(Translog.Snapshot snapshot) throws IOE public void testOverFlow() throws Exception { long fromSeqNo = randomLongBetween(0, 5); long toSeqNo = randomLongBetween(Long.MAX_VALUE - 5, Long.MAX_VALUE); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapperService, fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { IllegalStateException error = expectThrows(IllegalStateException.class, () -> drainAll(snapshot)); assertThat( error.getMessage(), diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java index f1eb5666f6b7f..35fec28a1c798 100644 --- a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java @@ -364,7 +364,7 @@ public void testSimpleOperations() throws IOException { assertThat(snapshot.totalOperations(), equalTo(ops.size())); } - addToTranslogAndList(translog, ops, new Translog.Delete("2", 1, primaryTerm.get(), newUid("2"))); + addToTranslogAndList(translog, ops, new Translog.Delete("2", 1, primaryTerm.get())); try (Translog.Snapshot snapshot = translog.newSnapshot()) { assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); assertThat(snapshot.totalOperations(), equalTo(ops.size())); @@ -383,7 +383,7 @@ public void testSimpleOperations() throws IOException { Translog.Delete delete = (Translog.Delete) snapshot.next(); assertNotNull(delete); - assertThat(delete.uid(), equalTo(newUid("2"))); + assertThat(delete.id(), equalTo("2")); Translog.NoOp noOp = (Translog.NoOp) snapshot.next(); assertNotNull(noOp); @@ -465,23 +465,23 @@ public void testStats() throws IOException { 
assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } - translog.add(new Translog.Delete("2", 1, primaryTerm.get(), newUid("2"))); + translog.add(new Translog.Delete("2", 1, primaryTerm.get())); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(2)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(200L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(193L)); assertThat(stats.getUncommittedOperations(), equalTo(2)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(145L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(138L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } - translog.add(new Translog.Delete("3", 2, primaryTerm.get(), newUid("3"))); + translog.add(new Translog.Delete("3", 2, primaryTerm.get())); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(3)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(243L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(229L)); assertThat(stats.getUncommittedOperations(), equalTo(3)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(188L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(174L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -489,9 +489,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(285L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(271L)); assertThat(stats.getUncommittedOperations(), equalTo(4)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(230L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(216L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -499,9 +499,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), 
equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(340L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(326L)); assertThat(stats.getUncommittedOperations(), equalTo(4)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(285L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(271L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(0L)); } @@ -511,7 +511,7 @@ public void testStats() throws IOException { stats.writeTo(out); final TranslogStats copy = new TranslogStats(out.bytes().streamInput()); assertThat(copy.estimatedNumberOfOperations(), equalTo(4)); - assertThat(copy.getTranslogSizeInBytes(), equalTo(340L)); + assertThat(copy.getTranslogSizeInBytes(), equalTo(326L)); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); @@ -521,9 +521,9 @@ public void testStats() throws IOException { Strings.toString(builder), equalTo( "{\"translog\":{\"operations\":4,\"size_in_bytes\":" - + 340 + + 326 + ",\"uncommitted_operations\":4,\"uncommitted_size_in_bytes\":" - + 285 + + 271 + ",\"earliest_last_modified_age\":" + stats.getEarliestLastModifiedAge() + "}}" @@ -537,7 +537,7 @@ public void testStats() throws IOException { long lastModifiedAge = System.currentTimeMillis() - translog.getCurrent().getLastModifiedTime(); final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(340L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(326L)); assertThat(stats.getUncommittedOperations(), equalTo(0)); assertThat(stats.getUncommittedSizeInBytes(), equalTo(firstOperationPosition)); assertThat(stats.getEarliestLastModifiedAge(), greaterThanOrEqualTo(lastModifiedAge)); @@ -922,7 +922,7 @@ public void testConcurrentWritesWithVaryingSize() throws Throwable { case DELETE: Translog.Delete delOp = (Translog.Delete) op; Translog.Delete expDelOp = (Translog.Delete) expectedOp; - assertEquals(expDelOp.uid(), 
delOp.uid()); + assertEquals(expDelOp.id(), delOp.id()); assertEquals(expDelOp.version(), delOp.version()); break; case NO_OP: @@ -1076,7 +1076,7 @@ public void doRun() throws BrokenBarrierException, InterruptedException, IOExcep op = new Translog.Index("" + id, id, primaryTerm.get(), new byte[] { (byte) id }); break; case DELETE: - op = new Translog.Delete(Long.toString(id), id, primaryTerm.get(), newUid(Long.toString(id))); + op = new Translog.Delete(Long.toString(id), id, primaryTerm.get()); break; case NO_OP: op = new Translog.NoOp(id, 1, Long.toString(id)); @@ -2414,7 +2414,6 @@ public void run() { case DELETE: op = new Translog.Delete( threadId + "_" + opCount, - new Term("_uid", threadId + "_" + opCount), seqNoGenerator.getAndIncrement(), primaryTerm.get(), 1 + randomInt(100000) diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java index e713ef5d35f67..dbafab49d8655 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java @@ -1188,13 +1188,7 @@ private static List generateOperations(int numOps) { if (randomBoolean()) { op = new Translog.Index("id", seqNo, randomNonNegativeLong(), randomNonNegativeLong(), source, null, -1); } else if (randomBoolean()) { - op = new Translog.Delete( - "id", - new Term("_id", Uid.encodeId("id")), - seqNo, - randomNonNegativeLong(), - randomNonNegativeLong() - ); + op = new Translog.Delete("id", seqNo, randomNonNegativeLong(), randomNonNegativeLong()); } else { op = new Translog.NoOp(seqNo, randomNonNegativeLong(), "test"); } diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index 6c382a09b90f3..fe810a87358d0 100644 --- 
a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -329,14 +329,14 @@ public void tearDown() throws Exception { try { if (engine != null && engine.isClosed.get() == false) { engine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs(); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService()); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); assertNoInFlightDocuments(engine); assertMaxSeqNoInCommitUserData(engine); assertAtMostOneLuceneDocumentPerSequenceNumber(engine); } if (replicaEngine != null && replicaEngine.isClosed.get() == false) { replicaEngine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs(); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine, createMapperService()); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine); assertNoInFlightDocuments(replicaEngine); assertMaxSeqNoInCommitUserData(replicaEngine); assertAtMostOneLuceneDocumentPerSequenceNumber(replicaEngine); @@ -1310,9 +1310,9 @@ public static List getDocIds(Engine engine, boolean refresh * Reads all engine operations that have been processed by the engine from Lucene index. * The returned operations are sorted and de-duplicated, thus each sequence number will be have at most one operation. 
*/ - public static List readAllOperationsInLucene(Engine engine, MapperService mapper) throws IOException { + public static List readAllOperationsInLucene(Engine engine) throws IOException { final List operations = new ArrayList<>(); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapper, 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { Translog.Operation op; while ((op = snapshot.next()) != null) { operations.add(op); @@ -1324,9 +1324,9 @@ public static List readAllOperationsInLucene(Engine engine, /** * Reads all engine operations that have been processed by the engine from Lucene index/Translog based on source. */ - public static List readAllOperationsBasedOnSource(Engine engine, MapperService mapper) throws IOException { + public static List readAllOperationsBasedOnSource(Engine engine) throws IOException { final List operations = new ArrayList<>(); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", mapper, 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { Translog.Operation op; while ((op = snapshot.next()) != null) { operations.add(op); @@ -1338,8 +1338,8 @@ public static List readAllOperationsBasedOnSource(Engine eng /** * Asserts the provided engine has a consistent document history between translog and Lucene index. 
*/ - public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine engine, MapperService mapper) throws IOException { - if (mapper == null || mapper.documentMapper() == null || (engine instanceof InternalEngine) == false) { + public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine engine) throws IOException { + if (engine instanceof InternalEngine == false) { return; } final List translogOps = new ArrayList<>(); @@ -1349,7 +1349,7 @@ public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine e translogOps.add(op); } } - final Map luceneOps = readAllOperationsInLucene(engine, mapper).stream() + final Map luceneOps = readAllOperationsInLucene(engine).stream() .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity())); final long maxSeqNo = ((InternalEngine) engine).getLocalCheckpointTracker().getMaxSeqNo(); for (Translog.Operation op : translogOps) { diff --git a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java index 7dbe2c7381fd8..e1f2357aa2400 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/TranslogHandler.java @@ -147,19 +147,16 @@ public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.O return engineIndex; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; - final Engine.Delete engineDelete = new Engine.Delete( + return IndexShard.prepareDelete( delete.id(), - delete.uid(), delete.seqNo(), delete.primaryTerm(), delete.version(), versionType, origin, - System.nanoTime(), SequenceNumbers.UNASSIGNED_SEQ_NO, SequenceNumbers.UNASSIGNED_PRIMARY_TERM ); - return engineDelete; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; final Engine.NoOp engineNoOp = new Engine.NoOp(noOp.seqNo(), noOp.primaryTerm(), origin, 
System.nanoTime(), noOp.reason()); diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 6b18963056450..509edfd1b9103 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -868,7 +868,7 @@ public static void assertConsistentHistoryBetweenTranslogAndLucene(IndexShard sh } final Engine engine = shard.getEngineOrNull(); if (engine != null) { - EngineTestCase.assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, shard.mapperService()); + EngineTestCase.assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine); } } From 189563bad689b2de2e3a8742b791a6f558533088 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Mar 2022 09:36:26 -0700 Subject: [PATCH 23/46] Bump commons-lang3 from 3.4 to 3.12.0 in /plugins/repository-azure (#2455) * Bump commons-lang3 from 3.4 to 3.12.0 in /plugins/repository-azure Bumps commons-lang3 from 3.4 to 3.12.0. --- updated-dependencies: - dependency-name: org.apache.commons:commons-lang3 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-azure/build.gradle | 2 +- plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 | 1 + plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 | 1 - 3 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 1f923b8f36bbd..c531cd390e7ee 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -69,7 +69,7 @@ dependencies { api 'org.codehaus.woodstox:stax2-api:4.2.1' implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1' runtimeOnly 'com.google.guava:guava:31.1-jre' - api 'org.apache.commons:commons-lang3:3.4' + api 'org.apache.commons:commons-lang3:3.12.0' testImplementation project(':test:fixtures:azure-fixture') } diff --git a/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 new file mode 100644 index 0000000000000..9273d8c01aaba --- /dev/null +++ b/plugins/repository-azure/licenses/commons-lang3-3.12.0.jar.sha1 @@ -0,0 +1 @@ +c6842c86792ff03b9f1d1fe2aab8dc23aa6c6f0e \ No newline at end of file diff --git a/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 deleted file mode 100644 index fdd7040377b8f..0000000000000 --- a/plugins/repository-azure/licenses/commons-lang3-3.4.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5fe28b9518e58819180a43a850fbc0dd24b7c050 \ No newline at end of file From d19081356a2ee658568a3127829ab1ee8772a6fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Mar 2022 12:47:11 -0700 Subject: [PATCH 24/46] Bump woodstox-core from 6.1.1 to 6.2.8 in /plugins/repository-azure (#2456) * Bump woodstox-core from 6.1.1 to 6.2.8 in /plugins/repository-azure Bumps [woodstox-core](https://github.com/FasterXML/woodstox) from 6.1.1 to 6.2.8. - [Release notes](https://github.com/FasterXML/woodstox/releases) - [Commits](https://github.com/FasterXML/woodstox/compare/woodstox-core-6.1.1...woodstox-core-6.2.8) --- updated-dependencies: - dependency-name: com.fasterxml.woodstox:woodstox-core dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-azure/build.gradle | 2 +- plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 | 1 - plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 create mode 100644 plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index c531cd390e7ee..3dc089ef8acb7 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -67,7 +67,7 @@ dependencies { api "com.fasterxml.jackson.dataformat:jackson-dataformat-xml:${versions.jackson}" api "com.fasterxml.jackson.module:jackson-module-jaxb-annotations:${versions.jackson}" api 'org.codehaus.woodstox:stax2-api:4.2.1' - implementation 'com.fasterxml.woodstox:woodstox-core:6.1.1' + implementation 'com.fasterxml.woodstox:woodstox-core:6.2.8' runtimeOnly 'com.google.guava:guava:31.1-jre' api 'org.apache.commons:commons-lang3:3.12.0' testImplementation 
project(':test:fixtures:azure-fixture') diff --git a/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 b/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 deleted file mode 100644 index f2ad1c80882d3..0000000000000 --- a/plugins/repository-azure/licenses/woodstox-core-6.1.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -989bb31963ed1758b95c7c4381a91592a9a8df61 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 b/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 new file mode 100644 index 0000000000000..ae65cdebf26de --- /dev/null +++ b/plugins/repository-azure/licenses/woodstox-core-6.2.8.jar.sha1 @@ -0,0 +1 @@ +670748292899c53b1963730d9eb7f8ab71314e90 \ No newline at end of file From 5c0f9bc499c5c4a744d2f29fb5bd9eab4aabc004 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Mon, 14 Mar 2022 17:11:26 -0400 Subject: [PATCH 25/46] Discrepancy in result from _validate/query API and actual query validity (#2416) * Discrepancy in result from _validate/query API and actual query validity Signed-off-by: Andriy Redko * Moved the validate() check later into the flow to allow range validation to trigger first Signed-off-by: Andriy Redko --- .../validate/SimpleValidateQueryIT.java | 97 +++++++++++++++++++ .../query/TransportValidateQueryAction.java | 4 +- .../org/opensearch/index/IndexService.java | 19 +++- .../index/query/QueryRewriteContext.java | 15 +++ .../index/query/QueryShardContext.java | 53 +++++++++- .../index/query/RangeQueryBuilder.java | 9 +- .../opensearch/indices/IndicesService.java | 16 ++- .../search/DefaultSearchContext.java | 6 +- .../org/opensearch/search/SearchService.java | 26 ++++- .../search/DefaultSearchContextTests.java | 27 ++++-- 10 files changed, 248 insertions(+), 24 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java 
index 51d0a4395127a..29845b39becf2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -62,6 +62,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.index.query.QueryBuilders.queryStringQuery; +import static org.opensearch.index.query.QueryBuilders.rangeQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.allOf; @@ -500,4 +501,100 @@ public void testExplainTermsQueryWithLookup() throws Exception { .actionGet(); assertThat(response.isValid(), is(true)); } + + // Issue: https://github.com/opensearch-project/OpenSearch/issues/2036 + public void testValidateDateRangeInQueryString() throws IOException { + assertAcked(prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1))); + + assertAcked( + client().admin() + .indices() + .preparePutMapping("test") + .setSource( + XContentFactory.jsonBuilder() + .startObject() + .startObject(MapperService.SINGLE_MAPPING_NAME) + .startObject("properties") + .startObject("name") + .field("type", "keyword") + .endObject() + .startObject("timestamp") + .field("type", "date") + .endObject() + .endObject() + .endObject() + .endObject() + ) + ); + + client().prepareIndex("test").setId("1").setSource("name", "username", "timestamp", 200).get(); + refresh(); + + ValidateQueryResponse response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(100)) + .must(queryStringQuery("username").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(true)); + + // Use wildcard and date 
outside the range + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(100)) + .must(queryStringQuery("*erna*").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(false)); + + // Use wildcard and date inside the range + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery( + QueryBuilders.boolQuery() + .must(rangeQuery("timestamp").gte(0).lte(1000)) + .must(queryStringQuery("*erna*").allowLeadingWildcard(false)) + ) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(false)); + + // Use wildcard and date inside the range (allow leading wildcard) + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery(QueryBuilders.boolQuery().must(rangeQuery("timestamp").gte(0).lte(1000)).must(queryStringQuery("*erna*"))) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(true)); + + // Use invalid date range + response = client().admin() + .indices() + .prepareValidateQuery() + .setQuery(QueryBuilders.boolQuery().must(rangeQuery("timestamp").gte("aaa").lte(100))) + .setRewrite(true) + .get(); + + assertNoFailures(response); + assertThat(response.isValid(), is(false)); + + } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java index 1fb293b200e51..1849b41ce707f 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/validate/query/TransportValidateQueryAction.java @@ -131,7 +131,7 @@ protected void doExecute(Task task, ValidateQueryRequest request, ActionListener if 
(request.query() == null) { rewriteListener.onResponse(request.query()); } else { - Rewriteable.rewriteAndFetch(request.query(), searchService.getRewriteContext(timeProvider), rewriteListener); + Rewriteable.rewriteAndFetch(request.query(), searchService.getValidationRewriteContext(timeProvider), rewriteListener); } } @@ -225,7 +225,7 @@ protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest re request.nowInMillis(), request.filteringAliases() ); - SearchContext searchContext = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT); + SearchContext searchContext = searchService.createValidationContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT); try { ParsedQuery parsedQuery = searchContext.getQueryShardContext().toQuery(request.query()); searchContext.parsedQuery(parsedQuery); diff --git a/server/src/main/java/org/opensearch/index/IndexService.java b/server/src/main/java/org/opensearch/index/IndexService.java index 7c1033ecea3ad..1b301e85365ba 100644 --- a/server/src/main/java/org/opensearch/index/IndexService.java +++ b/server/src/main/java/org/opensearch/index/IndexService.java @@ -630,6 +630,22 @@ public IndexSettings getIndexSettings() { * {@link IndexReader}-specific optimizations, such as rewriting containing range queries. */ public QueryShardContext newQueryShardContext(int shardId, IndexSearcher searcher, LongSupplier nowInMillis, String clusterAlias) { + return newQueryShardContext(shardId, searcher, nowInMillis, clusterAlias, false); + } + + /** + * Creates a new QueryShardContext. + * + * Passing a {@code null} {@link IndexSearcher} will return a valid context, however it won't be able to make + * {@link IndexReader}-specific optimizations, such as rewriting containing range queries. 
+ */ + public QueryShardContext newQueryShardContext( + int shardId, + IndexSearcher searcher, + LongSupplier nowInMillis, + String clusterAlias, + boolean validate + ) { final SearchIndexNameMatcher indexNameMatcher = new SearchIndexNameMatcher( index().getName(), clusterAlias, @@ -653,7 +669,8 @@ public QueryShardContext newQueryShardContext(int shardId, IndexSearcher searche clusterAlias, indexNameMatcher, allowExpensiveQueries, - valuesSourceRegistry + valuesSourceRegistry, + validate ); } diff --git a/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java index ad1f02ce0265d..720ee077119d6 100644 --- a/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/opensearch/index/query/QueryRewriteContext.java @@ -52,6 +52,7 @@ public class QueryRewriteContext { protected final Client client; protected final LongSupplier nowInMillis; private final List>> asyncActions = new ArrayList<>(); + private final boolean validate; public QueryRewriteContext( NamedXContentRegistry xContentRegistry, @@ -59,11 +60,22 @@ public QueryRewriteContext( Client client, LongSupplier nowInMillis ) { + this(xContentRegistry, writeableRegistry, client, nowInMillis, false); + } + + public QueryRewriteContext( + NamedXContentRegistry xContentRegistry, + NamedWriteableRegistry writeableRegistry, + Client client, + LongSupplier nowInMillis, + boolean validate + ) { this.xContentRegistry = xContentRegistry; this.writeableRegistry = writeableRegistry; this.client = client; this.nowInMillis = nowInMillis; + this.validate = validate; } /** @@ -140,4 +152,7 @@ public void onFailure(Exception e) { } } + public boolean validate() { + return validate; + } } diff --git a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java index f67feadde4b41..bfc0490e507db 100644 --- 
a/server/src/main/java/org/opensearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/opensearch/index/query/QueryShardContext.java @@ -132,6 +132,48 @@ public QueryShardContext( Predicate indexNameMatcher, BooleanSupplier allowExpensiveQueries, ValuesSourceRegistry valuesSourceRegistry + ) { + this( + shardId, + indexSettings, + bigArrays, + bitsetFilterCache, + indexFieldDataLookup, + mapperService, + similarityService, + scriptService, + xContentRegistry, + namedWriteableRegistry, + client, + searcher, + nowInMillis, + clusterAlias, + indexNameMatcher, + allowExpensiveQueries, + valuesSourceRegistry, + false + ); + } + + public QueryShardContext( + int shardId, + IndexSettings indexSettings, + BigArrays bigArrays, + BitsetFilterCache bitsetFilterCache, + TriFunction, IndexFieldData> indexFieldDataLookup, + MapperService mapperService, + SimilarityService similarityService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + NamedWriteableRegistry namedWriteableRegistry, + Client client, + IndexSearcher searcher, + LongSupplier nowInMillis, + String clusterAlias, + Predicate indexNameMatcher, + BooleanSupplier allowExpensiveQueries, + ValuesSourceRegistry valuesSourceRegistry, + boolean validate ) { this( shardId, @@ -153,7 +195,8 @@ public QueryShardContext( indexSettings.getIndex().getUUID() ), allowExpensiveQueries, - valuesSourceRegistry + valuesSourceRegistry, + validate ); } @@ -175,7 +218,8 @@ public QueryShardContext(QueryShardContext source) { source.indexNameMatcher, source.fullyQualifiedIndex, source.allowExpensiveQueries, - source.valuesSourceRegistry + source.valuesSourceRegistry, + source.validate() ); } @@ -196,9 +240,10 @@ private QueryShardContext( Predicate indexNameMatcher, Index fullyQualifiedIndex, BooleanSupplier allowExpensiveQueries, - ValuesSourceRegistry valuesSourceRegistry + ValuesSourceRegistry valuesSourceRegistry, + boolean validate ) { - super(xContentRegistry, namedWriteableRegistry, 
client, nowInMillis); + super(xContentRegistry, namedWriteableRegistry, client, nowInMillis, validate); this.shardId = shardId; this.similarityService = similarityService; this.mapperService = mapperService; diff --git a/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java index 1c27946514a3d..80b792d750546 100644 --- a/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/RangeQueryBuilder.java @@ -452,7 +452,7 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC } DateMathParser dateMathParser = getForceDateParser(); - return fieldType.isFieldWithinQuery( + final MappedFieldType.Relation relation = fieldType.isFieldWithinQuery( shardContext.getIndexReader(), from, to, @@ -462,6 +462,13 @@ protected MappedFieldType.Relation getRelation(QueryRewriteContext queryRewriteC dateMathParser, queryRewriteContext ); + + // For validation, always assume that there is an intersection + if (relation == MappedFieldType.Relation.DISJOINT && shardContext.validate()) { + return MappedFieldType.Relation.INTERSECTS; + } + + return relation; } // Not on the shard, we have no way to know what the relation is. 
diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 22ab5a9cd9c0b..5caafb0ce60d4 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -1632,7 +1632,21 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set context1.preProcess(false)); @@ -286,7 +291,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); SliceBuilder sliceBuilder = mock(SliceBuilder.class); @@ -323,7 +329,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery(); context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false); @@ -352,7 +359,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); context4.sliceBuilder(new SliceBuilder(1, 2)).parsedQuery(parsedQuery).preProcess(false); Query query1 = context4.query(); @@ -380,7 +388,9 @@ public void testClearQueryCancellationsOnClose() throws IOException { IndexService indexService = mock(IndexService.class); QueryShardContext queryShardContext = mock(QueryShardContext.class); - when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class))).thenReturn(queryShardContext); + when(indexService.newQueryShardContext(eq(shardId.id()), any(), any(), nullable(String.class), anyBoolean())).thenReturn( + queryShardContext + ); BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); @@ -429,7 +439,8 @@ protected Engine.Searcher acquireSearcherInternal(String source) { timeout, null, false, - Version.CURRENT + Version.CURRENT, + false ); 
assertThat(context.searcher().hasCancellations(), is(false)); context.searcher().addQueryCancellation(() -> {}); From 2b68b14629ba8bb5ccb1db3ed3806f837b56df15 Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Mon, 14 Mar 2022 19:09:04 -0700 Subject: [PATCH 26/46] [Remove] Type from TermsLookUp (#2459) * [Remove] Type from TermsLookUp Signed-off-by: Suraj Singh * Fix unit test failure Signed-off-by: Suraj Singh --- .../search/150_rewrite_on_coordinator.yml | 4 +- .../search/query/SearchQueryIT.java | 36 ++++------- .../validate/SimpleValidateQueryIT.java | 2 +- .../index/query/TermsQueryBuilder.java | 4 -- .../org/opensearch/indices/TermsLookup.java | 62 ++++--------------- .../index/query/TermsQueryBuilderTests.java | 11 +--- .../opensearch/indices/TermsLookupTests.java | 55 ++-------------- 7 files changed, 32 insertions(+), 142 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml index be34e10ddcd74..77298cb4f61c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml @@ -39,7 +39,7 @@ search: rest_total_hits_as_int: true index: "search_index" - body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type" : "_doc", "id": "1", "path": "followers"} } } } + body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "id": "1", "path": "followers"} } } } - do: indices.create: index: lookup_index @@ -64,7 +64,7 @@ search: rest_total_hits_as_int: true index: "search_index" - body: { "size" : 0, "query" : { "terms" : { "user" : { "index": "lookup_index", "type" : "_doc", "id": "1", "path": "followers"} } } } + body: { "size" : 0, "query" : { "terms" : { "user" : { "index": 
"lookup_index", "id": "1", "path": "followers"} } } } - match: { _shards.total: 5 } - match: { _shards.successful: 5 } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index db87269c8ceae..c9bb746973226 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -1195,75 +1195,63 @@ public void testTermsLookupFilter() throws Exception { ); SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); // same as above, just on the _id... - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "1", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); // another search with same parameters... 
- searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "2", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "2", "terms"))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("2")); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "3", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "3", "terms"))).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "2", "4"); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "4", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "4", "terms"))).get(); assertHitCount(searchResponse, 0L); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "1", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "1", "arr.term"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "1", "3"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "2", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "2", "arr.term"))) .get(); assertHitCount(searchResponse, 1L); 
assertFirstHit(searchResponse, hasId("2")); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "type", "3", "arr.term"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "3", "arr.term"))) .get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "2", "4"); searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "type", "3", "arr.term"))) + .setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "3", "arr.term"))) .get(); assertHitCount(searchResponse, 0L); // index "lookup" type "type" id "missing" document does not exist: ignore the lookup terms searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "type", "missing", "terms"))) + .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "missing", "terms"))) .get(); assertHitCount(searchResponse, 0L); // index "lookup3" type "type" has the source disabled: ignore the lookup terms - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "type", "1", "terms"))) - .get(); + searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "1", "terms"))).get(); assertHitCount(searchResponse, 0L); } diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index 29845b39becf2..30ab282bf3d44 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -491,7 +491,7 @@ public void testExplainTermsQueryWithLookup() throws Exception { client().prepareIndex("twitter").setId("1").setSource("followers", new int[] { 1, 2, 3 }).get(); 
refresh(); - TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "_doc", "1", "followers")); + TermsQueryBuilder termsLookupQuery = QueryBuilders.termsLookupQuery("user", new TermsLookup("twitter", "1", "followers")); ValidateQueryResponse response = client().admin() .indices() .prepareValidateQuery("twitter") diff --git a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java index e797730ac0dff..ac29cb2cf5201 100644 --- a/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TermsQueryBuilder.java @@ -225,10 +225,6 @@ public TermsLookup termsLookup() { return this.termsLookup; } - public boolean isTypeless() { - return termsLookup == null || termsLookup.type() == null; - } - private static final Set> INTEGER_TYPES = new HashSet<>( Arrays.asList(Byte.class, Short.class, Integer.class, Long.class) ); diff --git a/server/src/main/java/org/opensearch/indices/TermsLookup.java b/server/src/main/java/org/opensearch/indices/TermsLookup.java index 1aa16ad5cd72c..bf6c024fa130e 100644 --- a/server/src/main/java/org/opensearch/indices/TermsLookup.java +++ b/server/src/main/java/org/opensearch/indices/TermsLookup.java @@ -32,8 +32,7 @@ package org.opensearch.indices; -import org.opensearch.LegacyESVersion; -import org.opensearch.common.Nullable; +import org.opensearch.Version; import org.opensearch.common.ParseField; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -42,13 +41,13 @@ import org.opensearch.common.xcontent.ToXContentFragment; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.TermsQueryBuilder; import java.io.IOException; import java.util.Objects; import 
static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.opensearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Encapsulates the parameters needed to fetch terms. @@ -56,20 +55,11 @@ public class TermsLookup implements Writeable, ToXContentFragment { private final String index; - private @Nullable String type; private final String id; private final String path; private String routing; public TermsLookup(String index, String id, String path) { - this(index, null, id, path); - } - - /** - * @deprecated Types are in the process of being removed, use {@link TermsLookup(String, String, String)} instead. - */ - @Deprecated - public TermsLookup(String index, String type, String id, String path) { if (id == null) { throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the id."); } @@ -80,7 +70,6 @@ public TermsLookup(String index, String type, String id, String path) { throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the index."); } this.index = index; - this.type = type; this.id = id; this.path = path; } @@ -89,11 +78,8 @@ public TermsLookup(String index, String type, String id, String path) { * Read from a stream. */ public TermsLookup(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) { - type = in.readOptionalString(); - } else { - // Before 7.0, the type parameter was always non-null and serialized as a (non-optional) string. 
- type = in.readString(); + if (in.getVersion().before(Version.V_2_0_0)) { + in.readOptionalString(); } id = in.readString(); path = in.readString(); @@ -103,16 +89,8 @@ public TermsLookup(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) { - out.writeOptionalString(type); - } else { - if (type == null) { - throw new IllegalArgumentException( - "Typeless [terms] lookup queries are not supported if any " + "node is running a version before 7.0." - ); - - } - out.writeString(type); + if (out.getVersion().before(Version.V_2_0_0)) { + out.writeOptionalString(MapperService.SINGLE_MAPPING_NAME); } out.writeString(id); out.writeString(path); @@ -124,14 +102,6 @@ public String index() { return index; } - /** - * @deprecated Types are in the process of being removed. - */ - @Deprecated - public String type() { - return type; - } - public String id() { return id; } @@ -151,14 +121,12 @@ public TermsLookup routing(String routing) { private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("terms_lookup", args -> { String index = (String) args[0]; - String type = (String) args[1]; - String id = (String) args[2]; - String path = (String) args[3]; - return new TermsLookup(index, type, id, path); + String id = (String) args[1]; + String path = (String) args[2]; + return new TermsLookup(index, id, path); }); static { PARSER.declareString(constructorArg(), new ParseField("index")); - PARSER.declareString(optionalConstructorArg(), new ParseField("type").withAllDeprecated()); PARSER.declareString(constructorArg(), new ParseField("id")); PARSER.declareString(constructorArg(), new ParseField("path")); PARSER.declareString(TermsLookup::routing, new ParseField("routing")); @@ -170,19 +138,12 @@ public static TermsLookup parseTermsLookup(XContentParser parser) throws IOExcep @Override public String toString() { - if (type == null) { - return index 
+ "/" + id + "/" + path; - } else { - return index + "/" + type + "/" + id + "/" + path; - } + return index + "/" + id + "/" + path; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field("index", index); - if (type != null) { - builder.field("type", type); - } builder.field("id", id); builder.field("path", path); if (routing != null) { @@ -193,7 +154,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { - return Objects.hash(index, type, id, path, routing); + return Objects.hash(index, id, path, routing); } @Override @@ -206,7 +167,6 @@ public boolean equals(Object obj) { } TermsLookup other = (TermsLookup) obj; return Objects.equals(index, other.index) - && Objects.equals(type, other.type) && Objects.equals(id, other.id) && Objects.equals(path, other.path) && Objects.equals(routing, other.routing); diff --git a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java index e37b4f1a1c39f..ea93d7a65b951 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java @@ -119,9 +119,7 @@ protected TermsQueryBuilder doCreateTestQueryBuilder() { private TermsLookup randomTermsLookup() { // Randomly choose between a typeless terms lookup and one with an explicit type to make sure we are - TermsLookup lookup = maybeIncludeType && randomBoolean() - ? new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath) - : new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath); + TermsLookup lookup = new TermsLookup(randomAlphaOfLength(10), randomAlphaOfLength(10), termsPath); // testing both cases. lookup.routing(randomBoolean() ? 
randomAlphaOfLength(10) : null); return lookup; @@ -379,13 +377,6 @@ protected QueryBuilder parseQuery(XContentParser parser) throws IOException { try { QueryBuilder query = super.parseQuery(parser); assertThat(query, CoreMatchers.instanceOf(TermsQueryBuilder.class)); - - TermsQueryBuilder termsQuery = (TermsQueryBuilder) query; - String deprecationWarning = "Deprecated field [type] used, this field is unused and will be removed entirely"; - if (termsQuery.isTypeless() == false && !assertedWarnings.contains(deprecationWarning)) { - assertWarnings(deprecationWarning); - assertedWarnings.add(deprecationWarning); - } return query; } finally { diff --git a/server/src/test/java/org/opensearch/indices/TermsLookupTests.java b/server/src/test/java/org/opensearch/indices/TermsLookupTests.java index fb1462b500ea9..661995a22c507 100644 --- a/server/src/test/java/org/opensearch/indices/TermsLookupTests.java +++ b/server/src/test/java/org/opensearch/indices/TermsLookupTests.java @@ -45,42 +45,36 @@ public class TermsLookupTests extends OpenSearchTestCase { public void testTermsLookup() { String index = randomAlphaOfLengthBetween(1, 10); - String type = randomAlphaOfLengthBetween(1, 10); String id = randomAlphaOfLengthBetween(1, 10); String path = randomAlphaOfLengthBetween(1, 10); String routing = randomAlphaOfLengthBetween(1, 10); - TermsLookup termsLookup = new TermsLookup(index, type, id, path); + TermsLookup termsLookup = new TermsLookup(index, id, path); termsLookup.routing(routing); assertEquals(index, termsLookup.index()); - assertEquals(type, termsLookup.type()); assertEquals(id, termsLookup.id()); assertEquals(path, termsLookup.path()); assertEquals(routing, termsLookup.routing()); } public void testIllegalArguments() { - String type = randomAlphaOfLength(5); String id = randomAlphaOfLength(5); String path = randomAlphaOfLength(5); String index = randomAlphaOfLength(5); - switch (randomIntBetween(0, 3)) { + switch (randomIntBetween(0, 2)) { case 0: - type = null; - 
break; - case 1: id = null; break; - case 2: + case 1: path = null; break; - case 3: + case 2: index = null; break; default: fail("unknown case"); } try { - new TermsLookup(index, type, id, path); + new TermsLookup(index, id, path); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("[terms] query lookup element requires specifying")); } @@ -99,35 +93,6 @@ public void testSerialization() throws IOException { } } - public void testSerializationWithTypes() throws IOException { - TermsLookup termsLookup = randomTermsLookupWithTypes(); - try (BytesStreamOutput output = new BytesStreamOutput()) { - termsLookup.writeTo(output); - try (StreamInput in = output.bytes().streamInput()) { - TermsLookup deserializedLookup = new TermsLookup(in); - assertEquals(deserializedLookup, termsLookup); - assertEquals(deserializedLookup.hashCode(), termsLookup.hashCode()); - assertNotSame(deserializedLookup, termsLookup); - } - } - } - - public void testXContentParsingWithType() throws IOException { - XContentParser parser = createParser( - JsonXContent.jsonXContent, - "{ \"index\" : \"index\", \"id\" : \"id\", \"type\" : \"type\", \"path\" : \"path\", \"routing\" : \"routing\" }" - ); - - TermsLookup tl = TermsLookup.parseTermsLookup(parser); - assertEquals("index", tl.index()); - assertEquals("type", tl.type()); - assertEquals("id", tl.id()); - assertEquals("path", tl.path()); - assertEquals("routing", tl.routing()); - - assertWarnings("Deprecated field [type] used, this field is unused and will be removed entirely"); - } - public void testXContentParsing() throws IOException { XContentParser parser = createParser( JsonXContent.jsonXContent, @@ -136,7 +101,6 @@ public void testXContentParsing() throws IOException { TermsLookup tl = TermsLookup.parseTermsLookup(parser); assertEquals("index", tl.index()); - assertNull(tl.type()); assertEquals("id", tl.id()); assertEquals("path", tl.path()); assertEquals("routing", tl.routing()); @@ -147,13 +111,4 @@ public 
static TermsLookup randomTermsLookup() { randomBoolean() ? randomAlphaOfLength(10) : null ); } - - public static TermsLookup randomTermsLookupWithTypes() { - return new TermsLookup( - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomAlphaOfLength(10), - randomAlphaOfLength(10).replace('.', '_') - ).routing(randomBoolean() ? randomAlphaOfLength(10) : null); - } } From 02d000c514c6bab875d2985a2a77455a81576b41 Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Mon, 14 Mar 2022 20:46:23 -0700 Subject: [PATCH 27/46] [Remove] Type query (#2448) Signed-off-by: Suraj Singh --- .../index/mapper/DocumentMapper.java | 8 - .../index/query/TypeQueryBuilder.java | 158 ------------------ .../org/opensearch/search/SearchModule.java | 2 - .../index/query/TypeQueryBuilderTests.java | 90 ---------- .../opensearch/search/SearchModuleTests.java | 1 - 5 files changed, 259 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java delete mode 100644 server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java index 37e740ec33321..0ee0a3cb9a180 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java @@ -208,14 +208,6 @@ public T metadataMapper(Class type) { return mapping.metadataMapper(type); } - public IndexFieldMapper indexMapper() { - return metadataMapper(IndexFieldMapper.class); - } - - public TypeFieldMapper typeMapper() { - return metadataMapper(TypeFieldMapper.class); - } - public SourceFieldMapper sourceMapper() { return metadataMapper(SourceFieldMapper.class); } diff --git a/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java 
deleted file mode 100644 index d1ffcb394ec06..0000000000000 --- a/server/src/main/java/org/opensearch/index/query/TypeQueryBuilder.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.ParseField; -import org.opensearch.common.ParsingException; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.common.lucene.search.Queries; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; -import org.opensearch.index.mapper.DocumentMapper; - -import java.io.IOException; -import java.util.Objects; - -public class TypeQueryBuilder extends AbstractQueryBuilder { - public static final String NAME = "type"; - - private static final ParseField VALUE_FIELD = new ParseField("value"); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TypeQueryBuilder.class); - static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Type queries are deprecated, " - + "prefer to filter on a field instead."; - - private final String type; - - public TypeQueryBuilder(String type) { - if (type == null) { - throw new IllegalArgumentException("[type] cannot be null"); - } - this.type = type; - } - - /** - * Read from a stream. 
- */ - public TypeQueryBuilder(StreamInput in) throws IOException { - super(in); - type = in.readString(); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(type); - } - - public String type() { - return type; - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.field(VALUE_FIELD.getPreferredName(), type); - printBoostAndQueryName(builder); - builder.endObject(); - } - - public static TypeQueryBuilder fromXContent(XContentParser parser) throws IOException { - String type = null; - String queryName = null; - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - String currentFieldName = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - queryName = parser.text(); - } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - boost = parser.floatValue(); - } else if (VALUE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - type = parser.text(); - } else { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]" - ); - } - } else { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]" - ); - } - } - - if (type == null) { - throw new ParsingException( - parser.getTokenLocation(), - "[" + TypeQueryBuilder.NAME + "] filter needs to be provided with a value for the type" - ); - } - return new TypeQueryBuilder(type).boost(boost).queryName(queryName); - } - - @Override - public String getWriteableName() { - return NAME; 
- } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - deprecationLogger.deprecate("type_query", TYPES_DEPRECATION_MESSAGE); - // LUCENE 4 UPGRADE document mapper should use bytesref as well? - DocumentMapper documentMapper = context.getMapperService().documentMapper(); - if (documentMapper == null) { - // no type means no documents - return new MatchNoDocsQuery(); - } else { - return Queries.newNonNestedFilter(context.indexVersionCreated()); - } - } - - @Override - protected int doHashCode() { - return Objects.hash(type); - } - - @Override - protected boolean doEquals(TypeQueryBuilder other) { - return Objects.equals(type, other.type); - } -} diff --git a/server/src/main/java/org/opensearch/search/SearchModule.java b/server/src/main/java/org/opensearch/search/SearchModule.java index cdc2509bbcb00..c052f7f89e14e 100644 --- a/server/src/main/java/org/opensearch/search/SearchModule.java +++ b/server/src/main/java/org/opensearch/search/SearchModule.java @@ -89,7 +89,6 @@ import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.query.TermsSetQueryBuilder; -import org.opensearch.index.query.TypeQueryBuilder; import org.opensearch.index.query.WildcardQueryBuilder; import org.opensearch.index.query.WrapperQueryBuilder; import org.opensearch.index.query.functionscore.ExponentialDecayFunctionBuilder; @@ -1183,7 +1182,6 @@ private void registerQueryParsers(List plugins) { registerQuery( new QuerySpec<>(SimpleQueryStringBuilder.NAME, SimpleQueryStringBuilder::new, SimpleQueryStringBuilder::fromXContent) ); - registerQuery(new QuerySpec<>(TypeQueryBuilder.NAME, TypeQueryBuilder::new, TypeQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(ScriptQueryBuilder.NAME, ScriptQueryBuilder::new, ScriptQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(GeoDistanceQueryBuilder.NAME, GeoDistanceQueryBuilder::new, GeoDistanceQueryBuilder::fromXContent)); 
registerQuery( diff --git a/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java deleted file mode 100644 index bf373ac180f04..0000000000000 --- a/server/src/test/java/org/opensearch/index/query/TypeQueryBuilderTests.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.lucene.search.Queries; -import org.opensearch.test.AbstractQueryTestCase; - -import java.io.IOException; - -import static org.hamcrest.Matchers.equalTo; - -public class TypeQueryBuilderTests extends AbstractQueryTestCase { - - @Override - protected TypeQueryBuilder doCreateTestQueryBuilder() { - return new TypeQueryBuilder("_doc"); - } - - @Override - protected void doAssertLuceneQuery(TypeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { - if (createShardContext().getMapperService().documentMapper() == null) { - assertEquals(new MatchNoDocsQuery(), query); - } else { - assertThat(query, equalTo(Queries.newNonNestedFilter(context.indexVersionCreated()))); - } - } - - public void testIllegalArgument() { - expectThrows(IllegalArgumentException.class, () -> new TypeQueryBuilder((String) null)); - } - - public void testFromJson() throws IOException { - String json = "{\n" + " \"type\" : {\n" + " \"value\" : \"my_type\",\n" + " \"boost\" : 1.0\n" + " }\n" + "}"; - - TypeQueryBuilder parsed = (TypeQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); - - assertEquals(json, "my_type", parsed.type()); - } - - @Override - public void testToQuery() throws IOException { - super.testToQuery(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } - - @Override - public void testMustRewrite() throws IOException { - super.testMustRewrite(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } - - @Override - public void testCacheability() throws IOException { - super.testCacheability(); - assertWarnings(TypeQueryBuilder.TYPES_DEPRECATION_MESSAGE); - } -} diff --git a/server/src/test/java/org/opensearch/search/SearchModuleTests.java b/server/src/test/java/org/opensearch/search/SearchModuleTests.java index 19b61275b8f62..05d4153949f9a 100644 
--- a/server/src/test/java/org/opensearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/opensearch/search/SearchModuleTests.java @@ -459,7 +459,6 @@ public List> getRescorers() { "term", "terms", "terms_set", - "type", "wildcard", "wrapper", "distance_feature" }; From 7df40ee1b098014cf3ef817acae303263ddc917f Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 15 Mar 2022 01:05:58 -0500 Subject: [PATCH 28/46] [Remove] type from TaskResults index and IndexMetadata.getMappings (#2469) Removes types from the TaskResults internal index along with the getMappings method from IndexMetadata. This is needed to further remove types from CreateIndexRequest. Signed-off-by: Nicholas Walter Knize --- .../admin/cluster/node/tasks/TasksIT.java | 5 ++ .../gateway/GatewayIndexStateIT.java | 19 ++---- .../opensearch/gateway/MetadataNodesIT.java | 10 +-- .../opensearch/action/index/IndexRequest.java | 2 +- .../cluster/metadata/IndexMetadata.java | 34 +++++----- .../cluster/metadata/MappingMetadata.java | 62 +++++-------------- .../opensearch/cluster/metadata/Metadata.java | 2 +- .../index/mapper/MapperService.java | 5 +- .../opensearch/index/shard/StoreRecovery.java | 5 +- .../opensearch/tasks/TaskResultsService.java | 8 +-- .../opensearch/tasks/task-index-mapping.json | 2 +- .../metadata/MetadataMappingServiceTests.java | 2 +- 12 files changed, 50 insertions(+), 106 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java index e1346492999be..fbac2f7dbff6e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java @@ -840,6 +840,11 @@ public void testTaskStoringSuccessfulResult() throws Exception { GetTaskResponse getResponse = expectFinishedTask(taskId); 
assertEquals(result, getResponse.getTask().getResponseAsMap()); assertNull(getResponse.getTask().getError()); + + // run it again to check that the tasks index has been successfully created and can be re-used + client().execute(TestTaskPlugin.TestTaskAction.INSTANCE, request).get(); + events = findEvents(TestTaskPlugin.TestTaskAction.NAME, Tuple::v1); + assertEquals(2, events.size()); } public void testTaskStoringFailureResult() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java index 6fe22e2a8fde4..2138e24cc9b4c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java @@ -60,7 +60,6 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.mapper.MapperParsingException; -import org.opensearch.index.mapper.MapperService; import org.opensearch.indices.IndexClosedException; import org.opensearch.indices.ShardLimitValidator; import org.opensearch.test.OpenSearchIntegTestCase; @@ -123,9 +122,8 @@ public void testMappingMetadataParsed() throws Exception { .getState() .metadata() .index("test") - .getMappings() - .get(MapperService.SINGLE_MAPPING_NAME); - assertThat(mappingMd.routing().required(), equalTo(true)); + .mapping(); + assertThat(mappingMd.routingRequired(), equalTo(true)); logger.info("--> restarting nodes..."); internalCluster().fullRestart(); @@ -134,17 +132,8 @@ public void testMappingMetadataParsed() throws Exception { ensureYellow(); logger.info("--> verify meta _routing required exists"); - mappingMd = client().admin() - .cluster() - .prepareState() - .execute() - .actionGet() - .getState() - .metadata() - .index("test") - .getMappings() - .get(MapperService.SINGLE_MAPPING_NAME); - 
assertThat(mappingMd.routing().required(), equalTo(true)); + mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metadata().index("test").mapping(); + assertThat(mappingMd.routingRequired(), equalTo(true)); } public void testSimpleOpenClose() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java index 2731eb9a290d6..c9807aa24e259 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/MetadataNodesIT.java @@ -153,11 +153,7 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { // make sure it was also written on red node although index is closed ImmutableOpenMap indicesMetadata = getIndicesMetadataOnNode(dataNode); - assertNotNull( - ((Map) (indicesMetadata.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get( - "integer_field" - ) - ); + assertNotNull(((Map) (indicesMetadata.get(index).mapping().getSourceAsMap().get("properties"))).get("integer_field")); assertThat(indicesMetadata.get(index).getState(), equalTo(IndexMetadata.State.CLOSE)); /* Try the same and see if this also works if node was just restarted. 
@@ -190,9 +186,7 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { // make sure it was also written on red node although index is closed indicesMetadata = getIndicesMetadataOnNode(dataNode); - assertNotNull( - ((Map) (indicesMetadata.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("float_field") - ); + assertNotNull(((Map) (indicesMetadata.get(index).mapping().getSourceAsMap().get("properties"))).get("float_field")); assertThat(indicesMetadata.get(index).getState(), equalTo(IndexMetadata.State.CLOSE)); // finally check that meta data is also written of index opened again diff --git a/server/src/main/java/org/opensearch/action/index/IndexRequest.java b/server/src/main/java/org/opensearch/action/index/IndexRequest.java index ed77774bc01d3..7bf6b876fa652 100644 --- a/server/src/main/java/org/opensearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/opensearch/action/index/IndexRequest.java @@ -615,7 +615,7 @@ public VersionType versionType() { public void process(Version indexCreatedVersion, @Nullable MappingMetadata mappingMd, String concreteIndex) { if (mappingMd != null) { // might as well check for routing here - if (mappingMd.routing().required() && routing == null) { + if (mappingMd.routingRequired() && routing == null) { throw new RoutingMissingException(concreteIndex, id); } } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java index a7f351a918ae5..6510c57060fe0 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexMetadata.java @@ -660,17 +660,6 @@ public ImmutableOpenMap getAliases() { return this.aliases; } - /** - * Return an object that maps each type to the associated mappings. - * The return value is never {@code null} but may be empty if the index - * has no mappings. 
- * @deprecated Use {@link #mapping()} instead now that indices have a single type - */ - @Deprecated - public ImmutableOpenMap getMappings() { - return mappings; - } - /** * Return the concrete mapping for this index or {@code null} if this index has no mappings at all. */ @@ -1175,7 +1164,10 @@ public Builder putMapping(String source) throws IOException { } public Builder putMapping(MappingMetadata mappingMd) { - mappings.put(mappingMd.type(), mappingMd); + mappings.clear(); + if (mappingMd != null) { + mappings.put(mappingMd.type(), mappingMd); + } return this; } @@ -1464,23 +1456,25 @@ public static void toXContent(IndexMetadata indexMetadata, XContentBuilder build if (context != Metadata.XContentContext.API) { builder.startArray(KEY_MAPPINGS); - for (ObjectObjectCursor cursor : indexMetadata.getMappings()) { + MappingMetadata mmd = indexMetadata.mapping(); + if (mmd != null) { if (binary) { - builder.value(cursor.value.source().compressed()); + builder.value(mmd.source().compressed()); } else { - builder.map(XContentHelper.convertToMap(cursor.value.source().uncompressed(), true).v2()); + builder.map(XContentHelper.convertToMap(mmd.source().uncompressed(), true).v2()); } } builder.endArray(); } else { builder.startObject(KEY_MAPPINGS); - for (ObjectObjectCursor cursor : indexMetadata.getMappings()) { - Map mapping = XContentHelper.convertToMap(cursor.value.source().uncompressed(), false).v2(); - if (mapping.size() == 1 && mapping.containsKey(cursor.key)) { + MappingMetadata mmd = indexMetadata.mapping(); + if (mmd != null) { + Map mapping = XContentHelper.convertToMap(mmd.source().uncompressed(), false).v2(); + if (mapping.size() == 1 && mapping.containsKey(mmd.type())) { // the type name is the root value, reduce it - mapping = (Map) mapping.get(cursor.key); + mapping = (Map) mapping.get(mmd.type()); } - builder.field(cursor.key); + builder.field(mmd.type()); builder.map(mapping); } builder.endObject(); diff --git 
a/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java b/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java index 66bca027d7cc4..620542f8f1bde 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MappingMetadata.java @@ -50,6 +50,7 @@ import java.io.UncheckedIOException; import java.util.Collections; import java.util.Map; +import java.util.Objects; import static org.opensearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; @@ -59,46 +60,16 @@ public class MappingMetadata extends AbstractDiffable { public static final MappingMetadata EMPTY_MAPPINGS = new MappingMetadata(MapperService.SINGLE_MAPPING_NAME, Collections.emptyMap()); - public static class Routing { - - public static final Routing EMPTY = new Routing(false); - - private final boolean required; - - public Routing(boolean required) { - this.required = required; - } - - public boolean required() { - return required; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Routing routing = (Routing) o; - - return required == routing.required; - } - - @Override - public int hashCode() { - return getClass().hashCode() + (required ? 
1 : 0); - } - } - private final String type; private final CompressedXContent source; - private final Routing routing; + private final boolean routingRequired; public MappingMetadata(DocumentMapper docMapper) { this.type = docMapper.type(); this.source = docMapper.mappingSource(); - this.routing = new Routing(docMapper.routingFieldMapper().required()); + this.routingRequired = docMapper.routingFieldMapper().required(); } @SuppressWarnings("unchecked") @@ -109,7 +80,7 @@ public MappingMetadata(CompressedXContent mapping) { throw new IllegalStateException("Can't derive type from mapping, no root type: " + mapping.string()); } this.type = mappingMap.keySet().iterator().next(); - this.routing = initRouting((Map) mappingMap.get(this.type)); + this.routingRequired = isRoutingRequired((Map) mappingMap.get(this.type)); } @SuppressWarnings("unchecked") @@ -125,13 +96,13 @@ public MappingMetadata(String type, Map mapping) { if (mapping.size() == 1 && mapping.containsKey(type)) { withoutType = (Map) mapping.get(type); } - this.routing = initRouting(withoutType); + this.routingRequired = isRoutingRequired(withoutType); } @SuppressWarnings("unchecked") - private Routing initRouting(Map withoutType) { + private boolean isRoutingRequired(Map withoutType) { + boolean required = false; if (withoutType.containsKey("_routing")) { - boolean required = false; Map routingNode = (Map) withoutType.get("_routing"); for (Map.Entry entry : routingNode.entrySet()) { String fieldName = entry.getKey(); @@ -147,10 +118,8 @@ private Routing initRouting(Map withoutType) { } } } - return new Routing(required); - } else { - return Routing.EMPTY; } + return required; } public String type() { @@ -180,8 +149,8 @@ public Map getSourceAsMap() throws OpenSearchParseException { return sourceAsMap(); } - public Routing routing() { - return this.routing; + public boolean routingRequired() { + return this.routingRequired; } @Override @@ -189,7 +158,7 @@ public void writeTo(StreamOutput out) throws IOException 
{ out.writeString(type()); source().writeTo(out); // routing - out.writeBoolean(routing().required()); + out.writeBoolean(routingRequired); if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { out.writeBoolean(false); // hasParentField } @@ -202,7 +171,7 @@ public boolean equals(Object o) { MappingMetadata that = (MappingMetadata) o; - if (!routing.equals(that.routing)) return false; + if (!Objects.equals(this.routingRequired, that.routingRequired)) return false; if (!source.equals(that.source)) return false; if (!type.equals(that.type)) return false; @@ -211,17 +180,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - int result = type.hashCode(); - result = 31 * result + source.hashCode(); - result = 31 * result + routing.hashCode(); - return result; + return Objects.hash(type, source, routingRequired); } public MappingMetadata(StreamInput in) throws IOException { type = in.readString(); source = CompressedXContent.readCompressedString(in); // routing - routing = new Routing(in.readBoolean()); + routingRequired = in.readBoolean(); if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { in.readBoolean(); // hasParentField } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java index b3503f64c53f3..6e9c30877f9c2 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/Metadata.java @@ -880,7 +880,7 @@ public boolean routingRequired(String concreteIndex) { if (indexMetadata != null) { MappingMetadata mappingMetadata = indexMetadata.mapping(); if (mappingMetadata != null) { - return mappingMetadata.routing().required(); + return mappingMetadata.routingRequired(); } } return false; diff --git a/server/src/main/java/org/opensearch/index/mapper/MapperService.java b/server/src/main/java/org/opensearch/index/mapper/MapperService.java index 1d4e49a6e6fee..a92647929ff08 
100644 --- a/server/src/main/java/org/opensearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/opensearch/index/mapper/MapperService.java @@ -32,7 +32,6 @@ package org.opensearch.index.mapper; -import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.DelegatingAnalyzerWrapper; @@ -416,8 +415,8 @@ public DocumentMapper merge(String type, CompressedXContent mappingSource, Merge private synchronized Map internalMerge(IndexMetadata indexMetadata, MergeReason reason) { assert reason != MergeReason.MAPPING_UPDATE_PREFLIGHT; Map map = new LinkedHashMap<>(); - for (ObjectCursor cursor : indexMetadata.getMappings().values()) { - MappingMetadata mappingMetadata = cursor.value; + MappingMetadata mappingMetadata = indexMetadata.mapping(); + if (mappingMetadata != null) { map.put(mappingMetadata.type(), mappingMetadata.source()); } return internalMerge(map, reason); diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index 6cf6ad645ca00..20bb6e7060ca3 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -32,7 +32,6 @@ package org.opensearch.index.shard; -import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -132,8 +131,8 @@ void recoverFromLocalShards( throw new IllegalArgumentException("can't add shards from more than one index"); } IndexMetadata sourceMetadata = shards.get(0).getIndexMetadata(); - for (ObjectObjectCursor mapping : sourceMetadata.getMappings()) { - mappingUpdateConsumer.accept(mapping.value); + if (sourceMetadata.mapping() != null) { + 
mappingUpdateConsumer.accept(sourceMetadata.mapping()); } indexShard.mapperService().merge(sourceMetadata, MapperService.MergeReason.MAPPING_RECOVERY); // now that the mapping is merged we can validate the index sort configuration. diff --git a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java index 60de452c3149e..e22793e057c6a 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskResultsService.java +++ b/server/src/main/java/org/opensearch/tasks/TaskResultsService.java @@ -80,13 +80,11 @@ public class TaskResultsService { public static final String TASK_INDEX = ".tasks"; - public static final String TASK_TYPE = "task"; - public static final String TASK_RESULT_INDEX_MAPPING_FILE = "task-index-mapping.json"; public static final String TASK_RESULT_MAPPING_VERSION_META_FIELD = "version"; - public static final int TASK_RESULT_MAPPING_VERSION = 3; + public static final int TASK_RESULT_MAPPING_VERSION = 3; // must match version in task-index-mapping.json /** * The backoff policy to use when saving a task result fails. 
The total wait @@ -115,7 +113,7 @@ public void storeResult(TaskResult taskResult, ActionListener listener) { CreateIndexRequest createIndexRequest = new CreateIndexRequest(); createIndexRequest.settings(taskResultIndexSettings()); createIndexRequest.index(TASK_INDEX); - createIndexRequest.mapping(TASK_TYPE, taskResultIndexMapping(), XContentType.JSON); + createIndexRequest.mapping(taskResultIndexMapping()); createIndexRequest.cause("auto(task api)"); client.admin().indices().create(createIndexRequest, new ActionListener() { @@ -155,7 +153,7 @@ public void onFailure(Exception e) { } private int getTaskResultMappingVersion(IndexMetadata metadata) { - MappingMetadata mappingMetadata = metadata.getMappings().get(TASK_TYPE); + MappingMetadata mappingMetadata = metadata.mapping(); if (mappingMetadata == null) { return 0; } diff --git a/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json b/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json index 76b07bf3570f2..54e9d39902f03 100644 --- a/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json +++ b/server/src/main/resources/org/opensearch/tasks/task-index-mapping.json @@ -1,5 +1,5 @@ { - "task" : { + "_doc" : { "_meta": { "version": 3 }, diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java index a87ec461e5dc8..94bf162303127 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java @@ -79,7 +79,7 @@ public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Ex // the task really was a mapping update assertThat( indexService.mapperService().documentMapper().mappingSource(), - 
not(equalTo(result.resultingState.metadata().index("test").getMappings().get(MapperService.SINGLE_MAPPING_NAME).source())) + not(equalTo(result.resultingState.metadata().index("test").mapping().source())) ); // since we never committed the cluster state update, the in-memory state is unchanged assertThat(indexService.mapperService().documentMapper().mappingSource(), equalTo(currentMapping)); From b619a050bf1048d3edc7b80dd05801a89698ccf1 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 15 Mar 2022 10:31:31 -0500 Subject: [PATCH 29/46] [Remove] types based addMapping method from CreateIndexRequest and Builder (#2460) Removes the obsolete types based .addMapping method from CreateIndexRequest and CreateIndexRequestBuilder. Tests are refactored to use the new source only setMapping method. Signed-off-by: Nicholas Walter Knize --- .../admin/cluster/stats/ClusterStatsIT.java | 14 ++----- .../action/admin/indices/get/GetIndexIT.java | 8 +--- .../cluster/SpecificMasterNodesIT.java | 9 ++-- .../gateway/GatewayIndexStateIT.java | 17 +++----- .../gateway/RecoveryFromGatewayIT.java | 9 +--- .../java/org/opensearch/get/GetActionIT.java | 4 +- .../mapper/CopyToMapperIntegrationIT.java | 5 +-- .../mapping/ConcurrentDynamicTemplateIT.java | 11 ++--- .../mapping/UpdateMappingIntegrationIT.java | 12 ++---- .../RandomExceptionCircuitBreakerIT.java | 5 +-- .../indices/state/OpenCloseIndexIT.java | 5 +-- .../indices/stats/IndexStatsIT.java | 6 +-- .../routing/PartitionedRoutingIT.java | 14 +------ .../aggregations/bucket/DateHistogramIT.java | 5 +-- .../bucket/TermsDocCountErrorIT.java | 9 +--- .../basic/SearchWithRandomExceptionsIT.java | 5 +-- .../basic/SearchWithRandomIOExceptionsIT.java | 7 +--- .../highlight/HighlighterSearchIT.java | 9 +--- .../opensearch/search/geo/GeoFilterIT.java | 7 +--- .../search/geo/GeoShapeIntegrationIT.java | 14 +++---- .../geo/LegacyGeoShapeIntegrationIT.java | 10 ++--- .../search/morelikethis/MoreLikeThisIT.java | 30 ++++--------- 
.../search/nested/SimpleNestedIT.java | 42 ++++++++----------- .../search/query/SimpleQueryStringIT.java | 14 +------ .../search/slice/SearchSliceIT.java | 5 +-- .../opensearch/search/sort/SimpleSortIT.java | 9 +--- .../indices/create/CreateIndexRequest.java | 13 ------ .../create/CreateIndexRequestBuilder.java | 6 +-- .../rollover/RolloverRequestBuilder.java | 6 --- .../create/CreateIndexRequestTests.java | 31 +++----------- .../index/mapper/MapperServiceTests.java | 5 +-- .../search/geo/GeoShapeQueryTests.java | 18 +++----- 32 files changed, 93 insertions(+), 271 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java index 19d1728a1fecd..72f34133067ee 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -42,8 +42,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.OpenSearchExecutors; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.monitor.os.OsStats; import org.opensearch.node.NodeRoleSettings; import org.opensearch.test.OpenSearchIntegTestCase; @@ -276,19 +274,13 @@ public void testFieldTypes() { assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.GREEN)); assertTrue(response.getIndicesStats().getMappings().getFieldTypeStats().isEmpty()); - client().admin() - .indices() - .prepareCreate("test1") - .addMapping(MapperService.SINGLE_MAPPING_NAME, "{\"properties\":{\"foo\":{\"type\": \"keyword\"}}}", XContentType.JSON) - .get(); + client().admin().indices().prepareCreate("test1").setMapping("{\"properties\":{\"foo\":{\"type\": \"keyword\"}}}").get(); 
client().admin() .indices() .prepareCreate("test2") - .addMapping( - MapperService.SINGLE_MAPPING_NAME, + .setMapping( "{\"properties\":{\"foo\":{\"type\": \"keyword\"},\"bar\":{\"properties\":{\"baz\":{\"type\":\"keyword\"}," - + "\"eggplant\":{\"type\":\"integer\"}}}}}", - XContentType.JSON + + "\"eggplant\":{\"type\":\"integer\"}}}}}" ) .get(); response = client().admin().cluster().prepareClusterStats().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java index bbe8b616ad87e..ffc738ac98de5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/get/GetIndexIT.java @@ -40,7 +40,6 @@ import org.opensearch.cluster.metadata.MappingMetadata; import org.opensearch.common.collect.ImmutableOpenMap; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.IndexNotFoundException; import org.opensearch.test.OpenSearchIntegTestCase; @@ -64,12 +63,7 @@ public class GetIndexIT extends OpenSearchIntegTestCase { @Override protected void setupSuiteScopeCluster() throws Exception { - assertAcked( - prepareCreate("idx").addAlias(new Alias("alias_idx")) - .addMapping("type1", "{\"type1\":{}}", XContentType.JSON) - .setSettings(Settings.builder().put("number_of_shards", 1)) - .get() - ); + assertAcked(prepareCreate("idx").addAlias(new Alias("alias_idx")).setSettings(Settings.builder().put("number_of_shards", 1)).get()); ensureSearchable("idx"); createIndex("empty_idx"); ensureSearchable("idx", "empty_idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java index 9377fe284fce7..fc193163f75cc 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/SpecificMasterNodesIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsAction; import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.discovery.MasterNotDiscoveredException; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; @@ -321,11 +320,9 @@ public void testAliasFilterValidation() { internalCluster().startDataOnlyNode(); assertAcked( - prepareCreate("test").addMapping( - "type1", - "{\"type1\" : {\"properties\" : {\"table_a\" : { \"type\" : \"nested\", " - + "\"properties\" : {\"field_a\" : { \"type\" : \"keyword\" },\"field_b\" :{ \"type\" : \"keyword\" }}}}}}", - XContentType.JSON + prepareCreate("test").setMapping( + "{\"properties\" : {\"table_a\" : { \"type\" : \"nested\", " + + "\"properties\" : {\"field_a\" : { \"type\" : \"keyword\" },\"field_b\" :{ \"type\" : \"keyword\" }}}}}" ) ); client().admin() diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java index 2138e24cc9b4c..24aff104ce837 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java @@ -57,7 +57,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.mapper.MapperParsingException; import 
org.opensearch.indices.IndexClosedException; @@ -483,19 +482,15 @@ public void testRecoverMissingAnalyzer() throws Exception { prepareCreate("test").setSettings( Settings.builder().put("index.analysis.analyzer.test.tokenizer", "standard").put("index.number_of_shards", "1") ) - .addMapping( - "type1", + .setMapping( "{\n" - + " \"type1\": {\n" - + " \"properties\": {\n" - + " \"field1\": {\n" - + " \"type\": \"text\",\n" - + " \"analyzer\": \"test\"\n" - + " }\n" + + " \"properties\": {\n" + + " \"field1\": {\n" + + " \"type\": \"text\",\n" + + " \"analyzer\": \"test\"\n" + " }\n" + " }\n" - + " }}", - XContentType.JSON + + " }" ) .get(); logger.info("--> indexing a simple document"); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java index 612abee7dbf5b..3c5f2828ff94f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java @@ -51,7 +51,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.Index; import org.opensearch.index.IndexService; @@ -115,16 +114,14 @@ public void testOneNodeRecoverFromGateway() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("appAccountIds") .field("type", "text") .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); client().prepareIndex("test") .setId("10990239") @@ -212,7 +209,6 @@ public void 
testSingleNodeNoFlush() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") @@ -222,14 +218,13 @@ public void testSingleNodeNoFlush() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // note: default replica settings are tied to #data nodes-1 which is 0 here. We can do with 1 in this test. int numberOfShards = numberOfShards(); assertAcked( prepareCreate("test").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards()).put(SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 1)) - ).addMapping("type1", mapping, XContentType.JSON) + ).setMapping(mapping) ); int value1Docs; diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java index 30cb18669ebbd..ec0b47ccd0ecf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java @@ -291,7 +291,6 @@ public void testGetDocWithMultivaluedFields() throws Exception { String mapping1 = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") @@ -299,9 +298,8 @@ public void testGetDocWithMultivaluedFields() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping1, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping1)); ensureGreen(); GetResponse response = client().prepareGet("test", "1").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java index f2cc3c289e8e4..f23e319a5e8d2 100644 
--- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java @@ -36,7 +36,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; @@ -81,7 +80,6 @@ public void testDynamicTemplateCopyTo() throws Exception { public void testDynamicObjectCopyTo() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("_doc") .startObject("properties") .startObject("foo") .field("type", "text") @@ -89,9 +87,8 @@ public void testDynamicObjectCopyTo() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(client().admin().indices().prepareCreate("test-idx").addMapping("_doc", mapping, XContentType.JSON)); + assertAcked(client().admin().indices().prepareCreate("test-idx").setMapping(mapping)); client().prepareIndex("test-idx").setId("1").setSource("foo", "bar").get(); client().admin().indices().prepareRefresh("test-idx").execute().actionGet(); SearchResponse response = client().prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java index 7dc1933575ea3..e731b0074f04d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -34,7 +34,6 
@@ import org.opensearch.action.ActionListener; import org.opensearch.action.index.IndexResponse; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; @@ -49,14 +48,10 @@ import static org.hamcrest.Matchers.emptyIterable; public class ConcurrentDynamicTemplateIT extends OpenSearchIntegTestCase { - private final String mappingType = "test-mapping"; - // see #3544 public void testConcurrentDynamicMapping() throws Exception { final String fieldName = "field"; - final String mapping = "{ \"" - + mappingType - + "\": {" + final String mapping = "{ " + "\"dynamic_templates\": [" + "{ \"" + fieldName @@ -65,14 +60,14 @@ public void testConcurrentDynamicMapping() throws Exception { + "\"mapping\": {" + "\"type\": \"text\"," + "\"store\": true," - + "\"analyzer\": \"whitespace\" } } } ] } }"; + + "\"analyzer\": \"whitespace\" } } } ] }"; // The 'fieldNames' array is used to help with retrieval of index terms // after testing int iters = scaledRandomIntBetween(5, 15); for (int i = 0; i < iters; i++) { cluster().wipeIndices("test"); - assertAcked(prepareCreate("test").addMapping(mappingType, mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); int numDocs = scaledRandomIntBetween(10, 100); final CountDownLatch latch = new CountDownLatch(numDocs); final List throwable = new CopyOnWriteArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java index a325bbc62f8a8..0a29794add5a8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -145,7 +145,7 @@ public void testUpdateMappingWithoutType() { .indices() 
.prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) - .addMapping("_doc", "{\"_doc\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -196,7 +196,7 @@ public void testUpdateMappingWithConflicts() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0)) - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); @@ -221,7 +221,7 @@ public void testUpdateMappingWithNormsConflicts() { client().admin() .indices() .prepareCreate("test") - .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}}", XContentType.JSON) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}") .execute() .actionGet(); try { @@ -248,11 +248,7 @@ public void testUpdateMappingNoChanges() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0)) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, - "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"properties\":{\"body\":{\"type\":\"text\"}}}}", - XContentType.JSON - ) + .setMapping("{\"properties\":{\"body\":{\"type\":\"text\"}}}") .execute() .actionGet(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 3d907bcaf3198..341c0a965f94e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -50,7 +50,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.query.QueryBuilders; import org.opensearch.indices.IndicesService; @@ -104,7 +103,6 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test-str") .field("type", "keyword") @@ -115,7 +113,6 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .field("type", randomFrom(Arrays.asList("float", "long", "double", "short", "integer"))) .endObject() // test-num .endObject() // properties - .endObject() // type .endObject() ); final double topLevelRate; @@ -149,7 +146,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc .indices() .prepareCreate("test") .setSettings(settings) - .addMapping("type", mapping, XContentType.JSON) + .setMapping(mapping) .execute() .actionGet(); final int numDocs; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java index b8baa35507892..ca1e1399f8fdc 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/OpenCloseIndexIT.java @@ -47,7 +47,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestStatus; @@ -305,17 +304,15 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", mapping, XContentType.JSON)); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping)); ensureGreen(); int docs = between(10, 100); IndexRequestBuilder[] builder = new IndexRequestBuilder[docs]; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java index 07c8471e360f6..c503dd9f83273 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java @@ -1004,11 +1004,9 @@ public void testMultiIndex() throws Exception { public void testCompletionFieldsParam() throws Exception { assertAcked( - prepareCreate("test1").addMapping( - "_doc", + prepareCreate("test1").setMapping( "{ \"properties\": { \"bar\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}" - + ",\"baz\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": 
\"completion\" }}}}}", - XContentType.JSON + + ",\"baz\": { \"type\": \"text\", \"fields\": { \"completion\": { \"type\": \"completion\" }}}}}" ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java index 99742166cda7f..a64e857f089f0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/PartitionedRoutingIT.java @@ -36,8 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.OpenSearchIntegTestCase; import org.mockito.internal.util.collections.Sets; @@ -63,11 +61,7 @@ public void testVariousPartitionSizes() throws Exception { .put("index.number_of_routing_shards", shards) .put("index.routing_partition_size", partitionSize) ) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, - "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"_routing\":{\"required\":true}}}", - XContentType.JSON - ) + .setMapping("{\"_routing\":{\"required\":true}}") .execute() .actionGet(); ensureGreen(); @@ -101,11 +95,7 @@ public void testShrinking() throws Exception { .put("index.number_of_replicas", numberOfReplicas()) .put("index.routing_partition_size", partitionSize) ) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, - "{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{\"_routing\":{\"required\":true}}}", - XContentType.JSON - ) + .setMapping("{\"_routing\":{\"required\":true}}") .execute() .actionGet(); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index 971afdd20e1fa..2c095857089e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; import org.opensearch.common.time.DateMathParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.index.query.MatchNoneQueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -1316,7 +1315,6 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception { String mappingJson = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("date") .field("type", "date") @@ -1324,9 +1322,8 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("idx2").addMapping("type", mappingJson, XContentType.JSON).get(); + prepareCreate("idx2").setMapping(mappingJson).get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; for (int i = 0; i < reqs.length; i++) { reqs[i] = client().prepareIndex("idx2") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java index c21f78c5e942d..9b941860177bb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ 
-36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.bucket.terms.Terms.Bucket; @@ -110,13 +109,7 @@ public void setupSuiteScopeCluster() throws Exception { ); } numRoutingValues = between(1, 40); - assertAcked( - prepareCreate("idx_with_routing").addMapping( - "type", - "{ \"type\" : { \"_routing\" : { \"required\" : true } } }", - XContentType.JSON - ) - ); + assertAcked(prepareCreate("idx_with_routing").setMapping("{ \"_routing\" : { \"required\" : true } }")); for (int i = 0; i < numDocs; i++) { builders.add( client().prepareIndex("idx_single_shard") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java index ed7f764c798e5..9efb07fc7e581 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java @@ -50,7 +50,6 @@ import org.opensearch.common.settings.Settings.Builder; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.MockEngineFactoryPlugin; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; @@ -85,14 +84,12 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") 
.field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); final double lowLevelRate; final double topLevelRate; @@ -121,7 +118,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate) .put(MockEngineSupport.WRAP_READER_RATIO.getKey(), 1.0d); logger.info("creating index: [test] using settings: [{}]", settings.build()); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(mapping)); ensureSearchable(); final int numDocs = between(10, 100); int numCreated = 0; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java index f35d07d6d513c..094ab8a19c88b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -46,7 +46,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; import org.opensearch.search.sort.SortOrder; @@ -73,14 +72,12 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ); final double exceptionRate; final double exceptionOnOpenRate; @@ -108,7 +105,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, 
InterruptedExc if (createIndexWithoutErrors) { Settings.Builder settings = Settings.builder().put("index.number_of_replicas", numberOfReplicas()); logger.info("creating index: [test] using settings: [{}]", settings.build()); - client().admin().indices().prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setSettings(settings).setMapping(mapping).get(); numInitialDocs = between(10, 100); ensureGreen(); for (int i = 0; i < numInitialDocs; i++) { @@ -134,7 +131,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc // we cannot expect that the index will be valid .put(MockFSDirectoryFactory.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.getKey(), exceptionOnOpenRate); logger.info("creating index: [test] using settings: [{}]", settings.build()); - client().admin().indices().prepareCreate("test").setSettings(settings).addMapping("type", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setSettings(settings).setMapping(mapping).get(); } ClusterHealthResponse clusterHealthResponse = client().admin() .cluster() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index de2926cadc032..f0fe5e4479b76 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -51,7 +51,6 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.analysis.AbstractIndexAnalyzerProvider; import 
org.opensearch.index.analysis.AnalyzerProvider; import org.opensearch.index.analysis.PreConfiguredTokenFilter; @@ -3292,7 +3291,6 @@ public void testKeywordFieldHighlighting() throws IOException { public void testACopyFieldWithNestedQuery() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "nested") @@ -3310,9 +3308,8 @@ public void testACopyFieldWithNestedQuery() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); + prepareCreate("test").setMapping(mapping).get(); client().prepareIndex("test") .setId("1") @@ -3424,7 +3421,6 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { public void testWithNestedQuery() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("text") .field("type", "text") @@ -3441,9 +3437,8 @@ public void testWithNestedQuery() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - prepareCreate("test").addMapping("type", mapping, XContentType.JSON).get(); + prepareCreate("test").setMapping(mapping).get(); client().prepareIndex("test") .setId("1") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java index d899451660cb7..8322c9704eecb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java @@ -214,7 +214,6 @@ public void testShapeRelations() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("polygon") .startObject("properties") .startObject("area") .field("type", "geo_shape") @@ -222,13 +221,9 @@ 
public void testShapeRelations() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("shapes") - .addMapping("polygon", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes").setMapping(mapping); mappingRequest.get(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java index 2db5973a2aa85..7315155e39520 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java @@ -76,7 +76,6 @@ public void testOrientationPersistence() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -84,16 +83,14 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // create index - assertAcked(prepareCreate(idxName).addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName).setMapping(mapping)); mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -101,10 +98,9 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate(idxName + "2").addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName + "2").setMapping(mapping)); ensureGreen(idxName, idxName 
+ "2"); internalCluster().fullRestart(); @@ -227,7 +223,7 @@ public void testIndexShapeRouting() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping).get()); ensureGreen(); String source = "{\n" @@ -265,10 +261,10 @@ public void testIndexPolygonDateLine() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("vector").addMapping("doc", mappingVector, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("vector").setMapping(mappingVector).get()); ensureGreen(); - assertAcked(client().admin().indices().prepareCreate("quad").addMapping("doc", mappingQuad, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("quad").setMapping(mappingQuad).get()); ensureGreen(); String source = "{\n" + " \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\"" + "}"; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java index 479fd00e5e08b..28b00acd21479 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -68,7 +68,6 @@ public void testOrientationPersistence() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -77,16 +76,14 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); // create index - assertAcked(prepareCreate(idxName).addMapping("shape", mapping, 
XContentType.JSON)); + assertAcked(prepareCreate(idxName).setMapping(mapping)); mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("shape") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -95,10 +92,9 @@ public void testOrientationPersistence() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate(idxName + "2").addMapping("shape", mapping, XContentType.JSON)); + assertAcked(prepareCreate(idxName + "2").setMapping(mapping)); ensureGreen(idxName, idxName + "2"); internalCluster().fullRestart(); @@ -205,7 +201,7 @@ public void testIndexShapeRouting() throws Exception { + " }"; // create index - assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping, XContentType.JSON).get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping).get()); ensureGreen(); String source = "{\n" diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java index f5a2b76b89213..7ffd648d06611 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java @@ -42,7 +42,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.MoreLikeThisQueryBuilder; import org.opensearch.index.query.MoreLikeThisQueryBuilder.Item; @@ -283,12 +282,9 @@ public void testMoreLikeThisWithAliases() throws Exception { public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { String indexName = "foo"; String 
aliasName = "foo_name"; - String typeName = "bar"; - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate(indexName).addMapping(typeName, mapping, XContentType.JSON).get(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate(indexName).setMapping(mapping).get(); client().admin().indices().prepareAliases().addAlias(indexName, aliasName).get(); assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); @@ -309,10 +305,8 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { } public void testMoreLikeThisIssue2197() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).get(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate("foo").setMapping(mapping).get(); client().prepareIndex("foo") .setId("1") .setSource(jsonBuilder().startObject().startObject("foo").field("bar", "boz").endObject().endObject()) @@ -332,10 +326,8 @@ public void testMoreLikeThisIssue2197() throws Exception { // Issue #2489 public void testMoreLikeWithCustomRouting() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); - client().admin().indices().prepareCreate("foo").addMapping("bar", mapping, XContentType.JSON).get(); + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); + client().admin().indices().prepareCreate("foo").setMapping(mapping).get(); ensureGreen(); client().prepareIndex("foo") @@ -354,14 +346,10 @@ public void testMoreLikeWithCustomRouting() throws Exception { // Issue #3039 public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { - String mapping = Strings.toString( - XContentFactory.jsonBuilder().startObject().startObject("bar").startObject("properties").endObject().endObject().endObject() - ); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties").endObject().endObject()); assertAcked( - prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)).addMapping( - "bar", - mapping, - XContentType.JSON + prepareCreate("foo", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put(SETTING_NUMBER_OF_REPLICAS, 0)).setMapping( + mapping ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index e4ad46c7599fe..c6c58e6fcb6a5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -719,25 +719,22 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { public void testNestedSortWithMultiLevelFiltering() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "{\n" - + " \"type1\": {\n" - + " \"properties\": {\n" - + " \"acl\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " \"access_id\": {\"type\": \"keyword\"},\n" - + " \"operation\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " 
\"name\": {\"type\": \"keyword\"},\n" - + " \"user\": {\n" - + " \"type\": \"nested\",\n" - + " \"properties\": {\n" - + " \"username\": {\"type\": \"keyword\"},\n" - + " \"id\": {\"type\": \"integer\"}\n" - + " }\n" + + " \"properties\": {\n" + + " \"acl\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"access_id\": {\"type\": \"keyword\"},\n" + + " \"operation\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"name\": {\"type\": \"keyword\"},\n" + + " \"user\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"username\": {\"type\": \"keyword\"},\n" + + " \"id\": {\"type\": \"integer\"}\n" + " }\n" + " }\n" + " }\n" @@ -745,8 +742,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { + " }\n" + " }\n" + " }\n" - + "}", - XContentType.JSON + + "}" ) ); ensureGreen(); @@ -965,8 +961,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { public void testLeakingSortValues() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put("number_of_shards", 1)) - .addMapping( - "test-type", + .setMapping( "{\n" + " \"dynamic\": \"strict\",\n" + " \"properties\": {\n" @@ -987,8 +982,7 @@ public void testLeakingSortValues() throws Exception { + " }\n" + " }\n" + " }\n" - + " }\n", - XContentType.JSON + + " }\n" ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java index 6bd4eec37407f..c53eda63f155f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java @@ -379,7 +379,6 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - 
.startObject("type1") .startObject("properties") .startObject("location") .field("type", "text") @@ -387,13 +386,9 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("test1") - .addMapping("type1", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping); mappingRequest.get(); indexRandom(true, client().prepareIndex("test1").setId("1").setSource("location", "Köln")); refresh(); @@ -431,7 +426,6 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("body") .field("type", "text") @@ -439,13 +433,9 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - CreateIndexRequestBuilder mappingRequest = client().admin() - .indices() - .prepareCreate("test1") - .addMapping("type1", mapping, XContentType.JSON); + CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test1").setMapping(mapping); mappingRequest.get(); indexRandom(true, client().prepareIndex("test1").setId("1").setSource("body", "Some Text")); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index c4697e63cb4f7..9c735c42052e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -43,7 +43,6 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.XContentBuilder; import 
org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.Scroll; import org.opensearch.search.SearchException; import org.opensearch.search.SearchHit; @@ -67,7 +66,6 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("invalid_random_kw") .field("type", "keyword") @@ -83,14 +81,13 @@ private void setupIndex(int numDocs, int numberOfShards) throws IOException, Exe .endObject() .endObject() .endObject() - .endObject() ); assertAcked( client().admin() .indices() .prepareCreate("test") .setSettings(Settings.builder().put("number_of_shards", numberOfShards).put("index.max_slices_per_scroll", 10000)) - .addMapping("type", mapping, XContentType.JSON) + .setMapping(mapping) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java index b4f511c3be123..70bb24532aa7d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java @@ -38,7 +38,6 @@ import org.opensearch.common.Strings; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.geo.GeoUtils; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; @@ -243,7 +242,6 @@ public void testSimpleSorts() throws Exception { public void testSortMinValueScript() throws IOException { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("lvalue") .field("type", "long") @@ -259,10 +257,9 @@ public void 
testSortMinValueScript() throws IOException { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); for (int i = 0; i < 10; i++) { @@ -359,7 +356,6 @@ public void testDocumentsWithNullValue() throws Exception { // be propagated to all nodes yet and sort operation fail when the sort field is not defined String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("id") .field("type", "keyword") @@ -369,9 +365,8 @@ public void testDocumentsWithNullValue() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - assertAcked(prepareCreate("test").addMapping("type1", mapping, XContentType.JSON)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); client().prepareIndex("test").setSource(jsonBuilder().startObject().field("id", "1").field("svalue", "aaa").endObject()).get(); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index dd8fcdec1ddf8..8b38308d39c93 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -245,19 +245,6 @@ public CreateIndexRequest mapping(String mapping) { return this; } - /** - * Adds mapping that will be added when the index gets created. 
- * - * @param type The mapping type - * @param source The mapping source - * @param xContentType The content type of the source - * @deprecated types are being removed - */ - @Deprecated - public CreateIndexRequest mapping(String type, String source, XContentType xContentType) { - return mapping(type, new BytesArray(source), xContentType); - } - /** * Adds mapping that will be added when the index gets created. * diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 94fec1d2a08f2..77e48d079cb5c 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -111,13 +111,11 @@ public CreateIndexRequestBuilder setSettings(Map source) { /** * Adds mapping that will be added when the index gets created. 
* - * @param type The mapping type * @param source The mapping source - * @param xContentType The content type of the source */ @Deprecated - public CreateIndexRequestBuilder addMapping(String type, String source, XContentType xContentType) { - request.mapping(type, source, xContentType); + public CreateIndexRequestBuilder setMapping(String source) { + request.mapping(source); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java index 6f631e7c086a9..a7af2f963d15b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java @@ -38,7 +38,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.xcontent.XContentType; public class RolloverRequestBuilder extends MasterNodeOperationRequestBuilder { public RolloverRequestBuilder(OpenSearchClient client, RolloverAction action) { @@ -90,11 +89,6 @@ public RolloverRequestBuilder mapping(String type, Object... source) { return this; } - public RolloverRequestBuilder mapping(String type, String source, XContentType xContentType) { - this.request.getCreateIndexRequest().mapping(type, source, xContentType); - return this; - } - /** * Sets the number of shard copies that should be active for creation of the * new rollover index to return. 
Defaults to {@link ActiveShardCount#DEFAULT}, which will diff --git a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java index 472d389a23890..320db79428300 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -35,7 +35,6 @@ import org.opensearch.OpenSearchParseException; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.common.Strings; -import org.opensearch.common.collect.MapBuilder; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.xcontent.LoggingDeprecationHandler; @@ -45,6 +44,7 @@ import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.index.mapper.MapperService; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; @@ -57,8 +57,10 @@ public class CreateIndexRequestTests extends OpenSearchTestCase { public void testSerialization() throws IOException { CreateIndexRequest request = new CreateIndexRequest("foo"); - String mapping = Strings.toString(JsonXContent.contentBuilder().startObject().startObject("my_type").endObject().endObject()); - request.mapping("my_type", mapping, XContentType.JSON); + String mapping = Strings.toString( + JsonXContent.contentBuilder().startObject().startObject(MapperService.SINGLE_MAPPING_NAME).endObject().endObject() + ); + request.mapping(mapping); try (BytesStreamOutput output = new BytesStreamOutput()) { request.writeTo(output); @@ -135,29 +137,6 @@ public void testMappingKeyedByType() throws IOException { request2.mapping("type1", builder); assertEquals(request1.mappings(), 
request2.mappings()); } - { - request1 = new CreateIndexRequest("foo"); - request2 = new CreateIndexRequest("bar"); - String nakedMapping = "{\"properties\": {\"foo\": {\"type\": \"integer\"}}}"; - request1.mapping("type2", nakedMapping, XContentType.JSON); - request2.mapping("type2", "{\"type2\": " + nakedMapping + "}", XContentType.JSON); - assertEquals(request1.mappings(), request2.mappings()); - } - { - request1 = new CreateIndexRequest("foo"); - request2 = new CreateIndexRequest("bar"); - Map nakedMapping = MapBuilder.newMapBuilder() - .put( - "properties", - MapBuilder.newMapBuilder() - .put("bar", MapBuilder.newMapBuilder().put("type", "scaled_float").put("scaling_factor", 100).map()) - .map() - ) - .map(); - request1.mapping("type3", nakedMapping); - request2.mapping("type3", MapBuilder.newMapBuilder().put("type3", nakedMapping).map()); - assertEquals(request1.mappings(), request2.mappings()); - } } public void testSettingsType() throws IOException { diff --git a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java index eae52efa391a1..b58c0bf69c298 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java @@ -40,7 +40,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.env.Environment; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; @@ -175,7 +174,7 @@ public void testPartitionedConstraints() { client().admin() .indices() .prepareCreate("test-index") - .addMapping("type", "{\"type\":{}}", XContentType.JSON) + .setMapping("{\"" + MapperService.SINGLE_MAPPING_NAME + "\":{}}") .setSettings(Settings.builder().put("index.number_of_shards", 
4).put("index.routing_partition_size", 2)) .execute() .actionGet(); @@ -187,7 +186,7 @@ public void testPartitionedConstraints() { client().admin() .indices() .prepareCreate("test-index") - .addMapping("type", "{\"type\":{\"_routing\":{\"required\":true}}}", XContentType.JSON) + .setMapping("{\"_routing\":{\"required\":true}}") .setSettings(Settings.builder().put("index.number_of_shards", 4).put("index.routing_partition_size", 2)) .execute() .actionGet() diff --git a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java index eb8cc7e6113e1..1722cb564e231 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java @@ -432,16 +432,14 @@ public void testGeometryCollectionRelations() throws Exception { public void testEdgeCases() throws Exception { XContentBuilder xcb = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("geo") .field("type", "geo_shape") .endObject() .endObject() - .endObject() .endObject(); String mapping = Strings.toString(xcb); - client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); ensureGreen(); client().prepareIndex("test") @@ -629,7 +627,6 @@ public void testPointsOnly() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -640,10 +637,9 @@ public void testPointsOnly() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).get(); + 
client().admin().indices().prepareCreate("geo_points_only").setMapping(mapping).get(); ensureGreen(); ShapeBuilder shape = RandomShapeGenerator.createShape(random()); @@ -669,7 +665,6 @@ public void testPointsOnlyExplicit() throws Exception { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("geo") .field("type", "geo_shape") @@ -680,10 +675,9 @@ public void testPointsOnlyExplicit() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); - client().admin().indices().prepareCreate("geo_points_only").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("geo_points_only").setMapping(mapping).get(); ensureGreen(); // MULTIPOINT @@ -710,7 +704,7 @@ public void testPointsOnlyExplicit() throws Exception { public void testIndexedShapeReference() throws Exception { String mapping = Strings.toString(createDefaultMapping()); - client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); createIndex("shapes"); ensureGreen(); @@ -907,7 +901,7 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { public void testDistanceQuery() throws Exception { String mapping = Strings.toString(createRandomMapping()); - client().admin().indices().prepareCreate("test_distance").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test_distance").setMapping(mapping).get(); ensureGreen(); CircleBuilder circleBuilder = new CircleBuilder().center(new Coordinate(1, 0)).radius(350, DistanceUnit.KILOMETERS); @@ -950,7 +944,7 @@ public void testDistanceQuery() throws Exception { public void testIndexRectangleSpanningDateLine() throws Exception { String mapping = Strings.toString(createRandomMapping()); - 
client().admin().indices().prepareCreate("test").addMapping("type1", mapping, XContentType.JSON).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); ensureGreen(); EnvelopeBuilder envelopeBuilder = new EnvelopeBuilder(new Coordinate(178, 10), new Coordinate(-178, -10)); From 12dd5d76b5c85b7abea05a9c329183ddabb5f602 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Tue, 15 Mar 2022 11:43:20 -0400 Subject: [PATCH 30/46] repository-azure: revert the fix for https://github.com/opensearch-project/OpenSearch/issues/1734 once upstream solution is available (#2446) Signed-off-by: Andriy Redko --- plugins/repository-azure/build.gradle | 14 +- .../licenses/azure-core-1.22.0.jar.sha1 | 1 - .../licenses/azure-core-1.26.0.jar.sha1 | 1 + .../azure-core-http-netty-1.11.7.jar.sha1 | 1 - .../azure-core-http-netty-1.11.8.jar.sha1 | 1 + .../azure-storage-blob-12.14.1.jar.sha1 | 1 - .../azure-storage-blob-12.14.4.jar.sha1 | 1 + .../azure-storage-common-12.14.3.jar.sha1 | 1 - .../azure-storage-common-12.15.0.jar.sha1 | 1 + .../licenses/reactor-netty-1.0.13.jar.sha1 | 1 - .../licenses/reactor-netty-1.0.16.jar.sha1 | 1 + .../reactor-netty-core-1.0.13.jar.sha1 | 1 - .../reactor-netty-core-1.0.16.jar.sha1 | 1 + .../reactor-netty-http-1.0.13.jar.sha1 | 1 - .../reactor-netty-http-1.0.16.jar.sha1 | 1 + .../repositories/azure/AzureBlobStore.java | 181 ++++++------------ 16 files changed, 73 insertions(+), 136 deletions(-) delete mode 100644 plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 delete mode 100644 
plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 create mode 100644 plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 3dc089ef8acb7..60fb99f459454 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -44,9 +44,9 @@ opensearchplugin { } dependencies { - api 'com.azure:azure-core:1.22.0' - api 'com.azure:azure-storage-common:12.14.3' - api 'com.azure:azure-core-http-netty:1.11.7' + api 'com.azure:azure-core:1.26.0' + api 'com.azure:azure-storage-common:12.15.0' + api 'com.azure:azure-core-http-netty:1.11.8' api "io.netty:netty-codec-dns:${versions.netty}" api "io.netty:netty-codec-socks:${versions.netty}" api "io.netty:netty-codec-http2:${versions.netty}" @@ -54,12 +54,12 @@ dependencies { api "io.netty:netty-resolver-dns:${versions.netty}" api "io.netty:netty-transport-native-unix-common:${versions.netty}" implementation project(':modules:transport-netty4') - api 'com.azure:azure-storage-blob:12.14.1' + api 'com.azure:azure-storage-blob:12.14.4' api 'org.reactivestreams:reactive-streams:1.0.3' api 'io.projectreactor:reactor-core:3.4.15' - api 'io.projectreactor.netty:reactor-netty:1.0.13' - api 'io.projectreactor.netty:reactor-netty-core:1.0.13' - api 'io.projectreactor.netty:reactor-netty-http:1.0.13' + api 'io.projectreactor.netty:reactor-netty:1.0.16' + api 
'io.projectreactor.netty:reactor-netty-core:1.0.16' + api 'io.projectreactor.netty:reactor-netty-http:1.0.16' api "org.slf4j:slf4j-api:${versions.slf4j}" api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" diff --git a/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 deleted file mode 100644 index f57b83e5d9715..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-1.22.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -194b21b804c20c85f7d2a6199280075f6747e188 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 b/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 new file mode 100644 index 0000000000000..693c6a721959c --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-1.26.0.jar.sha1 @@ -0,0 +1 @@ +461b89dcf8948a0c4a97d4f1d876f778d0cac7aa \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 deleted file mode 100644 index 25db85393f2af..0000000000000 --- a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.7.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c6b14fcca3e75acc8dbe07ac101afd05d48a1647 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 new file mode 100644 index 0000000000000..df7d7ae4ce285 --- /dev/null +++ b/plugins/repository-azure/licenses/azure-core-http-netty-1.11.8.jar.sha1 @@ -0,0 +1 @@ +0ea66d4531fb41cb3b5ab55e2e7b7f301e7f8503 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 deleted file mode 100644 index 
d9c6f462089e3..0000000000000 --- a/plugins/repository-azure/licenses/azure-storage-blob-12.14.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -384763aef32d779ee22ef3faa03049fee7e0f6de \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 new file mode 100644 index 0000000000000..5333f8fa90ada --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-blob-12.14.4.jar.sha1 @@ -0,0 +1 @@ +2b92020693d09e4980b96d278e8038a1087afea0 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 deleted file mode 100644 index b7cb4342c014c..0000000000000 --- a/plugins/repository-azure/licenses/azure-storage-common-12.14.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e8d6258aa8bf1594980c01294e60de74d13a815f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 b/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 new file mode 100644 index 0000000000000..1f3adfc161c7f --- /dev/null +++ b/plugins/repository-azure/licenses/azure-storage-common-12.15.0.jar.sha1 @@ -0,0 +1 @@ +4d63ce8bbd20379c5e5262b1204ceac7b31a7743 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 deleted file mode 100644 index be6cfc229b9b2..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cf216a9ba6b50210664761add9db744c9c3f51d8 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..582380e449a1d --- /dev/null +++ 
b/plugins/repository-azure/licenses/reactor-netty-1.0.16.jar.sha1 @@ -0,0 +1 @@ +d90829f6127966b0c35c4a3e8e23ca9ed29cd8a5 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 deleted file mode 100644 index 8f81861f48dde..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-core-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a67949c5946dd66c7ab0a3b059213c23345c32b1 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..0d1a0cb20c80f --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-core-1.0.16.jar.sha1 @@ -0,0 +1 @@ +8f842a912677f2bc614ff60fb9e786d4fa429c34 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 deleted file mode 100644 index e6b4cb0b9a4e8..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-http-1.0.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -de7a38101098db9438c18fdd09acc5b79a2ec02a \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 new file mode 100644 index 0000000000000..d737315b06b62 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-http-1.0.16.jar.sha1 @@ -0,0 +1 @@ +93edb9a1dc774d843551a616e0f316e11ffa81ed \ No newline at end of file diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java index 753c902a6eb01..b540dd83c95a2 100644 --- 
a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureBlobStore.java @@ -35,7 +35,6 @@ import com.azure.core.http.HttpMethod; import com.azure.core.http.HttpRequest; import com.azure.core.http.HttpResponse; -import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.storage.blob.BlobClient; @@ -52,7 +51,6 @@ import com.azure.storage.blob.options.BlobParallelUploadOptions; import com.azure.storage.common.implementation.Constants; -import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.util.Throwables; @@ -84,7 +82,6 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicLong; @@ -220,71 +217,50 @@ public DeleteResult deleteBlobDirectory(String path, Executor executor) throws U final ListBlobsOptions listBlobsOptions = new ListBlobsOptions().setPrefix(path); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed. 
- final Optional> pageOpt = blobContainer.listBlobs(listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; - } - - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping prefixes as those are not deletable and should not be there - assert (blobItem.isPrefix() == null || !blobItem.isPrefix()) : "Only blobs (not prefixes) are expected"; - - outstanding.incrementAndGet(); - executor.execute(new AbstractRunnable() { - @Override - protected void doRun() throws Exception { - final long len = blobItem.getProperties().getContentLength(); - - final BlobClient azureBlob = blobContainer.getBlobClient(blobItem.getName()); - logger.trace( - () -> new ParameterizedMessage("container [{}]: blob [{}] found. removing.", container, blobItem.getName()) - ); - final Response response = azureBlob.deleteWithResponse(null, null, timeout(), client.v2().get()); - logger.trace( - () -> new ParameterizedMessage( - "container [{}]: blob [{}] deleted status [{}].", - container, - blobItem.getName(), - response.getStatusCode() - ) - ); - - blobsDeleted.incrementAndGet(); - if (len >= 0) { - bytesDeleted.addAndGet(len); - } + for (final BlobItem blobItem : blobContainer.listBlobs(listBlobsOptions, timeout())) { + // Skipping prefixes as those are not deletable and should not be there + assert (blobItem.isPrefix() == null || !blobItem.isPrefix()) : "Only blobs (not prefixes) are expected"; + + outstanding.incrementAndGet(); + executor.execute(new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + final long len = blobItem.getProperties().getContentLength(); + + final BlobClient azureBlob = blobContainer.getBlobClient(blobItem.getName()); + logger.trace( + () -> new ParameterizedMessage("container [{}]: blob [{}] found. 
removing.", container, blobItem.getName()) + ); + final Response response = azureBlob.deleteWithResponse(null, null, timeout(), client.v2().get()); + logger.trace( + () -> new ParameterizedMessage( + "container [{}]: blob [{}] deleted status [{}].", + container, + blobItem.getName(), + response.getStatusCode() + ) + ); + + blobsDeleted.incrementAndGet(); + if (len >= 0) { + bytesDeleted.addAndGet(len); } + } - @Override - public void onFailure(Exception e) { - exceptions.add(e); - } + @Override + public void onFailure(Exception e) { + exceptions.add(e); + } - @Override - public void onAfter() { - if (outstanding.decrementAndGet() == 0) { - result.onResponse(null); - } + @Override + public void onAfter() { + if (outstanding.decrementAndGet() == 0) { + result.onResponse(null); } - }); - } - - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + } + }); + } }); - if (outstanding.decrementAndGet() == 0) { result.onResponse(null); } @@ -325,39 +301,19 @@ public Map listBlobsByPrefix(String keyPath, String prefix .setPrefix(keyPath + (prefix == null ? 
"" : prefix)); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed - final Optional> pageOpt = blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; + for (final BlobItem blobItem : blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout())) { + // Skipping over the prefixes, only look for the blobs + if (blobItem.isPrefix() != null && blobItem.isPrefix()) { + continue; } - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping over the prefixes, only look for the blobs - if (blobItem.isPrefix() != null && blobItem.isPrefix()) { - continue; - } + final String name = getBlobName(blobItem.getName(), container, keyPath); + logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - final String name = getBlobName(blobItem.getName(), container, keyPath); - logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - - final BlobItemProperties properties = blobItem.getProperties(); - logger.trace(() -> new ParameterizedMessage("blob name [{}], size [{}]", name, properties.getContentLength())); - blobsBuilder.put(name, new PlainBlobMetadata(name, properties.getContentLength())); - } - - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + final BlobItemProperties properties = blobItem.getProperties(); + logger.trace(() -> new ParameterizedMessage("blob name [{}], size [{}]", name, properties.getContentLength())); + blobsBuilder.put(name, new PlainBlobMetadata(name, properties.getContentLength())); + } 
}); return MapBuilder.newMapBuilder(blobsBuilder).immutableMap(); @@ -373,36 +329,17 @@ public Map children(BlobPath path) throws URISyntaxExcept .setPrefix(keyPath); SocketAccess.doPrivilegedVoidException(() -> { - String continuationToken = null; - - do { - // Fetch one page at a time, others are going to be fetched by continuation token - // TODO: reconsider reverting to simplified approach once https://github.com/Azure/azure-sdk-for-java/issues/26064 - // gets addressed - final Optional> pageOpt = blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout()) - .streamByPage(continuationToken) - .findFirst(); - - if (!pageOpt.isPresent()) { - // No more pages, should never happen - break; - } - - final PagedResponse page = pageOpt.get(); - for (final BlobItem blobItem : page.getValue()) { - // Skipping over the blobs, only look for prefixes - if (blobItem.isPrefix() != null && blobItem.isPrefix()) { - // Expecting name in the form /container/keyPath.* and we want to strip off the /container/ - // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /. - // Lastly, we add the length of keyPath to the offset to strip this container's path. - final String name = getBlobName(blobItem.getName(), container, keyPath).replaceAll("/$", ""); - logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); - blobsBuilder.add(name); - } + for (final BlobItem blobItem : blobContainer.listBlobsByHierarchy("/", listBlobsOptions, timeout())) { + // Skipping over the blobs, only look for prefixes + if (blobItem.isPrefix() != null && blobItem.isPrefix()) { + // Expecting name in the form /container/keyPath.* and we want to strip off the /container/ + // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /. + // Lastly, we add the length of keyPath to the offset to strip this container's path. 
+ final String name = getBlobName(blobItem.getName(), container, keyPath).replaceAll("/$", ""); + logger.trace(() -> new ParameterizedMessage("blob name [{}]", name)); + blobsBuilder.add(name); } - // Fetch next continuation token - continuationToken = page.getContinuationToken(); - } while (StringUtils.isNotBlank(continuationToken)); + } }); return Collections.unmodifiableMap( From b69dc335ad4cfaf421abaca9e921fcf08c1d45d8 Mon Sep 17 00:00:00 2001 From: "Kyle J. Davis" Date: Tue, 15 Mar 2022 09:56:00 -0600 Subject: [PATCH 31/46] Add trademark notice (#2473) * adds notice to README Signed-off-by: Kyle Davis * adds trademark heading Signed-off-by: Kyle Davis --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index e4bdb4d85b632..ec9cae6e432b2 100644 --- a/README.md +++ b/README.md @@ -39,3 +39,9 @@ This project is licensed under the [Apache v2.0 License](LICENSE.txt). ## Copyright Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details. + +## Trademark + +OpenSearch is a registered trademark of Amazon Web Services. + +OpenSearch includes certain Apache-licensed Elasticsearch code from Elasticsearch B.V. and other source code. Elasticsearch B.V. is not the source of that other source code. ELASTICSEARCH is a registered trademark of Elasticsearch B.V. \ No newline at end of file From 757abdb9a0d22682cac537b32d19e22ee47f0e33 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 15 Mar 2022 13:13:54 -0500 Subject: [PATCH 32/46] [Refactor] LuceneChangesSnapshot to use accurate ops history (#2452) Improves the LuceneChangesSnapshot to get an accurate count of recovery operations using sort by sequence number optimization. 
Signed-off-by: Nicholas Walter Knize --- .../20_missing_field.yml | 4 ++ .../opensearch/index/shard/IndexShardIT.java | 2 +- .../org/opensearch/index/engine/Engine.java | 18 ++++++- .../index/engine/InternalEngine.java | 26 +++++++++- .../index/engine/LuceneChangesSnapshot.java | 50 ++++++++++++++----- .../index/engine/ReadOnlyEngine.java | 15 +++++- .../opensearch/index/shard/IndexShard.java | 27 ++++++++-- .../index/shard/PrimaryReplicaSyncer.java | 2 +- .../recovery/RecoverySourceHandler.java | 27 ++++++---- .../indices/recovery/RecoveryTarget.java | 10 ++-- .../index/engine/InternalEngineTests.java | 8 ++- .../engine/LuceneChangesSnapshotTests.java | 32 +++++++----- .../IndexLevelReplicationTests.java | 6 +-- .../indices/recovery/RecoveryTests.java | 2 +- .../index/engine/EngineTestCase.java | 4 +- 15 files changed, 174 insertions(+), 59 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml index 2f15334f882a9..a36f807e63e0e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yml @@ -1,5 +1,9 @@ --- "Return empty object if field doesn't exist, but index does": + - skip: + version: "all" + reason: "AwaitsFix https://github.com/opensearch-project/OpenSearch/issues/2440" + - do: indices.create: index: test_index diff --git a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java index 0e915577dc467..efc522a1f9741 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/shard/IndexShardIT.java @@ -764,7 +764,7 @@ public void 
testShardChangesWithDefaultDocType() throws Exception { } IndexShard shard = indexService.getShard(0); try ( - Translog.Snapshot luceneSnapshot = shard.newChangesSnapshot("test", 0, numOps - 1, true); + Translog.Snapshot luceneSnapshot = shard.newChangesSnapshot("test", 0, numOps - 1, true, randomBoolean()); Translog.Snapshot translogSnapshot = getTranslog(shard).newSnapshot() ) { List opsFromLucene = TestTranslog.drainSnapshot(luceneSnapshot, true); diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index 7cf7b3245c0e5..825d71d6d1024 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -735,8 +735,22 @@ public enum SearcherScope { * Creates a new history snapshot from Lucene for reading operations whose seqno in the requesting seqno range (both inclusive). * This feature requires soft-deletes enabled. If soft-deletes are disabled, this method will throw an {@link IllegalStateException}. 
*/ - public abstract Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) - throws IOException; + public abstract Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException; + + /** + * Counts the number of history operations in the given sequence number range + * @param source source of the request + * @param fromSeqNo from sequence number; included + * @param toSeqNumber to sequence number; included + * @return number of history operations + */ + public abstract int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNumber) throws IOException; public abstract boolean hasCompleteOperationHistory(String reason, long startingSeqNo); diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 438bb0b290b9c..1c5f06e85cb88 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -2772,7 +2772,13 @@ long getNumDocUpdates() { } @Override - public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { + public Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException { ensureOpen(); refreshIfNeeded(source, toSeqNo); Searcher searcher = acquireSearcher(source, SearcherScope.INTERNAL); @@ -2782,7 +2788,8 @@ public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long LuceneChangesSnapshot.DEFAULT_BATCH_SIZE, fromSeqNo, toSeqNo, - requiredFullRange + requiredFullRange, + accurateCount ); searcher = null; return snapshot; @@ -2798,6 +2805,21 @@ public Translog.Snapshot newChangesSnapshot(String 
source, long fromSeqNo, long } } + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + ensureOpen(); + refreshIfNeeded(source, toSeqNo); + try (Searcher s = acquireSearcher(source, SearcherScope.INTERNAL)) { + return LuceneChangesSnapshot.countNumberOfHistoryOperations(s, fromSeqNo, toSeqNo); + } catch (IOException e) { + try { + maybeFailEngine(source, e); + } catch (Exception innerException) { + e.addSuppressed(innerException); + } + throw e; + } + } + public boolean hasCompleteOperationHistory(String reason, long startingSeqNo) { return getMinRetainedSeqNo() <= startingSeqNo; } diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index d640cf1468ec3..ae1dc9e647073 100644 --- a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -38,16 +38,19 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.util.ArrayUtil; +import org.opensearch.Version; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.lucene.Lucene; +import org.opensearch.common.lucene.search.Queries; import org.opensearch.core.internal.io.IOUtils; import org.opensearch.index.fieldvisitor.FieldsVisitor; import org.opensearch.index.mapper.SeqNoFieldMapper; @@ -88,8 +91,14 @@ final class 
LuceneChangesSnapshot implements Translog.Snapshot { * @param toSeqNo the maximum requesting seq# - inclusive * @param requiredFullRange if true, the snapshot will strictly check for the existence of operations between fromSeqNo and toSeqNo */ - LuceneChangesSnapshot(Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, long toSeqNo, boolean requiredFullRange) - throws IOException { + LuceneChangesSnapshot( + Engine.Searcher engineSearcher, + int searchBatchSize, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException { if (fromSeqNo < 0 || toSeqNo < 0 || fromSeqNo > toSeqNo) { throw new IllegalArgumentException("Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + toSeqNo + "]"); } @@ -111,7 +120,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.indexSearcher = new IndexSearcher(Lucene.wrapAllDocsLive(engineSearcher.getDirectoryReader())); this.indexSearcher.setQueryCache(null); this.parallelArray = new ParallelArray(this.searchBatchSize); - final TopDocs topDocs = searchOperations(null); + final TopDocs topDocs = searchOperations(null, accurateCount); this.totalHits = Math.toIntExact(topDocs.totalHits.value); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); @@ -187,7 +196,7 @@ private int nextDocIndex() throws IOException { // we have processed all docs in the current search - fetch the next batch if (docIndex == scoreDocs.length && docIndex > 0) { final ScoreDoc prev = scoreDocs[scoreDocs.length - 1]; - scoreDocs = searchOperations(prev).scoreDocs; + scoreDocs = searchOperations((FieldDoc) prev, false).scoreDocs; fillParallelArray(scoreDocs, parallelArray); docIndex = 0; } @@ -236,16 +245,31 @@ private void fillParallelArray(ScoreDoc[] scoreDocs, ParallelArray parallelArray } } - private TopDocs searchOperations(ScoreDoc after) throws IOException { - final Query rangeQuery = new BooleanQuery.Builder().add( - 
LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo), - BooleanClause.Occur.MUST - ) - // exclude non-root nested documents - .add(new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME), BooleanClause.Occur.MUST) + private static Query operationsRangeQuery(long fromSeqNo, long toSeqNo) { + return new BooleanQuery.Builder().add(LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, fromSeqNo, toSeqNo), BooleanClause.Occur.MUST) + .add(Queries.newNonNestedFilter(Version.CURRENT), BooleanClause.Occur.MUST) // exclude non-root nested docs .build(); + } + + static int countNumberOfHistoryOperations(Engine.Searcher searcher, long fromSeqNo, long toSeqNo) throws IOException { + if (fromSeqNo > toSeqNo || fromSeqNo < 0 || toSeqNo < 0) { + throw new IllegalArgumentException("Invalid sequence range; fromSeqNo [" + fromSeqNo + "] toSeqNo [" + toSeqNo + "]"); + } + IndexSearcher indexSearcher = new IndexSearcher(Lucene.wrapAllDocsLive(searcher.getDirectoryReader())); + return indexSearcher.count(operationsRangeQuery(fromSeqNo, toSeqNo)); + } + + private TopDocs searchOperations(FieldDoc after, boolean accurate) throws IOException { + final Query rangeQuery = operationsRangeQuery(Math.max(fromSeqNo, lastSeenSeqNo), toSeqNo); final Sort sortedBySeqNo = new Sort(new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG)); - return indexSearcher.searchAfter(after, rangeQuery, searchBatchSize, sortedBySeqNo); + final TopFieldCollector topFieldCollector = TopFieldCollector.create( + sortedBySeqNo, + searchBatchSize, + after, + accurate ? 
Integer.MAX_VALUE : 0 + ); + indexSearcher.search(rangeQuery, topFieldCollector); + return topFieldCollector.topDocs(); } private Translog.Operation readDocAsOp(int docIndex) throws IOException { diff --git a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java index 32d6b9b98d169..43fe10c217270 100644 --- a/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/ReadOnlyEngine.java @@ -325,10 +325,23 @@ public Closeable acquireHistoryRetentionLock() { } @Override - public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) { + public Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) { return newEmptySnapshot(); } + @Override + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + try (Translog.Snapshot snapshot = newChangesSnapshot(source, fromSeqNo, toSeqNo, false, true)) { + return snapshot.totalOperations(); + } + } + public boolean hasCompleteOperationHistory(String reason, long startingSeqNo) { // we can do operation-based recovery if we don't have to replay any operation. 
return startingSeqNo > seqNoStats.getMaxSeqNo(); diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index ad370051c53ac..f2630ad05b488 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -2231,13 +2231,13 @@ public Closeable acquireHistoryRetentionLock() { } /** - * * Creates a new history snapshot for reading operations since * the provided starting seqno (inclusive) and ending seqno (inclusive) * The returned snapshot can be retrieved from either Lucene index or translog files. */ - public Translog.Snapshot getHistoryOperations(String reason, long startingSeqNo, long endSeqNo) throws IOException { - return getEngine().newChangesSnapshot(reason, startingSeqNo, endSeqNo, true); + public Translog.Snapshot getHistoryOperations(String reason, long startingSeqNo, long endSeqNo, boolean accurateCount) + throws IOException { + return getEngine().newChangesSnapshot(reason, startingSeqNo, endSeqNo, true, accurateCount); } /** @@ -2257,6 +2257,17 @@ public long getMinRetainedSeqNo() { return getEngine().getMinRetainedSeqNo(); } + /** + * Counts the number of history operations within the provided sequence numbers + * @param source source of the requester (e.g., peer-recovery) + * @param fromSeqNo from sequence number, included + * @param toSeqNo to sequence number, included + * @return number of history operations in the sequence number range + */ + public int countNumberOfHistoryOperations(String source, long fromSeqNo, long toSeqNo) throws IOException { + return getEngine().countNumberOfHistoryOperations(source, fromSeqNo, toSeqNo); + } + /** * Creates a new changes snapshot for reading operations whose seq_no are between {@code fromSeqNo}(inclusive) * and {@code toSeqNo}(inclusive). The caller has to close the returned snapshot after finishing the reading. 
@@ -2268,8 +2279,14 @@ public long getMinRetainedSeqNo() { * if any operation between {@code fromSeqNo} and {@code toSeqNo} is missing. * This parameter should be only enabled when the entire requesting range is below the global checkpoint. */ - public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { - return getEngine().newChangesSnapshot(source, fromSeqNo, toSeqNo, requiredFullRange); + public Translog.Snapshot newChangesSnapshot( + String source, + long fromSeqNo, + long toSeqNo, + boolean requiredFullRange, + boolean accurateCount + ) throws IOException { + return getEngine().newChangesSnapshot(source, fromSeqNo, toSeqNo, requiredFullRange, accurateCount); } public List segments(boolean verbose) { diff --git a/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java index bbdf948af5c32..726d2925177fa 100644 --- a/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/opensearch/index/shard/PrimaryReplicaSyncer.java @@ -104,7 +104,7 @@ public void resync(final IndexShard indexShard, final ActionListener // Wrap translog snapshot to make it synchronized as it is accessed by different threads through SnapshotSender. 
// Even though those calls are not concurrent, snapshot.next() uses non-synchronized state and is not multi-thread-compatible // Also fail the resync early if the shard is shutting down - snapshot = indexShard.newChangesSnapshot("resync", startingSeqNo, Long.MAX_VALUE, false); + snapshot = indexShard.newChangesSnapshot("resync", startingSeqNo, Long.MAX_VALUE, false, true); final Translog.Snapshot originalSnapshot = snapshot; final Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { @Override diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java index 7899b11330a34..77596f50a8a5e 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySourceHandler.java @@ -132,7 +132,7 @@ public class RecoverySourceHandler { private final CancellableThreads cancellableThreads = new CancellableThreads(); private final List resources = new CopyOnWriteArrayList<>(); private final ListenableFuture future = new ListenableFuture<>(); - private static final String PEER_RECOVERY_NAME = "peer-recovery"; + public static final String PEER_RECOVERY_NAME = "peer-recovery"; public RecoverySourceHandler( IndexShard shard, @@ -272,7 +272,7 @@ && isTargetSameHistory() logger.trace("performing file-based recovery followed by history replay starting at [{}]", startingSeqNo); try { - final int estimateNumOps = estimateNumberOfHistoryOperations(startingSeqNo); + final int estimateNumOps = countNumberOfHistoryOperations(startingSeqNo); final Releasable releaseStore = acquireStore(shard.store()); resources.add(releaseStore); sendFileStep.whenComplete(r -> IOUtils.close(wrappedSafeCommit, releaseStore), e -> { @@ -319,7 +319,7 @@ && isTargetSameHistory() sendFileStep.whenComplete(r -> { assert Transports.assertNotTransportThread(RecoverySourceHandler.this + 
"[prepareTargetForTranslog]"); // For a sequence based recovery, the target can keep its local translog - prepareTargetForTranslog(estimateNumberOfHistoryOperations(startingSeqNo), prepareEngineStep); + prepareTargetForTranslog(countNumberOfHistoryOperations(startingSeqNo), prepareEngineStep); }, onFailure); prepareEngineStep.whenComplete(prepareEngineTime -> { @@ -340,9 +340,15 @@ && isTargetSameHistory() final long endingSeqNo = shard.seqNoStats().getMaxSeqNo(); if (logger.isTraceEnabled()) { - logger.trace("snapshot translog for recovery; current size is [{}]", estimateNumberOfHistoryOperations(startingSeqNo)); + logger.trace("snapshot translog for recovery; current size is [{}]", countNumberOfHistoryOperations(startingSeqNo)); } - final Translog.Snapshot phase2Snapshot = shard.newChangesSnapshot(PEER_RECOVERY_NAME, startingSeqNo, Long.MAX_VALUE, false); + final Translog.Snapshot phase2Snapshot = shard.newChangesSnapshot( + PEER_RECOVERY_NAME, + startingSeqNo, + Long.MAX_VALUE, + false, + true + ); resources.add(phase2Snapshot); retentionLock.close(); @@ -403,10 +409,13 @@ private boolean isTargetSameHistory() { return targetHistoryUUID.equals(shard.getHistoryUUID()); } - private int estimateNumberOfHistoryOperations(long startingSeqNo) throws IOException { - try (Translog.Snapshot snapshot = shard.newChangesSnapshot(PEER_RECOVERY_NAME, startingSeqNo, Long.MAX_VALUE, false)) { - return snapshot.totalOperations(); - } + /** + * Counts the number of history operations from the starting sequence number + * @param startingSeqNo the starting sequence number to count; included + * @return number of history operations + */ + private int countNumberOfHistoryOperations(long startingSeqNo) throws IOException { + return shard.countNumberOfHistoryOperations(PEER_RECOVERY_NAME, startingSeqNo, Long.MAX_VALUE); } static void runUnderPrimaryPermit( diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java 
b/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java index 3ea7cad528e82..394b093059385 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveryTarget.java @@ -344,11 +344,11 @@ public void finalizeRecovery(final long globalCheckpoint, final long trimAboveSe private boolean hasUncommittedOperations() throws IOException { long localCheckpointOfCommit = Long.parseLong(indexShard.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)); - try ( - Translog.Snapshot snapshot = indexShard.newChangesSnapshot("peer-recovery", localCheckpointOfCommit + 1, Long.MAX_VALUE, false) - ) { - return snapshot.totalOperations() > 0; - } + return indexShard.countNumberOfHistoryOperations( + RecoverySourceHandler.PEER_RECOVERY_NAME, + localCheckpointOfCommit + 1, + Long.MAX_VALUE + ) > 0; } @Override diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index af9b913b11d56..33f09a3e67db8 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -6362,8 +6362,12 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { latch.await(); - Translog.Snapshot changes = engine.newChangesSnapshot("test", min, max, true); - changes.close(); + if (randomBoolean()) { + Translog.Snapshot changes = engine.newChangesSnapshot("test", min, max, true, randomBoolean()); + changes.close(); + } else { + engine.countNumberOfHistoryOperations("test", min, max); + } } }); snapshotThreads[i].start(); diff --git a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java index bd191e235369d..e3117e179e7fa 100644 --- 
a/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java +++ b/server/src/test/java/org/opensearch/index/engine/LuceneChangesSnapshotTests.java @@ -74,14 +74,14 @@ public void testBasics() throws Exception { long fromSeqNo = randomNonNegativeLong(); long toSeqNo = randomLongBetween(fromSeqNo, Long.MAX_VALUE); // Empty engine - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { IllegalStateException error = expectThrows(IllegalStateException.class, () -> drainAll(snapshot)); assertThat( error.getMessage(), containsString("Not all operations between from_seqno [" + fromSeqNo + "] and to_seqno [" + toSeqNo + "] found") ); } - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.size(0)); } int numOps = between(1, 100); @@ -114,7 +114,8 @@ public void testBasics() throws Exception { between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - false + false, + randomBoolean() ) ) { searcher = null; @@ -130,7 +131,8 @@ public void testBasics() throws Exception { between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -152,7 +154,8 @@ public void testBasics() throws Exception { between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - false + false, + randomBoolean() ) ) { searcher = null; @@ -167,7 +170,8 @@ public void testBasics() throws Exception { between(1, LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -187,7 +191,8 @@ public void testBasics() throws Exception { between(1, 
LuceneChangesSnapshot.DEFAULT_BATCH_SIZE), fromSeqNo, toSeqNo, - true + true, + randomBoolean() ) ) { searcher = null; @@ -199,7 +204,7 @@ public void testBasics() throws Exception { // Get snapshot via engine will auto refresh fromSeqNo = randomLongBetween(0, numOps - 1); toSeqNo = randomLongBetween(fromSeqNo, numOps - 1); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, randomBoolean())) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, randomBoolean(), randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsSeqNoRange(fromSeqNo, toSeqNo)); } } @@ -230,8 +235,11 @@ public void testSkipNonRootOfNestedDocuments() throws Exception { long maxSeqNo = engine.getLocalCheckpointTracker().getMaxSeqNo(); engine.refresh("test"); Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); - try (Translog.Snapshot snapshot = new LuceneChangesSnapshot(searcher, between(1, 100), 0, maxSeqNo, false)) { - assertThat(snapshot.totalOperations(), equalTo(seqNoToTerm.size())); + final boolean accurateCount = randomBoolean(); + try (Translog.Snapshot snapshot = new LuceneChangesSnapshot(searcher, between(1, 100), 0, maxSeqNo, false, accurateCount)) { + if (accurateCount == true) { + assertThat(snapshot.totalOperations(), equalTo(seqNoToTerm.size())); + } Translog.Operation op; while ((op = snapshot.next()) != null) { assertThat(op.toString(), op.primaryTerm(), equalTo(seqNoToTerm.get(op.seqNo()))); @@ -306,7 +314,7 @@ void pullOperations(InternalEngine follower) throws IOException { long fromSeqNo = followerCheckpoint + 1; long batchSize = randomLongBetween(0, 100); long toSeqNo = Math.min(fromSeqNo + batchSize, leaderCheckpoint); - try (Translog.Snapshot snapshot = leader.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = leader.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { 
translogHandler.run(follower, snapshot); } } @@ -352,7 +360,7 @@ private List drainAll(Translog.Snapshot snapshot) throws IOE public void testOverFlow() throws Exception { long fromSeqNo = randomLongBetween(0, 5); long toSeqNo = randomLongBetween(Long.MAX_VALUE - 5, Long.MAX_VALUE); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", fromSeqNo, toSeqNo, true, randomBoolean())) { IllegalStateException error = expectThrows(IllegalStateException.class, () -> drainAll(snapshot)); assertThat( error.getMessage(), diff --git a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java index a88db8473cae0..d262b5abec0f3 100644 --- a/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/opensearch/index/replication/IndexLevelReplicationTests.java @@ -499,7 +499,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { assertThat(snapshot.totalOperations(), equalTo(0)); } } - try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps)); } } @@ -517,7 +517,7 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(Collections.singletonList(noop2))); } } - try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = shard.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.containsOperationsInAnyOrder(expectedTranslogOps)); } } @@ -619,7 
+619,7 @@ public void testSeqNoCollision() throws Exception { shards.promoteReplicaToPrimary(replica2).get(); logger.info("--> Recover replica3 from replica2"); recoverReplica(replica3, replica2, true); - try (Translog.Snapshot snapshot = replica3.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = replica3.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, true)) { assertThat(snapshot.totalOperations(), equalTo(initDocs + 1)); final List expectedOps = new ArrayList<>(initOperations); expectedOps.add(op2); diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java index c714bd0eb85a2..5e09e0f2253df 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTests.java @@ -225,7 +225,7 @@ public void testRecoveryWithOutOfOrderDeleteWithSoftDeletes() throws Exception { IndexShard newReplica = shards.addReplicaWithExistingPath(orgPrimary.shardPath(), orgPrimary.routingEntry().currentNodeId()); shards.recoverReplica(newReplica); shards.assertAllEqual(3); - try (Translog.Snapshot snapshot = newReplica.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = newReplica.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { assertThat(snapshot, SnapshotMatchers.size(6)); } } diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index fe810a87358d0..2bce5a7c81794 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -1312,7 +1312,7 @@ public static List getDocIds(Engine engine, boolean refresh */ public static List readAllOperationsInLucene(Engine engine) 
throws IOException { final List operations = new ArrayList<>(); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { Translog.Operation op; while ((op = snapshot.next()) != null) { operations.add(op); @@ -1326,7 +1326,7 @@ public static List readAllOperationsInLucene(Engine engine) */ public static List readAllOperationsBasedOnSource(Engine engine) throws IOException { final List operations = new ArrayList<>(); - try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false)) { + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean())) { Translog.Operation op; while ((op = snapshot.next()) != null) { operations.add(op); From 006c832c5fe8f509aa6285de90f2c7583b3dff35 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Tue, 15 Mar 2022 15:48:13 -0500 Subject: [PATCH 33/46] [Upgrade] Lucene 9.0.0 release (#1109) This commit upgrades the core codebase from Lucene 8.10.1 to lucene 9.0.0. It includes all necessary refactoring of features and API changes when upgrading to a new major Lucene release. 
Signed-off-by: Nicholas Walter Knize Co-authored-by: Andriy Redko --- build.gradle | 5 +- .../src/main/resources/minimumRuntimeVersion | 2 +- buildSrc/version.properties | 4 +- .../common/settings/KeyStoreWrapperTests.java | 21 +- .../analysis/common/ClassicFilterFactory.java | 2 +- .../common/ClassicTokenizerFactory.java | 2 +- .../analysis/common/CommonAnalysisPlugin.java | 6 +- .../common/MinHashTokenFilterFactory.java | 16 +- .../common/UAX29URLEmailTokenizerFactory.java | 2 +- .../common/CommonAnalysisFactoryTests.java | 4 +- .../common/DisableGraphQueryTests.java | 8 +- .../lucene-expressions-8.10.1.jar.sha1 | 1 - .../lucene-expressions-9.0.0.jar.sha1 | 1 + .../expression/ExpressionScriptEngine.java | 7 +- .../plugin-metadata/plugin-security.policy | 1 + .../mapper/SearchAsYouTypeFieldMapper.java | 8 +- .../SearchAsYouTypeFieldMapperTests.java | 6 +- .../join/query/HasChildQueryBuilder.java | 6 + .../opensearch/percolator/PercolateQuery.java | 10 +- .../percolator/PercolatorFieldMapper.java | 4 +- .../PercolatorMatchedSlotSubFetchPhase.java | 5 +- .../opensearch/percolator/QueryAnalyzer.java | 4 +- .../percolator/CandidateQueryTests.java | 27 +- .../percolator/PercolateQueryTests.java | 4 +- .../PercolatorFieldMapperTests.java | 4 +- .../percolator/QueryAnalyzerTests.java | 16 +- plugins/analysis-icu/build.gradle | 4 +- .../analysis-icu/licenses/icu4j-62.1.jar.sha1 | 1 - .../analysis-icu/licenses/icu4j-68.2.jar.sha1 | 1 + .../lucene-analysis-icu-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-icu-8.10.1.jar.sha1 | 1 - .../index/analysis/ICUCollationKeyFilter.java | 2 +- plugins/analysis-kuromoji/build.gradle | 2 +- .../lucene-analysis-kuromoji-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-kuromoji-8.10.1.jar.sha1 | 1 - plugins/analysis-nori/build.gradle | 2 +- .../lucene-analysis-nori-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-nori-8.10.1.jar.sha1 | 1 - plugins/analysis-phonetic/build.gradle | 2 +- .../lucene-analysis-phonetic-9.0.0.jar.sha1 | 1 + 
.../lucene-analyzers-phonetic-8.10.1.jar.sha1 | 1 - plugins/analysis-smartcn/build.gradle | 2 +- .../lucene-analysis-smartcn-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-smartcn-8.10.1.jar.sha1 | 1 - plugins/analysis-stempel/build.gradle | 2 +- .../lucene-analysis-stempel-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-stempel-8.10.1.jar.sha1 | 1 - plugins/analysis-ukrainian/build.gradle | 2 +- .../lucene-analysis-morfologik-9.0.0.jar.sha1 | 1 + ...ucene-analyzers-morfologik-8.10.1.jar.sha1 | 1 - .../AnnotatedTextHighlighterTests.java | 1 - .../opensearch/index/store/SmbNIOFsTests.java | 27 +- server/build.gradle | 2 +- .../lucene-analysis-common-9.0.0.jar.sha1 | 1 + .../lucene-analyzers-common-8.10.1.jar.sha1 | 1 - .../lucene-backward-codecs-8.10.1.jar.sha1 | 1 - .../lucene-backward-codecs-9.0.0.jar.sha1 | 1 + server/licenses/lucene-core-8.10.1.jar.sha1 | 1 - server/licenses/lucene-core-9.0.0.jar.sha1 | 1 + .../licenses/lucene-grouping-8.10.1.jar.sha1 | 1 - .../licenses/lucene-grouping-9.0.0.jar.sha1 | 1 + .../lucene-highlighter-8.10.1.jar.sha1 | 1 - .../lucene-highlighter-9.0.0.jar.sha1 | 1 + server/licenses/lucene-join-8.10.1.jar.sha1 | 1 - server/licenses/lucene-join-9.0.0.jar.sha1 | 1 + server/licenses/lucene-memory-8.10.1.jar.sha1 | 1 - server/licenses/lucene-memory-9.0.0.jar.sha1 | 1 + server/licenses/lucene-misc-8.10.1.jar.sha1 | 1 - server/licenses/lucene-misc-9.0.0.jar.sha1 | 1 + .../licenses/lucene-queries-8.10.1.jar.sha1 | 1 - server/licenses/lucene-queries-9.0.0.jar.sha1 | 1 + .../lucene-queryparser-8.10.1.jar.sha1 | 1 - .../lucene-queryparser-9.0.0.jar.sha1 | 1 + .../licenses/lucene-sandbox-8.10.1.jar.sha1 | 1 - server/licenses/lucene-sandbox-9.0.0.jar.sha1 | 1 + .../lucene-spatial-extras-8.10.1.jar.sha1 | 1 - .../lucene-spatial-extras-9.0.0.jar.sha1 | 1 + .../licenses/lucene-spatial3d-8.10.1.jar.sha1 | 1 - .../licenses/lucene-spatial3d-9.0.0.jar.sha1 | 1 + .../licenses/lucene-suggest-8.10.1.jar.sha1 | 1 - server/licenses/lucene-suggest-9.0.0.jar.sha1 | 1 + 
.../recovery/IndexPrimaryRelocationIT.java | 1 + .../org/opensearch/recovery/RelocationIT.java | 14 +- .../highlight/HighlighterSearchIT.java | 30 + .../search/query/QueryStringIT.java | 2 - .../validate/SimpleValidateQueryIT.java | 7 +- .../similarity/LegacyBM25Similarity.java | 117 +++ .../queries/BinaryDocValuesRangeQuery.java | 8 + .../lucene/queries/SpanMatchNoDocsQuery.java | 16 +- .../uhighlight/CustomUnifiedHighlighter.java | 26 +- .../vectorhighlight/CustomFieldQuery.java | 2 +- .../apache/lucene/util/CombinedBitSet.java | 5 + .../apache/lucene/util/SPIClassIterator.java | 186 +++++ .../apache/lucene/util/packed/XPacked64.java | 317 ++++++++ .../util/packed/XPacked64SingleBlock.java | 574 ++++++++++++++ .../lucene/util/packed/XPackedInts.java | 740 ++++++++++++++++++ .../src/main/java/org/opensearch/Version.java | 2 +- .../segments/IndicesSegmentResponse.java | 7 - .../action/search/SearchPhaseController.java | 7 +- .../action/search/TransportSearchHelper.java | 9 +- .../opensearch/common/bytes/BytesArray.java | 4 +- .../common/bytes/CompositeBytesReference.java | 5 +- .../org/opensearch/common/geo/GeoUtils.java | 4 +- .../org/opensearch/common/lucene/Lucene.java | 17 +- .../common/lucene/MinimumScoreCollector.java | 2 +- .../lucene/search/MoreLikeThisQuery.java | 6 + .../lucene/search/MultiPhrasePrefixQuery.java | 6 + .../common/lucene/search/Queries.java | 4 +- .../SpanBooleanQueryRewriteWithMaxClause.java | 8 +- .../search/function/FunctionScoreQuery.java | 7 - .../search/function/ScriptScoreQuery.java | 7 - .../common/settings/KeyStoreWrapper.java | 8 +- .../opensearch/common/util/CuckooFilter.java | 14 +- .../gateway/MetadataStateFormat.java | 5 +- .../index/cache/bitset/BitsetFilterCache.java | 5 +- .../opensearch/index/codec/CodecService.java | 8 +- .../PerFieldMappingPostingFormatCodec.java | 8 +- .../org/opensearch/index/engine/Engine.java | 4 - .../index/engine/InternalEngine.java | 4 +- .../index/engine/LuceneChangesSnapshot.java | 3 +- 
.../engine/PrunePostingsMergePolicy.java | 5 - .../RecoverySourcePruneMergePolicy.java | 16 +- .../org/opensearch/index/engine/Segment.java | 28 +- .../index/engine/TranslogLeafReader.java | 22 +- .../fielddata/IndexNumericFieldData.java | 2 + .../plain/PagedBytesIndexFieldData.java | 55 +- .../plain/SortedNumericIndexFieldData.java | 2 +- .../index/fieldvisitor/FieldsVisitor.java | 10 +- .../fieldvisitor/SingleFieldsVisitor.java | 5 +- .../opensearch/index/get/ShardGetService.java | 5 +- .../index/mapper/CompletionFieldMapper.java | 4 +- .../index/mapper/DateFieldMapper.java | 2 +- .../index/mapper/MappedFieldType.java | 4 +- .../index/mapper/NumberFieldMapper.java | 4 +- .../opensearch/index/mapper/RangeType.java | 6 +- .../index/mapper/TextFieldMapper.java | 17 +- .../index/query/AbstractQueryBuilder.java | 6 +- .../query/FieldMaskingSpanQueryBuilder.java | 7 +- .../index/query/InnerHitContextBuilder.java | 4 +- .../index/query/NestedQueryBuilder.java | 4 +- .../index/query/ScriptQueryBuilder.java | 6 + .../query/SpanContainingQueryBuilder.java | 6 +- .../index/query/SpanFirstQueryBuilder.java | 4 +- .../query/SpanMultiTermQueryBuilder.java | 2 +- .../index/query/SpanNearQueryBuilder.java | 4 +- .../index/query/SpanNotQueryBuilder.java | 4 +- .../index/query/SpanOrQueryBuilder.java | 4 +- .../index/query/SpanTermQueryBuilder.java | 4 +- .../index/query/SpanWithinQueryBuilder.java | 6 +- .../index/query/TermsSetQueryBuilder.java | 2 +- .../opensearch/index/search/MatchQuery.java | 15 +- .../index/search/QueryStringQueryParser.java | 14 +- .../search/SimpleQueryStringQueryParser.java | 9 +- .../index/shard/ShardSplittingQuery.java | 15 +- .../opensearch/index/shard/StoreRecovery.java | 2 +- .../index/similarity/SimilarityProviders.java | 6 +- .../index/similarity/SimilarityService.java | 2 +- .../org/opensearch/index/store/Store.java | 36 +- .../opensearch/index/translog/Checkpoint.java | 31 +- .../opensearch/index/translog/Translog.java | 1 - 
.../index/translog/TranslogReader.java | 1 - .../translog/TruncateTranslogAction.java | 2 - .../opensearch/indices/IndicesQueryCache.java | 6 - .../indices/analysis/AnalysisModule.java | 2 +- .../indices/analysis/PreBuiltAnalyzers.java | 22 +- .../recovery/PeerRecoveryTargetService.java | 23 +- .../lucene/queries/MinDocQuery.java | 8 +- .../queries/SearchAfterSortedDocQuery.java | 8 +- .../opensearch/plugins/PluginsService.java | 7 - .../search/DefaultSearchContext.java | 2 +- .../org/opensearch/search/SearchService.java | 6 +- .../aggregations/MultiBucketCollector.java | 2 +- .../bucket/composite/CompositeAggregator.java | 2 +- .../composite/PointsSortedDocsProducer.java | 7 +- .../bucket/nested/NestedAggregator.java | 4 +- .../nested/ReverseNestedAggregator.java | 2 +- ...DiversifiedBytesHashSamplerAggregator.java | 4 +- .../DiversifiedMapSamplerAggregator.java | 4 +- .../DiversifiedNumericSamplerAggregator.java | 4 +- .../DiversifiedOrdinalsSamplerAggregator.java | 4 +- .../bucket/sampler/SamplerAggregator.java | 2 +- .../aggregations/metrics/InternalTopHits.java | 10 +- .../aggregations/metrics/MaxAggregator.java | 4 +- .../metrics/TopHitsAggregatorFactory.java | 6 +- .../opensearch/search/fetch/FetchPhase.java | 19 +- .../fetch/subphase/FetchDocValuesContext.java | 12 +- .../fetch/subphase/InnerHitsContext.java | 4 +- .../highlight/UnifiedHighlighter.java | 10 - .../search/internal/ContextIndexSearcher.java | 8 +- .../search/profile/query/ProfileWeight.java | 6 +- .../opensearch/search/query/QueryPhase.java | 33 +- .../search/query/TopDocsCollectorContext.java | 2 +- .../opensearch/search/slice/SliceQuery.java | 7 + .../opensearch/search/sort/SortBuilder.java | 2 +- .../org/opensearch/bootstrap/security.policy | 2 + .../org/opensearch/LegacyESVersionTests.java | 6 +- .../segments/IndicesSegmentsRequestTests.java | 14 - .../allocation/IndexShardHotSpotTests.java | 1 + .../opensearch/common/lucene/LuceneTests.java | 11 +- 
.../common/lucene/search/QueriesTests.java | 6 +- .../search/function/MinScoreScorerTests.java | 5 - .../gateway/MetadataStateFormatTests.java | 2 +- .../opensearch/index/codec/CodecTests.java | 41 +- .../engine/CompletionStatsCacheTests.java | 8 +- .../index/engine/InternalEngineTests.java | 4 - .../index/engine/LiveVersionMapTests.java | 4 +- .../opensearch/index/engine/SegmentTests.java | 2 +- .../index/engine/VersionValueTests.java | 4 +- .../index/mapper/DateFieldTypeTests.java | 2 +- .../index/mapper/NumberFieldTypeTests.java | 5 +- .../mapper/StoredNumericValuesTests.java | 2 +- .../index/mapper/TextFieldMapperTests.java | 8 +- .../index/query/DisMaxQueryBuilderTests.java | 31 +- .../FieldMaskingSpanQueryBuilderTests.java | 43 +- .../MatchBoolPrefixQueryBuilderTests.java | 4 +- .../index/query/MatchQueryBuilderTests.java | 8 +- .../query/MultiMatchQueryBuilderTests.java | 58 +- .../query/QueryStringQueryBuilderTests.java | 77 +- .../query/SimpleQueryStringBuilderTests.java | 35 +- .../SpanContainingQueryBuilderTests.java | 2 +- .../query/SpanFirstQueryBuilderTests.java | 2 +- .../index/query/SpanGapQueryBuilderTests.java | 13 +- .../query/SpanMultiTermQueryBuilderTests.java | 13 +- .../query/SpanNearQueryBuilderTests.java | 10 +- .../index/query/SpanNotQueryBuilderTests.java | 2 +- .../index/query/SpanOrQueryBuilderTests.java | 4 +- .../query/SpanTermQueryBuilderTests.java | 2 +- .../query/SpanWithinQueryBuilderTests.java | 2 +- .../query/TermsSetQueryBuilderTests.java | 2 +- .../query/plugin/DummyQueryParserPlugin.java | 6 + .../index/search/MultiMatchQueryTests.java | 9 +- .../search/nested/NestedSortingTests.java | 3 +- .../similarity/ScriptedSimilarityTests.java | 3 +- .../similarity/SimilarityServiceTests.java | 2 +- .../index/similarity/SimilarityTests.java | 2 +- .../opensearch/index/store/StoreTests.java | 11 +- .../index/translog/TranslogTests.java | 26 +- .../indices/IndicesQueryCacheTests.java | 13 +- .../indices/analysis/AnalysisModuleTests.java 
| 1 - .../indices/recovery/RecoveryStatusTests.java | 5 +- .../DeDuplicatingTokenFilterTests.java | 5 +- .../TruncateTokenFilterTests.java | 3 +- .../CollapsingTopDocsCollectorTests.java | 6 +- .../index/ShuffleForcedMergePolicyTests.java | 11 +- .../similarity/LegacyBM25SimilarityTests.java | 121 +++ ...ndomBinaryDocValuesRangeQueryTestCase.java | 2 +- .../BinaryDocValuesRangeQueryTests.java | 2 +- .../lucene/queries/BlendedTermQueryTests.java | 9 +- ...eRandomBinaryDocValuesRangeQueryTests.java | 2 +- ...tRandomBinaryDocValuesRangeQueryTests.java | 2 +- ...sRandomBinaryDocValuesRangeQueryTests.java | 21 +- ...rRandomBinaryDocValuesRangeQueryTests.java | 2 +- ...gRandomBinaryDocValuesRangeQueryTests.java | 2 +- .../lucene/queries/MinDocQueryTests.java | 2 +- .../SearchAfterSortedDocQueryTests.java | 2 +- .../queries/SpanMatchNoDocsQueryTests.java | 11 +- .../BoundedBreakIteratorScannerTests.java | 3 +- .../CustomPassageFormatterTests.java | 5 +- .../CustomUnifiedHighlighterTests.java | 90 ++- .../lucene/util/CombinedBitSetTests.java | 6 +- .../plugins/PluginsServiceTests.java | 4 + .../composite/CompositeAggregatorTests.java | 18 +- .../bucket/nested/NestedAggregatorTests.java | 6 +- .../metrics/MaxAggregatorTests.java | 3 +- .../internal/ContextIndexSearcherTests.java | 11 +- .../search/lookup/LeafFieldsLookupTests.java | 3 + .../profile/query/QueryProfilerTests.java | 15 +- .../search/query/QueryPhaseTests.java | 12 +- .../search/sort/FieldSortBuilderTests.java | 2 +- .../analysis/AnalysisFactoryTestCase.java | 17 +- .../aggregations/AggregatorTestCase.java | 2 +- .../test/AbstractQueryTestCase.java | 12 +- .../org/opensearch/test/CorruptionUtils.java | 2 +- .../test/hamcrest/OpenSearchAssertions.java | 9 - 274 files changed, 3052 insertions(+), 980 deletions(-) delete mode 100644 modules/lang-expression/licenses/lucene-expressions-8.10.1.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 delete mode 100644 
plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/icu4j-68.2.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.10.1.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.10.1.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.10.1.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.10.1.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.10.1.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.10.1.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-analysis-common-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-8.10.1.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-core-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-core-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-grouping-9.0.0.jar.sha1 delete mode 100644 
server/licenses/lucene-highlighter-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-join-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-join-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-memory-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-memory-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-misc-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-misc-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-queries-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-queries-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-9.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-8.10.1.jar.sha1 create mode 100644 server/licenses/lucene-suggest-9.0.0.jar.sha1 create mode 100644 server/src/main/java/org/apache/lucene/misc/search/similarity/LegacyBM25Similarity.java create mode 100644 server/src/main/java/org/apache/lucene/util/SPIClassIterator.java create mode 100644 server/src/main/java/org/apache/lucene/util/packed/XPacked64.java create mode 100644 server/src/main/java/org/apache/lucene/util/packed/XPacked64SingleBlock.java create mode 100644 server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java rename server/src/main/java/org/{apache => opensearch}/lucene/queries/MinDocQuery.java (96%) rename server/src/main/java/org/{apache => opensearch}/lucene/queries/SearchAfterSortedDocQuery.java (97%) rename 
server/src/test/java/org/{apache => opensearch}/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java (95%) rename server/src/test/java/org/{apache => opensearch}/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java (95%) rename server/src/test/java/org/{apache => opensearch}/lucene/grouping/CollapsingTopDocsCollectorTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/index/ShuffleForcedMergePolicyTests.java (89%) create mode 100644 server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java rename server/src/test/java/org/{apache => opensearch}/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/BinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/BlendedTermQueryTests.java (98%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java (81%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/MinDocQueryTests.java (98%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/SearchAfterSortedDocQueryTests.java (99%) rename server/src/test/java/org/{apache => opensearch}/lucene/queries/SpanMatchNoDocsQueryTests.java (93%) rename server/src/test/java/org/{apache => opensearch}/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java (98%) rename 
server/src/test/java/org/{apache => opensearch}/lucene/search/uhighlight/CustomPassageFormatterTests.java (95%) rename server/src/test/java/org/{apache => opensearch}/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java (82%) rename server/src/test/java/org/{apache => opensearch}/lucene/util/CombinedBitSetTests.java (95%) diff --git a/build.gradle b/build.gradle index 374bfb3ccfae3..be5766f327e0d 100644 --- a/build.gradle +++ b/build.gradle @@ -230,7 +230,10 @@ tasks.register("branchConsistency") { allprojects { // configure compiler options tasks.withType(JavaCompile).configureEach { JavaCompile compile -> - compile.options.compilerArgs << '-Werror' + // See please https://bugs.openjdk.java.net/browse/JDK-8209058 + if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_11) { + compile.options.compilerArgs << '-Werror' + } compile.options.compilerArgs << '-Xlint:auxiliaryclass' compile.options.compilerArgs << '-Xlint:cast' compile.options.compilerArgs << '-Xlint:classfile' diff --git a/buildSrc/src/main/resources/minimumRuntimeVersion b/buildSrc/src/main/resources/minimumRuntimeVersion index 9d607966b721a..b4de394767536 100644 --- a/buildSrc/src/main/resources/minimumRuntimeVersion +++ b/buildSrc/src/main/resources/minimumRuntimeVersion @@ -1 +1 @@ -11 \ No newline at end of file +11 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index bfc939394bdaa..7682a982e8186 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ opensearch = 2.0.0 -lucene = 8.10.1 +lucene = 9.0.0 bundled_jdk_vendor = adoptium bundled_jdk = 17.0.2+8 @@ -11,7 +11,7 @@ spatial4j = 0.7 jts = 1.15.0 jackson = 2.12.6 snakeyaml = 1.26 -icu4j = 62.1 +icu4j = 68.2 supercsv = 2.4.0 log4j = 2.17.1 slf4j = 1.6.2 diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java index 
52130d6e270df..2688e7637c9ba 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java +++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreWrapperTests.java @@ -32,7 +32,9 @@ package org.opensearch.common.settings; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.NIOFSDirectory; @@ -328,13 +330,14 @@ private void possiblyAlterEncryptedBytes( byte[] encryptedBytes, int truncEncryptedDataLength ) throws Exception { - indexOutput.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length); - indexOutput.writeInt(salt.length); - indexOutput.writeBytes(salt, salt.length); - indexOutput.writeInt(iv.length); - indexOutput.writeBytes(iv, iv.length); - indexOutput.writeInt(encryptedBytes.length - truncEncryptedDataLength); - indexOutput.writeBytes(encryptedBytes, encryptedBytes.length); + DataOutput io = EndiannessReverserUtil.wrapDataOutput(indexOutput); + io.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length); + io.writeInt(salt.length); + io.writeBytes(salt, salt.length); + io.writeInt(iv.length); + io.writeBytes(iv, iv.length); + io.writeInt(encryptedBytes.length - truncEncryptedDataLength); + io.writeBytes(encryptedBytes, encryptedBytes.length); } public void testUpgradeAddsSeed() throws Exception { @@ -363,7 +366,7 @@ public void testBackcompatV1() throws Exception { assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm()); Path configDir = env.configFile(); NIOFSDirectory directory = new NIOFSDirectory(configDir); - try (IndexOutput output = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) { + try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, 
"opensearch.keystore", IOContext.DEFAULT)) { CodecUtil.writeHeader(output, "opensearch.keystore", 1); output.writeByte((byte) 0); // hasPassword = false output.writeString("PKCS12"); @@ -396,7 +399,7 @@ public void testBackcompatV2() throws Exception { NIOFSDirectory directory = new NIOFSDirectory(configDir); byte[] fileBytes = new byte[20]; random().nextBytes(fileBytes); - try (IndexOutput output = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) { + try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, "opensearch.keystore", IOContext.DEFAULT)) { CodecUtil.writeHeader(output, "opensearch.keystore", 2); output.writeByte((byte) 0); // hasPassword = false diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicFilterFactory.java index a50ff8ac5bdd1..92e28b2ad9ee7 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicFilterFactory.java @@ -32,7 +32,7 @@ package org.opensearch.analysis.common; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.standard.ClassicFilter; +import org.apache.lucene.analysis.classic.ClassicFilter; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicTokenizerFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicTokenizerFactory.java index 978ba807336a5..9528e0991fe82 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ClassicTokenizerFactory.java @@ -33,7 +33,7 @@ package 
org.opensearch.analysis.common; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.standard.ClassicTokenizer; +import org.apache.lucene.analysis.classic.ClassicTokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java index 47a144311c0a7..c69917ed52be8 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisPlugin.java @@ -51,6 +51,8 @@ import org.apache.lucene.analysis.cjk.CJKWidthFilter; import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; +import org.apache.lucene.analysis.classic.ClassicFilter; +import org.apache.lucene.analysis.classic.ClassicTokenizer; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.core.KeywordTokenizer; @@ -64,6 +66,7 @@ import org.apache.lucene.analysis.de.GermanNormalizationFilter; import org.apache.lucene.analysis.de.GermanStemFilter; import org.apache.lucene.analysis.el.GreekAnalyzer; +import org.apache.lucene.analysis.email.UAX29URLEmailTokenizer; import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.en.KStemFilter; import org.apache.lucene.analysis.en.PorterStemFilter; @@ -113,10 +116,7 @@ import org.apache.lucene.analysis.ru.RussianAnalyzer; import org.apache.lucene.analysis.shingle.ShingleFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; -import org.apache.lucene.analysis.standard.ClassicFilter; -import 
org.apache.lucene.analysis.standard.ClassicTokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; import org.apache.lucene.analysis.sv.SwedishAnalyzer; import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.th.ThaiTokenizer; diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java index c9786ac89c005..e76354ae3a765 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MinHashTokenFilterFactory.java @@ -62,10 +62,18 @@ public TokenStream create(TokenStream tokenStream) { private Map convertSettings(Settings settings) { Map settingMap = new HashMap<>(); - settingMap.put("hashCount", settings.get("hash_count")); - settingMap.put("bucketCount", settings.get("bucket_count")); - settingMap.put("hashSetSize", settings.get("hash_set_size")); - settingMap.put("withRotation", settings.get("with_rotation")); + if (settings.hasValue("hash_count")) { + settingMap.put("hashCount", settings.get("hash_count")); + } + if (settings.hasValue("bucketCount")) { + settingMap.put("bucketCount", settings.get("bucket_count")); + } + if (settings.hasValue("hashSetSize")) { + settingMap.put("hashSetSize", settings.get("hash_set_size")); + } + if (settings.hasValue("with_rotation")) { + settingMap.put("withRotation", settings.get("with_rotation")); + } return settingMap; } } diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/UAX29URLEmailTokenizerFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/UAX29URLEmailTokenizerFactory.java index 8fcfb2c599ae0..8d6e0ec0815b4 100644 --- 
a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/UAX29URLEmailTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/UAX29URLEmailTokenizerFactory.java @@ -34,7 +34,7 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; +import org.apache.lucene.analysis.email.UAX29URLEmailTokenizer; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java index bced9c334d9f6..4cf0d1de28717 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/CommonAnalysisFactoryTests.java @@ -110,6 +110,7 @@ protected Map> getTokenFilters() { filters.put("latvianstem", StemmerTokenFilterFactory.class); filters.put("norwegianlightstem", StemmerTokenFilterFactory.class); filters.put("norwegianminimalstem", StemmerTokenFilterFactory.class); + filters.put("norwegiannormalization", Void.class); filters.put("portuguesestem", StemmerTokenFilterFactory.class); filters.put("portugueselightstem", StemmerTokenFilterFactory.class); filters.put("portugueseminimalstem", StemmerTokenFilterFactory.class); @@ -117,6 +118,7 @@ protected Map> getTokenFilters() { filters.put("soranistem", StemmerTokenFilterFactory.class); filters.put("spanishlightstem", StemmerTokenFilterFactory.class); filters.put("swedishlightstem", StemmerTokenFilterFactory.class); + filters.put("swedishminimalstem", Void.class); filters.put("stemmeroverride", StemmerOverrideTokenFilterFactory.class); filters.put("kstem", 
KStemTokenFilterFactory.class); filters.put("synonym", SynonymTokenFilterFactory.class); @@ -242,7 +244,7 @@ protected Map> getPreConfiguredTokenizers() { tokenizers.put("keyword", null); tokenizers.put("lowercase", Void.class); tokenizers.put("classic", null); - tokenizers.put("uax_url_email", org.apache.lucene.analysis.standard.UAX29URLEmailTokenizerFactory.class); + tokenizers.put("uax_url_email", org.apache.lucene.analysis.email.UAX29URLEmailTokenizerFactory.class); tokenizers.put("path_hierarchy", null); tokenizers.put("letter", null); tokenizers.put("whitespace", null); diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java index 0b0beea41751c..35915af8f263d 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/DisableGraphQueryTests.java @@ -107,11 +107,15 @@ public void setup() { // parsed queries for "text_shingle_unigram:(foo bar baz)" with query parsers // that ignores position length attribute expectedQueryWithUnigram = new BooleanQuery.Builder().add( - new SynonymQuery(new Term("text_shingle_unigram", "foo"), new Term("text_shingle_unigram", "foo bar")), + new SynonymQuery.Builder("text_shingle_unigram").addTerm(new Term("text_shingle_unigram", "foo")) + .addTerm(new Term("text_shingle_unigram", "foo bar")) + .build(), BooleanClause.Occur.SHOULD ) .add( - new SynonymQuery(new Term("text_shingle_unigram", "bar"), new Term("text_shingle_unigram", "bar baz")), + new SynonymQuery.Builder("text_shingle_unigram").addTerm(new Term("text_shingle_unigram", "bar")) + .addTerm(new Term("text_shingle_unigram", "bar baz")) + .build(), BooleanClause.Occur.SHOULD ) .add(new TermQuery(new Term("text_shingle_unigram", "baz")), BooleanClause.Occur.SHOULD) diff --git 
a/modules/lang-expression/licenses/lucene-expressions-8.10.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.10.1.jar.sha1 deleted file mode 100644 index f327cbcb6f8e6..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -24932a4be7064a99126d80776718845b356abae0 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..21edcc44b664e --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 @@ -0,0 +1 @@ +0a3d818d6f6fb113831ed34553b24763fbda1e84 \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java index a6fcd7a1978e4..1c3dc69359952 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java @@ -37,7 +37,6 @@ import org.apache.lucene.expressions.js.JavascriptCompiler; import org.apache.lucene.expressions.js.VariableContext; import org.apache.lucene.search.DoubleValuesSource; -import org.apache.lucene.search.SortField; import org.opensearch.SpecialPermission; import org.opensearch.common.Nullable; import org.opensearch.index.fielddata.IndexFieldData; @@ -263,7 +262,7 @@ private static NumberSortScript.LeafFactory newSortScript(Expression expr, Searc for (String variable : expr.variables) { try { if (variable.equals("_score")) { - bindings.add(new SortField("_score", SortField.Type.SCORE)); + bindings.add("_score", DoubleValuesSource.SCORES); needsScores = true; } else if (vars != null && vars.containsKey(variable)) { bindFromParams(vars, bindings, variable); @@ 
-320,7 +319,7 @@ private static AggregationScript.LeafFactory newAggregationScript( for (String variable : expr.variables) { try { if (variable.equals("_score")) { - bindings.add(new SortField("_score", SortField.Type.SCORE)); + bindings.add("_score", DoubleValuesSource.SCORES); needsScores = true; } else if (variable.equals("_value")) { specialValue = new ReplaceableConstDoubleValueSource(); @@ -393,7 +392,7 @@ private static ScoreScript.LeafFactory newScoreScript(Expression expr, SearchLoo for (String variable : expr.variables) { try { if (variable.equals("_score")) { - bindings.add(new SortField("_score", SortField.Type.SCORE)); + bindings.add("_score", DoubleValuesSource.SCORES); needsScores = true; } else if (variable.equals("_value")) { specialValue = new ReplaceableConstDoubleValueSource(); diff --git a/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy b/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy index d3fa7589f092a..0c61624ca4fd7 100644 --- a/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy +++ b/modules/lang-expression/src/main/plugin-metadata/plugin-security.policy @@ -42,4 +42,5 @@ grant { permission org.opensearch.script.ClassPermission "java.lang.Math"; permission org.opensearch.script.ClassPermission "org.apache.lucene.util.MathUtil"; permission org.opensearch.script.ClassPermission "org.apache.lucene.util.SloppyMath"; + permission org.opensearch.script.ClassPermission "org.apache.lucene.expressions.js.ExpressionMath"; }; diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java index 7bf102584a379..7394993448bbf 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java @@ -44,6 
+44,10 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -52,10 +56,6 @@ import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Operations; diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java index 037b486df956d..786791314692d 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapperTests.java @@ -38,6 +38,9 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -47,9 +50,6 
@@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.Strings; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; import org.opensearch.common.xcontent.XContentBuilder; diff --git a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java index 2e7411743b15d..be957146da21d 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.join.JoinUtil; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.similarities.Similarity; @@ -409,6 +410,11 @@ public static final class LateParsingQuery extends Query { this.similarity = similarity; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public Query rewrite(IndexReader reader) throws IOException { Query rewritten = super.rewrite(reader); diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java index 14e7973ec0c2d..0aa8318e7c191 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQuery.java @@ -34,11 +34,11 @@ import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; @@ -56,7 +56,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.Set; final class PercolateQuery extends Query implements Accountable { @@ -112,8 +111,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo final Weight verifiedMatchesWeight = verifiedMatchesQuery.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, boost); final Weight candidateMatchesWeight = candidateMatchesQuery.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, boost); return new Weight(this) { - @Override - public void extractTerms(Set set) {} @Override public Explanation explain(LeafReaderContext leafReaderContext, int docId) throws IOException { @@ -245,6 +242,11 @@ Query getVerifiedMatchesQuery() { return verifiedMatchesQuery; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + // Comparing identity here to avoid being cached // Note that in theory if the same instance gets used multiple times it could still get cached, // however since we create a new query instance each time we this query this shouldn't happen and thus diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java index a8b0395dd84e0..fec38207582e7 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorFieldMapper.java @@ -43,9 +43,9 
@@ import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValuesSource; import org.apache.lucene.search.MatchNoDocsQuery; @@ -279,7 +279,7 @@ Query percolateQuery( } Query filter = null; if (excludeNestedDocuments) { - filter = Queries.newNonNestedFilter(indexVersion); + filter = Queries.newNonNestedFilter(); } return new PercolateQuery(name, queryStore, documents, candidateQuery, searcher, filter, verifiedMatchesQuery); } diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index 244e3ef1beacc..a157a20f5f2c4 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -44,7 +44,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSetIterator; -import org.opensearch.Version; import org.opensearch.common.document.DocumentField; import org.opensearch.common.lucene.search.Queries; import org.opensearch.search.fetch.FetchContext; @@ -127,7 +126,7 @@ static class PercolateContext { this.percolateQuery = pq; this.singlePercolateQuery = singlePercolateQuery; IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher(); - Query nonNestedFilter = percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter(Version.CURRENT)); + Query nonNestedFilter = percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter()); Weight weight = 
percolatorIndexSearcher.createWeight(nonNestedFilter, ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer s = weight.scorer(percolatorIndexSearcher.getIndexReader().leaves().get(0)); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); @@ -148,7 +147,7 @@ Query filterNestedDocs(Query in) { if (rootDocsBySlot != null) { // Ensures that we filter out nested documents return new BooleanQuery.Builder().add(in, BooleanClause.Occur.MUST) - .add(Queries.newNonNestedFilter(Version.CURRENT), BooleanClause.Occur.FILTER) + .add(Queries.newNonNestedFilter(), BooleanClause.Occur.FILTER) .build(); } return in; diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java index 4a8ab8ba7d437..3a1b6734dd444 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/QueryAnalyzer.java @@ -35,6 +35,8 @@ import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -48,8 +50,6 @@ import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.ByteRunAutomaton; diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java index 
4058548f052f8..e59aa227e3dc7 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/CandidateQueryTests.java @@ -37,7 +37,6 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; @@ -60,10 +59,15 @@ import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.queries.CommonTermsQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanNotQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; @@ -74,6 +78,7 @@ import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; @@ -83,10 +88,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanNotQuery; -import 
org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; @@ -123,7 +124,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -1279,6 +1279,11 @@ public Query rewrite(IndexReader reader) throws IOException { return new TermQuery(term); } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "custom{" + field + "}"; @@ -1310,9 +1315,6 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo final IndexSearcher percolatorIndexSearcher = memoryIndex.createSearcher(); return new Weight(this) { - @Override - public void extractTerms(Set terms) {} - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { Scorer scorer = scorer(context); @@ -1386,6 +1388,11 @@ public boolean isCacheable(LeafReaderContext ctx) { }; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "control{" + field + "}"; diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java index a4a6f9b6de254..c5049e21acc0c 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryTests.java @@ -42,6 +42,8 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; +import org.apache.lucene.queries.spans.SpanNearQuery; +import 
org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -53,8 +55,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.opensearch.common.bytes.BytesArray; import org.opensearch.test.OpenSearchTestCase; diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index 2c0aa593317b4..ca6f3a78b27d7 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -35,7 +35,6 @@ import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; @@ -43,9 +42,10 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; diff --git 
a/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java index 6a7198d55faee..509f483bcd253 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/QueryAnalyzerTests.java @@ -33,7 +33,6 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LatLonPoint; @@ -45,6 +44,12 @@ import org.apache.lucene.queries.intervals.IntervalQuery; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; +import org.apache.lucene.queries.spans.SpanFirstQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanNotQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; +import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -63,11 +68,6 @@ import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.join.QueryBitSetProducer; import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.search.spans.SpanFirstQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanNotQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.Version; import org.opensearch.common.lucene.search.function.CombineFunction; @@ -824,13 +824,13 @@ public void 
testExtractQueryMetadata_disjunctionMaxQuery() { } public void testSynonymQuery() { - SynonymQuery query = new SynonymQuery(); + SynonymQuery query = new SynonymQuery.Builder("field").build(); Result result = analyze(query, Version.CURRENT); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(0)); assertThat(result.extractions.isEmpty(), is(true)); - query = new SynonymQuery(new Term("_field", "_value1"), new Term("_field", "_value2")); + query = new SynonymQuery.Builder("_field").addTerm(new Term("_field", "_value1")).addTerm(new Term("_field", "_value2")).build(); result = analyze(query, Version.CURRENT); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 8bc8c2c764e29..e5c084559f0a6 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -28,8 +28,6 @@ * under the License. */ -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis - apply plugin: 'opensearch.yaml-rest-test' apply plugin: 'opensearch.internal-cluster-test' @@ -46,7 +44,7 @@ forbiddenApisMain { } dependencies { - api "org.apache.lucene:lucene-analyzers-icu:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" api "com.ibm.icu:icu4j:${versions.icu4j}" } diff --git a/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 b/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 deleted file mode 100644 index c24c69cf4b90f..0000000000000 --- a/plugins/analysis-icu/licenses/icu4j-62.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a4d00d5ec5febd252a6182e8b6e87a0a9821f81 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/icu4j-68.2.jar.sha1 b/plugins/analysis-icu/licenses/icu4j-68.2.jar.sha1 new file mode 100644 index 0000000000000..fcb3d79075099 --- /dev/null +++ b/plugins/analysis-icu/licenses/icu4j-68.2.jar.sha1 @@ -0,0 +1 @@ +76893e6000401ace133a65262254be0ebe556d46 \ No 
newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..a0df1a4b7cb2e --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 @@ -0,0 +1 @@ +a23a2c1c9baad61b6fb5380f072e41534c275875 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.10.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.10.1.jar.sha1 deleted file mode 100644 index cd11905d4531e..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a1eec256a25340ba5d432d2800f759db83eb5145 \ No newline at end of file diff --git a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/ICUCollationKeyFilter.java b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/ICUCollationKeyFilter.java index 3d4affb280b48..d7e097ce79798 100644 --- a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/ICUCollationKeyFilter.java +++ b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/ICUCollationKeyFilter.java @@ -35,7 +35,7 @@ import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.collation.ICUCollationDocValuesField; +import org.apache.lucene.analysis.icu.ICUCollationDocValuesField; import java.io.IOException; diff --git a/plugins/analysis-kuromoji/build.gradle b/plugins/analysis-kuromoji/build.gradle index 29ed05a9661dd..60738fb28b6d5 100644 --- a/plugins/analysis-kuromoji/build.gradle +++ b/plugins/analysis-kuromoji/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-kuromoji:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-kuromoji:${versions.lucene}" } restResources { diff --git 
a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..7eb72638fd6d2 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 @@ -0,0 +1 @@ +55f00abe01e51181d687c6bbceca8544f319b97d \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.10.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.10.1.jar.sha1 deleted file mode 100644 index 0cee3fd1fe9cf..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d9ff6329a9755bbdb7343452bf246e61ae9279d8 \ No newline at end of file diff --git a/plugins/analysis-nori/build.gradle b/plugins/analysis-nori/build.gradle index 1f0b73f334f88..3def7f9c6c60f 100644 --- a/plugins/analysis-nori/build.gradle +++ b/plugins/analysis-nori/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-nori:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-nori:${versions.lucene}" } restResources { diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..4d787ad04791f --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 @@ -0,0 +1 @@ +c5258e674ad9c189338b026710869c2955d8e11d \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.10.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.10.1.jar.sha1 deleted file mode 100644 index ec8d7c98c2d6f..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6e78aef6d1b709ed3e27dbc949255e078da08d41 \ No newline at end of file diff --git 
a/plugins/analysis-phonetic/build.gradle b/plugins/analysis-phonetic/build.gradle index 5d9e52307f389..ffa0466d43170 100644 --- a/plugins/analysis-phonetic/build.gradle +++ b/plugins/analysis-phonetic/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-phonetic:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-phonetic:${versions.lucene}" api "commons-codec:commons-codec:${versions.commonscodec}" } diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..8d915a28087e6 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 @@ -0,0 +1 @@ +437960fac10a9f8327fbd87be4e408eb140988b3 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.10.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.10.1.jar.sha1 deleted file mode 100644 index 14edf0533a00d..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c186bf6dd0c2fa6612ba9b0d785ff2d388d32a23 \ No newline at end of file diff --git a/plugins/analysis-smartcn/build.gradle b/plugins/analysis-smartcn/build.gradle index 4a389d60cac19..92f2774854715 100644 --- a/plugins/analysis-smartcn/build.gradle +++ b/plugins/analysis-smartcn/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-smartcn:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-smartcn:${versions.lucene}" } restResources { diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..d57bf6b3ab80d --- /dev/null +++ 
b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 @@ -0,0 +1 @@ +fe96c0b4609be5f7450773c2d7f099c51f4b1f7a \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.10.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.10.1.jar.sha1 deleted file mode 100644 index 5fc06ea596458..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ebda1884c24bb14ee451b98e7565c86966f8863d \ No newline at end of file diff --git a/plugins/analysis-stempel/build.gradle b/plugins/analysis-stempel/build.gradle index b03d33adc4207..d713f80172c58 100644 --- a/plugins/analysis-stempel/build.gradle +++ b/plugins/analysis-stempel/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-stempel:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-stempel:${versions.lucene}" } restResources { diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..ade92c37c5865 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 @@ -0,0 +1 @@ +b92e86dd451d225e68ee4abac5b00bf883b6ea00 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.10.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.10.1.jar.sha1 deleted file mode 100644 index bf2d58255a77e..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2a4bd86c96374cdc5acaf7c0efd5127f2fd3a519 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/build.gradle b/plugins/analysis-ukrainian/build.gradle index 9e4bb9c647859..386452fcf8aeb 100644 --- a/plugins/analysis-ukrainian/build.gradle +++ 
b/plugins/analysis-ukrainian/build.gradle @@ -35,7 +35,7 @@ opensearchplugin { } dependencies { - api "org.apache.lucene:lucene-analyzers-morfologik:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-morfologik:${versions.lucene}" api "org.carrot2:morfologik-stemming:2.1.8" api "org.carrot2:morfologik-fsa:2.1.8" api "ua.net.nlp:morfologik-ukrainian-search:4.9.1" diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..433ce1f0552c8 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 @@ -0,0 +1 @@ +048fddf601c6de7dd296f6da3f394544618f7cea \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.10.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.10.1.jar.sha1 deleted file mode 100644 index 6076c699bb7bf..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09de2e3fa72355228b2723f958dcb0ec1bc3f31a \ No newline at end of file diff --git a/plugins/mapper-annotated-text/src/test/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighterTests.java b/plugins/mapper-annotated-text/src/test/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighterTests.java index dd2ee23355c1e..fe75566c315a1 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighterTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/opensearch/search/fetch/subphase/highlight/AnnotatedTextHighlighterTests.java @@ -136,7 +136,6 @@ private void assertHighlightOneDoc( noMatchSize, expectedPassages.length, name -> "text".equals(name), - Integer.MAX_VALUE, Integer.MAX_VALUE ); highlighter.setFieldMatcher((name) -> 
"text".equals(name)); diff --git a/plugins/store-smb/src/internalClusterTest/java/org/opensearch/index/store/SmbNIOFsTests.java b/plugins/store-smb/src/internalClusterTest/java/org/opensearch/index/store/SmbNIOFsTests.java index 6610d8f704ea3..eca9ca356a764 100644 --- a/plugins/store-smb/src/internalClusterTest/java/org/opensearch/index/store/SmbNIOFsTests.java +++ b/plugins/store-smb/src/internalClusterTest/java/org/opensearch/index/store/SmbNIOFsTests.java @@ -6,13 +6,34 @@ * compatible open source license. */ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + package org.opensearch.index.store; import org.opensearch.common.settings.Settings; -/** - * Index Settings Tests for NIO FileSystem as index store type. 
- */ public class SmbNIOFsTests extends AbstractAzureFsTestCase { @Override public Settings indexSettings() { diff --git a/server/build.gradle b/server/build.gradle index 3a11428ca7919..dcf4d43c60192 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -97,7 +97,7 @@ dependencies { // lucene api "org.apache.lucene:lucene-core:${versions.lucene}" - api "org.apache.lucene:lucene-analyzers-common:${versions.lucene}" + api "org.apache.lucene:lucene-analysis-common:${versions.lucene}" api "org.apache.lucene:lucene-backward-codecs:${versions.lucene}" api "org.apache.lucene:lucene-grouping:${versions.lucene}" api "org.apache.lucene:lucene-highlighter:${versions.lucene}" diff --git a/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..2ed9dbcbe22f6 --- /dev/null +++ b/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 @@ -0,0 +1 @@ +f78890829c3d6f15de48fdbc2c77ef4c0e3f005c \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.10.1.jar.sha1 b/server/licenses/lucene-analyzers-common-8.10.1.jar.sha1 deleted file mode 100644 index 685f94bcc6601..0000000000000 --- a/server/licenses/lucene-analyzers-common-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -23bb36a98d01100953674c56c20861b29b5a5175 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.10.1.jar.sha1 b/server/licenses/lucene-backward-codecs-8.10.1.jar.sha1 deleted file mode 100644 index 3191833511058..0000000000000 --- a/server/licenses/lucene-backward-codecs-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7399c32bc4ba7e37e14a9660ffd7962acf68a802 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..acf5a2b543199 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 @@ -0,0 +1 @@ 
+9fb48d0244799e18299449ee62459caab0728490 \ No newline at end of file diff --git a/server/licenses/lucene-core-8.10.1.jar.sha1 b/server/licenses/lucene-core-8.10.1.jar.sha1 deleted file mode 100644 index 77f85d74d6e6c..0000000000000 --- a/server/licenses/lucene-core-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -deb78f6b21d29f964ab267ad59fafb58ef740101 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.0.0.jar.sha1 b/server/licenses/lucene-core-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..c874382fc8355 --- /dev/null +++ b/server/licenses/lucene-core-9.0.0.jar.sha1 @@ -0,0 +1 @@ +be679fd274f264e4e8b02bc032d2788cd4076ab4 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.10.1.jar.sha1 b/server/licenses/lucene-grouping-8.10.1.jar.sha1 deleted file mode 100644 index 82dd3ba35b0a2..0000000000000 --- a/server/licenses/lucene-grouping-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b91bb886d30c67a8f980d3bdfd6b7826a62d5e7 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.0.0.jar.sha1 b/server/licenses/lucene-grouping-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..18a81b5fa97ff --- /dev/null +++ b/server/licenses/lucene-grouping-9.0.0.jar.sha1 @@ -0,0 +1 @@ +27ebe235d427b4e392fabab9b6bfa09524ca7f8b \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.10.1.jar.sha1 b/server/licenses/lucene-highlighter-8.10.1.jar.sha1 deleted file mode 100644 index 901a99e05fa27..0000000000000 --- a/server/licenses/lucene-highlighter-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec4a2103cb300aab7e6142f1c7778dd505ecb8e2 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.0.0.jar.sha1 b/server/licenses/lucene-highlighter-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..5503495c2f86c --- /dev/null +++ b/server/licenses/lucene-highlighter-9.0.0.jar.sha1 @@ -0,0 +1 @@ +a3cb395c2e8c672e6eec951b2b02371a4a883f73 \ No newline at end of file diff --git 
a/server/licenses/lucene-join-8.10.1.jar.sha1 b/server/licenses/lucene-join-8.10.1.jar.sha1 deleted file mode 100644 index b7165475dac4f..0000000000000 --- a/server/licenses/lucene-join-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -aa368e9d11660dcfcfaab1a39dd871f05fa2b031 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.0.0.jar.sha1 b/server/licenses/lucene-join-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..dcbaa17875435 --- /dev/null +++ b/server/licenses/lucene-join-9.0.0.jar.sha1 @@ -0,0 +1 @@ +94a855b5d09a6601289aeaeba0f11d5539552590 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.10.1.jar.sha1 b/server/licenses/lucene-memory-8.10.1.jar.sha1 deleted file mode 100644 index ace60de0396b2..0000000000000 --- a/server/licenses/lucene-memory-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9de18bf605879647e964fd57ddf3fa6f85ca743e \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.0.0.jar.sha1 b/server/licenses/lucene-memory-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..157597ce9878f --- /dev/null +++ b/server/licenses/lucene-memory-9.0.0.jar.sha1 @@ -0,0 +1 @@ +2371c95031422bc1f501d43ffcc7311baed4b35b \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.10.1.jar.sha1 b/server/licenses/lucene-misc-8.10.1.jar.sha1 deleted file mode 100644 index ef9f37d080361..0000000000000 --- a/server/licenses/lucene-misc-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e9cca86ebbe010d375388c5a17216e2d2b2e76bb \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.0.0.jar.sha1 b/server/licenses/lucene-misc-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..ef031d34305a2 --- /dev/null +++ b/server/licenses/lucene-misc-9.0.0.jar.sha1 @@ -0,0 +1 @@ +25c6170f4fa2f707908dfb92fbafc76727f901e0 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.10.1.jar.sha1 b/server/licenses/lucene-queries-8.10.1.jar.sha1 deleted file mode 100644 index 
ee8ec29fd21f9..0000000000000 --- a/server/licenses/lucene-queries-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -21b70a0996e3408291514d99e3b03800d0bcd657 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.0.0.jar.sha1 b/server/licenses/lucene-queries-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..4b43c9e6b709a --- /dev/null +++ b/server/licenses/lucene-queries-9.0.0.jar.sha1 @@ -0,0 +1 @@ +87b4c7833d30895baf7091f9cb0db878e970b604 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.10.1.jar.sha1 b/server/licenses/lucene-queryparser-8.10.1.jar.sha1 deleted file mode 100644 index 3175b926c47ad..0000000000000 --- a/server/licenses/lucene-queryparser-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -087f52ee3f72f387b802c49a96e4a14b3b05dd21 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.0.0.jar.sha1 b/server/licenses/lucene-queryparser-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..62a4650a168c7 --- /dev/null +++ b/server/licenses/lucene-queryparser-9.0.0.jar.sha1 @@ -0,0 +1 @@ +bf13395ad2033bca3182fcbc83204e8ae1951945 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.10.1.jar.sha1 b/server/licenses/lucene-sandbox-8.10.1.jar.sha1 deleted file mode 100644 index 5941c170b1e80..0000000000000 --- a/server/licenses/lucene-sandbox-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -82b15ef61297e6d7b0c1f6c37c502d6b77a82f1e \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.0.0.jar.sha1 b/server/licenses/lucene-sandbox-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..4396efda1a83b --- /dev/null +++ b/server/licenses/lucene-sandbox-9.0.0.jar.sha1 @@ -0,0 +1 @@ +3c153a1dc1da3f98083cc932c9476df4b77b0ca5 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.10.1.jar.sha1 b/server/licenses/lucene-spatial-extras-8.10.1.jar.sha1 deleted file mode 100644 index 7eb235d9a1914..0000000000000 --- 
a/server/licenses/lucene-spatial-extras-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a3b6eac3e66bb1c6fb05c0cd980e5592adaf96b \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..a742934def499 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 @@ -0,0 +1 @@ +91535ef6512c45c7e2b113b04cab7738ee774893 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.10.1.jar.sha1 b/server/licenses/lucene-spatial3d-8.10.1.jar.sha1 deleted file mode 100644 index 177ceb41b4205..0000000000000 --- a/server/licenses/lucene-spatial3d-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -823a5e9d2fd3b5b668d305e0781d0e074e9f2ebb \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..0722795c260ad --- /dev/null +++ b/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 @@ -0,0 +1 @@ +6b4ee47f218ed3d123c1b07671677a2e4f3c133b \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.10.1.jar.sha1 b/server/licenses/lucene-suggest-8.10.1.jar.sha1 deleted file mode 100644 index dae6bab002ef4..0000000000000 --- a/server/licenses/lucene-suggest-8.10.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -92d7e5a178d0df58e0b4d400755ac46bae3eea11 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.0.0.jar.sha1 b/server/licenses/lucene-suggest-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..7eb41e758379e --- /dev/null +++ b/server/licenses/lucene-suggest-9.0.0.jar.sha1 @@ -0,0 +1 @@ +a7d0e7279737114c039f5214082da948732096a6 \ No newline at end of file diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java index 
7fd2466647272..3f174dd0fdd6a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -56,6 +56,7 @@ public class IndexPrimaryRelocationIT extends OpenSearchIntegTestCase { private static final int RELOCATION_COUNT = 15; + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPrimaryRelocationWhileIndexing() throws Exception { internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(2, 3)); client().admin() diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java index c5b0d99e6d275..d17761f62eb53 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java @@ -32,9 +32,7 @@ package org.opensearch.recovery; -import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; -import com.carrotsearch.hppc.procedures.IntProcedure; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.util.English; import org.opensearch.action.ActionFuture; @@ -61,6 +59,7 @@ import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; +import org.opensearch.index.mapper.MapperService; import org.opensearch.index.seqno.ReplicationTracker; import org.opensearch.index.seqno.RetentionLease; import org.opensearch.index.shard.IndexEventListener; @@ -192,6 +191,7 @@ public void testSimpleRelocationNoIndexing() { assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void 
testRelocationWhileIndexingRandom() throws Exception { int numberOfRelocations = scaledRandomIntBetween(1, rarely() ? 10 : 4); int numberOfReplicas = randomBoolean() ? 0 : 1; @@ -228,7 +228,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { } int numDocs = scaledRandomIntBetween(200, 2500); - try (BackgroundIndexer indexer = new BackgroundIndexer("test", "type1", client(), numDocs)) { + try (BackgroundIndexer indexer = new BackgroundIndexer("test", MapperService.SINGLE_MAPPING_NAME, client(), numDocs)) { logger.info("--> waiting for {} docs to be indexed ...", numDocs); waitForDocs(numDocs, indexer); logger.info("--> {} docs indexed", numDocs); @@ -285,20 +285,20 @@ public void testRelocationWhileIndexingRandom() throws Exception { for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { hitIds[hit] = hit + 1; } - IntHashSet set = IntHashSet.from(hitIds); + Set set = Arrays.stream(hitIds).boxed().collect(Collectors.toSet()); for (SearchHit hit : hits.getHits()) { int id = Integer.parseInt(hit.getId()); - if (!set.remove(id)) { + if (set.remove(id) == false) { logger.error("Extra id [{}]", id); } } - set.forEach((IntProcedure) value -> { logger.error("Missing id [{}]", value); }); + set.forEach(value -> logger.error("Missing id [{}]", value)); } assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); logger.info("--> DONE search test round {}", i + 1); } - if (!ranOnce) { + if (ranOnce == false) { fail(); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index f0fe5e4479b76..d1b3895ff40e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -3288,6 +3288,36 
@@ public void testKeywordFieldHighlighting() throws IOException { ); } + public void testCopyToFields() throws Exception { + XContentBuilder b = jsonBuilder().startObject().startObject("properties"); + b.startObject("foo"); + { + b.field("type", "text"); + b.field("copy_to", "foo_copy"); + } + b.endObject(); + // If field is not stored, it is looked up in source (but source has only 'foo' + b.startObject("foo_copy").field("type", "text").field("store", true).endObject(); + b.endObject().endObject(); + prepareCreate("test").addMapping("type", b).get(); + + client().prepareIndex("test") + .setId("1") + .setSource(jsonBuilder().startObject().field("foo", "how now brown cow").endObject()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + SearchResponse response = client().prepareSearch() + .setQuery(matchQuery("foo_copy", "brown")) + .highlighter(new HighlightBuilder().field(new Field("foo_copy"))) + .get(); + + assertHitCount(response, 1); + HighlightField field = response.getHits().getAt(0).getHighlightFields().get("foo_copy"); + assertThat(field.getFragments().length, equalTo(1)); + assertThat(field.getFragments()[0].string(), equalTo("how now brown cow")); + } + public void testACopyFieldWithNestedQuery() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java index 5b2d87a6508fe..494aa4c0e6b88 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java @@ -331,8 +331,6 @@ public void testLimitOnExpandedFields() throws Exception { doAssertOneHitForQueryString("field_A0:foo"); // expanding to the limit should work doAssertOneHitForQueryString("field_A\\*:foo"); - // expanding two blocks to the limit still works - 
doAssertOneHitForQueryString("field_A\\*:foo field_B\\*:bar"); // adding a non-existing field on top shouldn't overshoot the limit doAssertOneHitForQueryString("field_A\\*:foo unmapped:something"); diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index 30ab282bf3d44..a97c4a0d13f12 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -238,11 +238,10 @@ public void testExplainValidateQueryTwoNodes() throws IOException { assertThat(response.getQueryExplanation().size(), equalTo(1)); assertThat( response.getQueryExplanation().get(0).getExplanation(), - equalTo( - "(MatchNoDocsQuery(\"failed [bar] query, caused by number_format_exception:[For input string: \"foo\"]\") " - + "| foo:foo | baz:foo)" - ) + containsString("MatchNoDocsQuery(\"failed [bar] query, caused by number_format_exception:[For input string: \"foo\"]\")") ); + assertThat(response.getQueryExplanation().get(0).getExplanation(), containsString("foo:foo")); + assertThat(response.getQueryExplanation().get(0).getExplanation(), containsString("baz:foo")); assertThat(response.getQueryExplanation().get(0).getError(), nullValue()); } } diff --git a/server/src/main/java/org/apache/lucene/misc/search/similarity/LegacyBM25Similarity.java b/server/src/main/java/org/apache/lucene/misc/search/similarity/LegacyBM25Similarity.java new file mode 100644 index 0000000000000..3b812e1c70368 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/misc/search/similarity/LegacyBM25Similarity.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.misc.search.similarity; + +import org.apache.lucene.index.FieldInvertState; +import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.TermStatistics; +import org.apache.lucene.search.similarities.BM25Similarity; +import org.apache.lucene.search.similarities.Similarity; + +/** + * Similarity that behaves like {@link BM25Similarity} while also applying the k1+1 factor to the + * numerator of the scoring formula + * + * @see BM25Similarity + * @deprecated {@link BM25Similarity} should be used instead + */ +@Deprecated +public final class LegacyBM25Similarity extends Similarity { + + private final BM25Similarity bm25Similarity; + + /** + * BM25 with these default values: + * + *
    + *
  • {@code k1 = 1.2} + *
  • {@code b = 0.75} + *
  • {@code discountOverlaps = true} + *
+ */ + public LegacyBM25Similarity() { + this.bm25Similarity = new BM25Similarity(); + } + + /** + * BM25 with the supplied parameter values. + * + * @param k1 Controls non-linear term frequency normalization (saturation). + * @param b Controls to what degree document length normalizes tf values. + * @throws IllegalArgumentException if {@code k1} is infinite or negative, or if {@code b} is not + * within the range {@code [0..1]} + */ + public LegacyBM25Similarity(float k1, float b) { + this.bm25Similarity = new BM25Similarity(k1, b); + } + + /** + * BM25 with the supplied parameter values. + * + * @param k1 Controls non-linear term frequency normalization (saturation). + * @param b Controls to what degree document length normalizes tf values. + * @param discountOverlaps True if overlap tokens (tokens with a position of increment of zero) + * are discounted from the document's length. + * @throws IllegalArgumentException if {@code k1} is infinite or negative, or if {@code b} is not + * within the range {@code [0..1]} + */ + public LegacyBM25Similarity(float k1, float b, boolean discountOverlaps) { + this.bm25Similarity = new BM25Similarity(k1, b, discountOverlaps); + } + + @Override + public long computeNorm(FieldInvertState state) { + return bm25Similarity.computeNorm(state); + } + + @Override + public SimScorer scorer(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) { + return bm25Similarity.scorer(boost * (1 + bm25Similarity.getK1()), collectionStats, termStats); + } + + /** + * Returns the k1 parameter + * + * @see #LegacyBM25Similarity(float, float) + */ + public final float getK1() { + return bm25Similarity.getK1(); + } + + /** + * Returns the b parameter + * + * @see #LegacyBM25Similarity(float, float) + */ + public final float getB() { + return bm25Similarity.getB(); + } + + /** + * Returns true if overlap tokens are discounted from the document's length. 
+ * + * @see #LegacyBM25Similarity(float, float, boolean) + */ + public boolean getDiscountOverlaps() { + return bm25Similarity.getDiscountOverlaps(); + } + + @Override + public String toString() { + return bm25Similarity.toString(); + } +} diff --git a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java index 82778d31f6c2c..a2c59de7832d4 100644 --- a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java @@ -39,6 +39,7 @@ import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; @@ -138,6 +139,13 @@ public boolean isCacheable(LeafReaderContext ctx) { }; } + @Override + public void visit(QueryVisitor visitor) { + if (visitor.acceptField(fieldName)) { + visitor.visitLeaf(this); + } + } + @Override public String toString(String field) { return "BinaryDocValuesRangeQuery(fieldName=" + field + ",from=" + originalFrom + ",to=" + originalTo + ")"; diff --git a/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java index 12c51d951c6b2..ac279d6882634 100644 --- a/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java @@ -34,16 +34,16 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermStates; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanWeight; +import org.apache.lucene.queries.spans.Spans; import 
org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanWeight; -import org.apache.lucene.search.spans.Spans; import java.io.IOException; import java.util.Collections; import java.util.Map; -import java.util.Set; /** * A {@link SpanQuery} that matches no documents. @@ -57,6 +57,11 @@ public SpanMatchNoDocsQuery(String field, String reason) { this.reason = reason; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String getField() { return field; @@ -88,9 +93,6 @@ public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) { return null; } - @Override - public void extractTerms(Set terms) {} - @Override public boolean isCacheable(LeafReaderContext ctx) { return true; diff --git a/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java index f5fe33f1f95e1..fb22eb583d9e1 100644 --- a/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -35,14 +35,15 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import 
org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.search.uhighlight.UnifiedHighlighter.HighlightFlag; import org.apache.lucene.util.BytesRef; import org.opensearch.common.CheckedSupplier; import org.opensearch.common.Nullable; @@ -77,7 +78,6 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter { private final Locale breakIteratorLocale; private final int noMatchSize; private final FieldHighlighter fieldHighlighter; - private final int keywordIgnoreAbove; private final int maxAnalyzedOffset; /** @@ -97,7 +97,6 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter { * @param noMatchSize The size of the text that should be returned when no highlighting can be performed. * @param maxPassages the maximum number of passes to highlight * @param fieldMatcher decides which terms should be highlighted - * @param keywordIgnoreAbove if the field's value is longer than this we'll skip it * @param maxAnalyzedOffset if the field is more than this long we'll refuse to use the ANALYZED * offset source for it because it'd be super slow */ @@ -114,7 +113,6 @@ public CustomUnifiedHighlighter( int noMatchSize, int maxPassages, Predicate fieldMatcher, - int keywordIgnoreAbove, int maxAnalyzedOffset ) throws IOException { super(searcher, analyzer); @@ -126,7 +124,6 @@ public CustomUnifiedHighlighter( this.field = field; this.noMatchSize = noMatchSize; this.setFieldMatcher(fieldMatcher); - this.keywordIgnoreAbove = keywordIgnoreAbove; this.maxAnalyzedOffset = maxAnalyzedOffset; fieldHighlighter = getFieldHighlighter(field, query, extractTerms(query), maxPassages); } @@ -144,9 +141,6 @@ public Snippet[] highlightField(LeafReader reader, int docId, CheckedSupplier keywordIgnoreAbove) { - return null; // skip highlighting keyword terms that were ignored during indexing - } if 
((offsetSource == OffsetSource.ANALYSIS) && (fieldValueLength > maxAnalyzedOffset)) { throw new IllegalArgumentException( "The length of [" @@ -266,4 +260,12 @@ protected OffsetSource getOffsetSource(String field) { return offsetSource; } + /** Customize the highlighting flags to use by field. */ + @Override + protected Set getFlags(String field) { + final Set flags = super.getFlags(field); + // Change the defaults introduced by https://issues.apache.org/jira/browse/LUCENE-9431 + flags.remove(HighlightFlag.WEIGHT_MATCHES); + return flags; + } } diff --git a/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java index 8e71aa5a0dce6..ac688f15cda01 100644 --- a/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java +++ b/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java @@ -35,6 +35,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MultiPhraseQuery; @@ -42,7 +43,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.index.search.OpenSearchToParentBlockJoinQuery; diff --git a/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java b/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java index 7b7841cc4265c..1b4f31892c7f8 100644 --- a/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java +++ 
b/server/src/main/java/org/apache/lucene/util/CombinedBitSet.java @@ -127,4 +127,9 @@ public void clear(int i) { public void clear(int startIndex, int endIndex) { throw new UnsupportedOperationException("not implemented"); } + + @Override + public boolean getAndSet(int i) { + throw new UnsupportedOperationException("not implemented"); + } } diff --git a/server/src/main/java/org/apache/lucene/util/SPIClassIterator.java b/server/src/main/java/org/apache/lucene/util/SPIClassIterator.java new file mode 100644 index 0000000000000..1480c9aeeb2d8 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/SPIClassIterator.java @@ -0,0 +1,186 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.apache.lucene.util; + +import java.io.IOException; +import java.io.InputStream; +import java.io.BufferedReader; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Enumeration; +import java.util.Iterator; +import java.util.Locale; +import java.util.NoSuchElementException; +import java.util.Objects; +import java.util.ServiceConfigurationError; + +/** + * Helper class for loading SPI classes from classpath (META-INF files). + * This is a light impl of {@link java.util.ServiceLoader} but is guaranteed to + * be bug-free regarding classpath order and does not instantiate or initialize + * the classes found. + */ +@SuppressForbidden(reason = "Taken from Lucene") +public final class SPIClassIterator implements Iterator> { + private static final String META_INF_SERVICES = "META-INF/services/"; + + private final Class clazz; + private final ClassLoader loader; + private final Enumeration profilesEnum; + private Iterator linesIterator; + + /** Creates a new SPI iterator to lookup services of type {@code clazz} using + * the same {@link ClassLoader} as the argument. */ + public static SPIClassIterator get(Class clazz) { + return new SPIClassIterator<>(clazz, Objects.requireNonNull(clazz.getClassLoader(), () -> clazz + " has no classloader.")); + } + + /** Creates a new SPI iterator to lookup services of type {@code clazz} using the given classloader. */ + public static SPIClassIterator get(Class clazz, ClassLoader loader) { + return new SPIClassIterator<>(clazz, loader); + } + + /** + * Utility method to check if some class loader is a (grand-)parent of or the same as another one. + * This means the child will be able to load all classes from the parent, too. + *

+ * If caller's codesource doesn't have enough permissions to do the check, {@code false} is returned + * (this is fine, because if we get a {@code SecurityException} it is for sure no parent). + */ + public static boolean isParentClassLoader(final ClassLoader parent, final ClassLoader child) { + try { + ClassLoader cl = child; + while (cl != null) { + if (cl == parent) { + return true; + } + cl = cl.getParent(); + } + return false; + } catch (SecurityException se) { + return false; + } + } + + private SPIClassIterator(Class clazz, ClassLoader loader) { + this.clazz = Objects.requireNonNull(clazz, "clazz"); + this.loader = Objects.requireNonNull(loader, "loader"); + try { + final String fullName = META_INF_SERVICES + clazz.getName(); + this.profilesEnum = loader.getResources(fullName); + } catch (IOException ioe) { + throw new ServiceConfigurationError("Error loading SPI profiles for type " + clazz.getName() + " from classpath", ioe); + } + this.linesIterator = Collections.emptySet().iterator(); + } + + private boolean loadNextProfile() { + ArrayList lines = null; + while (profilesEnum.hasMoreElements()) { + if (lines != null) { + lines.clear(); + } else { + lines = new ArrayList<>(); + } + final URL url = profilesEnum.nextElement(); + try { + final InputStream in = url.openStream(); + boolean success = false; + try { + final BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); + String line; + while ((line = reader.readLine()) != null) { + final int pos = line.indexOf('#'); + if (pos >= 0) { + line = line.substring(0, pos); + } + line = line.trim(); + if (line.length() > 0) { + lines.add(line); + } + } + success = true; + } finally { + if (success) { + IOUtils.close(in); + } else { + IOUtils.closeWhileHandlingException(in); + } + } + } catch (IOException ioe) { + throw new ServiceConfigurationError("Error loading SPI class list from URL: " + url, ioe); + } + if (lines.isEmpty() == false) { + this.linesIterator = 
lines.iterator(); + return true; + } + } + return false; + } + + @Override + public boolean hasNext() { + return linesIterator.hasNext() || loadNextProfile(); + } + + @Override + public Class next() { + // hasNext() implicitely loads the next profile, so it is essential to call this here! + if (hasNext() == false) { + throw new NoSuchElementException(); + } + assert linesIterator.hasNext(); + final String c = linesIterator.next(); + try { + // don't initialize the class (pass false as 2nd parameter): + return Class.forName(c, false, loader).asSubclass(clazz); + } catch (ClassNotFoundException cnfe) { + throw new ServiceConfigurationError( + String.format( + Locale.ROOT, + "An SPI class of type %s with classname %s does not exist, " + "please fix the file '%s%1$s' in your classpath.", + clazz.getName(), + c, + META_INF_SERVICES + ) + ); + } + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + +} diff --git a/server/src/main/java/org/apache/lucene/util/packed/XPacked64.java b/server/src/main/java/org/apache/lucene/util/packed/XPacked64.java new file mode 100644 index 0000000000000..d811b245606ba --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/packed/XPacked64.java @@ -0,0 +1,317 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.apache.lucene.util.packed; + +import java.io.IOException; +import java.util.Arrays; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.util.RamUsageEstimator; + +/** + * Forked from Lucene 8.x; removed in Lucene 9.0 + * + * @todo further investigate a better alternative + * + * Space optimized random access capable array of values with a fixed number of bits/value. Values + * are packed contiguously. + * + *

The implementation strives to perform as fast as possible under the constraint of contiguous + * bits, by avoiding expensive operations. This comes at the cost of code clarity. + * + *

Technical details: This implementation is a refinement of a non-branching version. The + * non-branching get and set methods meant that 2 or 4 atomics in the underlying array were always + * accessed, even for the cases where only 1 or 2 were needed. Even with caching, this had a + * detrimental effect on performance. Related to this issue, the old implementation used lookup + * tables for shifts and masks, which also proved to be a bit slower than calculating the shifts and + * masks on the fly. See https://issues.apache.org/jira/browse/LUCENE-4062 for details. + */ +class XPacked64 extends XPackedInts.MutableImpl { + static final int BLOCK_SIZE = 64; // 32 = int, 64 = long + static final int BLOCK_BITS = 6; // The #bits representing BLOCK_SIZE + static final int MOD_MASK = BLOCK_SIZE - 1; // x % BLOCK_SIZE + + /** Values are stores contiguously in the blocks array. */ + private final long[] blocks; + /** A right-aligned mask of width BitsPerValue used by {@link #get(int)}. */ + private final long maskRight; + /** Optimization: Saves one lookup in {@link #get(int)}. */ + private final int bpvMinusBlockSize; + + /** + * Creates an array with the internal structures adjusted for the given limits and initialized to + * 0. + * + * @param valueCount the number of elements. + * @param bitsPerValue the number of bits available for any given value. + */ + public XPacked64(int valueCount, int bitsPerValue) { + super(valueCount, bitsPerValue); + final PackedInts.Format format = PackedInts.Format.PACKED; + final int longCount = format.longCount(PackedInts.VERSION_CURRENT, valueCount, bitsPerValue); + this.blocks = new long[longCount]; + maskRight = ~0L << (BLOCK_SIZE - bitsPerValue) >>> (BLOCK_SIZE - bitsPerValue); + bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE; + } + + /** + * Creates an array with content retrieved from the given DataInput. + * + * @param in a DataInput, positioned at the start of Packed64-content. + * @param valueCount the number of elements. 
+ * @param bitsPerValue the number of bits available for any given value. + * @throws java.io.IOException if the values for the backing array could not be retrieved. + */ + public XPacked64(int packedIntsVersion, DataInput in, int valueCount, int bitsPerValue) throws IOException { + super(valueCount, bitsPerValue); + final PackedInts.Format format = PackedInts.Format.PACKED; + final long byteCount = format.byteCount(packedIntsVersion, valueCount, bitsPerValue); // to know how much to read + final int longCount = format.longCount(PackedInts.VERSION_CURRENT, valueCount, bitsPerValue); // to size the array + blocks = new long[longCount]; + // read as many longs as we can + for (int i = 0; i < byteCount / 8; ++i) { + blocks[i] = in.readLong(); + } + final int remaining = (int) (byteCount % 8); + if (remaining != 0) { + // read the last bytes + long lastLong = 0; + for (int i = 0; i < remaining; ++i) { + lastLong |= (in.readByte() & 0xFFL) << (56 - i * 8); + } + blocks[blocks.length - 1] = lastLong; + } + maskRight = ~0L << (BLOCK_SIZE - bitsPerValue) >>> (BLOCK_SIZE - bitsPerValue); + bpvMinusBlockSize = bitsPerValue - BLOCK_SIZE; + } + + /** + * @param index the position of the value. + * @return the value at the given index. 
+ */ + @Override + public long get(final int index) { + // The abstract index in a bit stream + final long majorBitPos = (long) index * bitsPerValue; + // The index in the backing long-array + final int elementPos = (int) (majorBitPos >>> BLOCK_BITS); + // The number of value-bits in the second long + final long endBits = (majorBitPos & MOD_MASK) + bpvMinusBlockSize; + + if (endBits <= 0) { // Single block + return (blocks[elementPos] >>> -endBits) & maskRight; + } + // Two blocks + return ((blocks[elementPos] << endBits) | (blocks[elementPos + 1] >>> (BLOCK_SIZE - endBits))) & maskRight; + } + + @Override + public int get(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + assert off + len <= arr.length; + + final int originalIndex = index; + final PackedInts.Decoder decoder = BulkOperation.of(PackedInts.Format.PACKED, bitsPerValue); + + // go to the next block where the value does not span across two blocks + final int offsetInBlocks = index % decoder.longValueCount(); + if (offsetInBlocks != 0) { + for (int i = offsetInBlocks; i < decoder.longValueCount() && len > 0; ++i) { + arr[off++] = get(index++); + --len; + } + if (len == 0) { + return index - originalIndex; + } + } + + // bulk get + assert index % decoder.longValueCount() == 0; + int blockIndex = (int) (((long) index * bitsPerValue) >>> BLOCK_BITS); + assert (((long) index * bitsPerValue) & MOD_MASK) == 0; + final int iterations = len / decoder.longValueCount(); + decoder.decode(blocks, blockIndex, arr, off, iterations); + final int gotValues = iterations * decoder.longValueCount(); + index += gotValues; + len -= gotValues; + assert len >= 0; + + if (index > originalIndex) { + // stay at the block boundary + return index - originalIndex; + } else { + // no progress so far => already at a block boundary but no full block to get + assert index == originalIndex; + return 
super.get(index, arr, off, len); + } + } + + @Override + public void set(final int index, final long value) { + // The abstract index in a contiguous bit stream + final long majorBitPos = (long) index * bitsPerValue; + // The index in the backing long-array + final int elementPos = (int) (majorBitPos >>> BLOCK_BITS); // / BLOCK_SIZE + // The number of value-bits in the second long + final long endBits = (majorBitPos & MOD_MASK) + bpvMinusBlockSize; + + if (endBits <= 0) { // Single block + blocks[elementPos] = blocks[elementPos] & ~(maskRight << -endBits) | (value << -endBits); + return; + } + // Two blocks + blocks[elementPos] = blocks[elementPos] & ~(maskRight >>> endBits) | (value >>> endBits); + blocks[elementPos + 1] = blocks[elementPos + 1] & (~0L >>> endBits) | (value << (BLOCK_SIZE - endBits)); + } + + @Override + public int set(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + assert off + len <= arr.length; + + final int originalIndex = index; + final PackedInts.Encoder encoder = BulkOperation.of(PackedInts.Format.PACKED, bitsPerValue); + + // go to the next block where the value does not span across two blocks + final int offsetInBlocks = index % encoder.longValueCount(); + if (offsetInBlocks != 0) { + for (int i = offsetInBlocks; i < encoder.longValueCount() && len > 0; ++i) { + set(index++, arr[off++]); + --len; + } + if (len == 0) { + return index - originalIndex; + } + } + + // bulk set + assert index % encoder.longValueCount() == 0; + int blockIndex = (int) (((long) index * bitsPerValue) >>> BLOCK_BITS); + assert (((long) index * bitsPerValue) & MOD_MASK) == 0; + final int iterations = len / encoder.longValueCount(); + encoder.encode(arr, off, blocks, blockIndex, iterations); + final int setValues = iterations * encoder.longValueCount(); + index += setValues; + len -= setValues; + assert len >= 0; + + if (index > 
originalIndex) { + // stay at the block boundary + return index - originalIndex; + } else { + // no progress so far => already at a block boundary but no full block to get + assert index == originalIndex; + return super.set(index, arr, off, len); + } + } + + @Override + public String toString() { + return "Packed64(bitsPerValue=" + bitsPerValue + ",size=" + size() + ",blocks=" + blocks.length + ")"; + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.alignObjectSize( + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 3 * Integer.BYTES // bpvMinusBlockSize,valueCount,bitsPerValue + + Long.BYTES // maskRight + + RamUsageEstimator.NUM_BYTES_OBJECT_REF + ) // blocks ref + + RamUsageEstimator.sizeOf(blocks); + } + + @Override + public void fill(int fromIndex, int toIndex, long val) { + assert PackedInts.unsignedBitsRequired(val) <= getBitsPerValue(); + assert fromIndex <= toIndex; + + // minimum number of values that use an exact number of full blocks + final int nAlignedValues = 64 / gcd(64, bitsPerValue); + final int span = toIndex - fromIndex; + if (span <= 3 * nAlignedValues) { + // there needs be at least 2 * nAlignedValues aligned values for the + // block approach to be worth trying + super.fill(fromIndex, toIndex, val); + return; + } + + // fill the first values naively until the next block start + final int fromIndexModNAlignedValues = fromIndex % nAlignedValues; + if (fromIndexModNAlignedValues != 0) { + for (int i = fromIndexModNAlignedValues; i < nAlignedValues; ++i) { + set(fromIndex++, val); + } + } + assert fromIndex % nAlignedValues == 0; + + // compute the long[] blocks for nAlignedValues consecutive values and + // use them to set as many values as possible without applying any mask + // or shift + final int nAlignedBlocks = (nAlignedValues * bitsPerValue) >> 6; + final long[] nAlignedValuesBlocks; + { + XPacked64 values = new XPacked64(nAlignedValues, bitsPerValue); + for (int i = 0; i < nAlignedValues; ++i) { + values.set(i, val); 
+ } + nAlignedValuesBlocks = values.blocks; + assert nAlignedBlocks <= nAlignedValuesBlocks.length; + } + final int startBlock = (int) (((long) fromIndex * bitsPerValue) >>> 6); + final int endBlock = (int) (((long) toIndex * bitsPerValue) >>> 6); + for (int block = startBlock; block < endBlock; ++block) { + final long blockValue = nAlignedValuesBlocks[block % nAlignedBlocks]; + blocks[block] = blockValue; + } + + // fill the gap + for (int i = (int) (((long) endBlock << 6) / bitsPerValue); i < toIndex; ++i) { + set(i, val); + } + } + + private static int gcd(int a, int b) { + if (a < b) { + return gcd(b, a); + } else if (b == 0) { + return a; + } else { + return gcd(b, a % b); + } + } + + @Override + public void clear() { + Arrays.fill(blocks, 0L); + } +} diff --git a/server/src/main/java/org/apache/lucene/util/packed/XPacked64SingleBlock.java b/server/src/main/java/org/apache/lucene/util/packed/XPacked64SingleBlock.java new file mode 100644 index 0000000000000..ef7644c32a843 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/packed/XPacked64SingleBlock.java @@ -0,0 +1,574 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with this + * work for additional information regarding copyright ownership. The ASF + * licenses this file to You under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */ +package org.apache.lucene.util.packed; + +import java.io.IOException; +import java.util.Arrays; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.util.RamUsageEstimator; + +/** + * Forked from Lucene 8.x; removed in Lucene 9.0 + * + * @todo further investigate a better alternative + * + * This class is similar to {@link Packed64} except that it trades space for speed by ensuring that + * a single block needs to be read/written in order to read/write a value. + */ +abstract class XPacked64SingleBlock extends XPackedInts.MutableImpl { + + public static final int MAX_SUPPORTED_BITS_PER_VALUE = 32; + private static final int[] SUPPORTED_BITS_PER_VALUE = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 16, 21, 32 }; + + public static boolean isSupported(int bitsPerValue) { + return Arrays.binarySearch(SUPPORTED_BITS_PER_VALUE, bitsPerValue) >= 0; + } + + private static int requiredCapacity(int valueCount, int valuesPerBlock) { + return valueCount / valuesPerBlock + (valueCount % valuesPerBlock == 0 ? 
0 : 1); + } + + final long[] blocks; + + XPacked64SingleBlock(int valueCount, int bitsPerValue) { + super(valueCount, bitsPerValue); + assert isSupported(bitsPerValue); + final int valuesPerBlock = 64 / bitsPerValue; + blocks = new long[requiredCapacity(valueCount, valuesPerBlock)]; + } + + @Override + public void clear() { + Arrays.fill(blocks, 0L); + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.alignObjectSize( + RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + 2 * Integer.BYTES // valueCount,bitsPerValue + + RamUsageEstimator.NUM_BYTES_OBJECT_REF + ) // blocks ref + + RamUsageEstimator.sizeOf(blocks); + } + + @Override + public int get(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + assert off + len <= arr.length; + + final int originalIndex = index; + + // go to the next block boundary + final int valuesPerBlock = 64 / bitsPerValue; + final int offsetInBlock = index % valuesPerBlock; + if (offsetInBlock != 0) { + for (int i = offsetInBlock; i < valuesPerBlock && len > 0; ++i) { + arr[off++] = get(index++); + --len; + } + if (len == 0) { + return index - originalIndex; + } + } + + // bulk get + assert index % valuesPerBlock == 0; + @SuppressWarnings("deprecation") + final PackedInts.Decoder decoder = BulkOperation.of(PackedInts.Format.PACKED_SINGLE_BLOCK, bitsPerValue); + assert decoder.longBlockCount() == 1; + assert decoder.longValueCount() == valuesPerBlock; + final int blockIndex = index / valuesPerBlock; + final int nblocks = (index + len) / valuesPerBlock - blockIndex; + decoder.decode(blocks, blockIndex, arr, off, nblocks); + final int diff = nblocks * valuesPerBlock; + index += diff; + len -= diff; + + if (index > originalIndex) { + // stay at the block boundary + return index - originalIndex; + } else { + // no progress so far => already at a block boundary but no full block to + // get + 
assert index == originalIndex; + return super.get(index, arr, off, len); + } + } + + @Override + public int set(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + assert off + len <= arr.length; + + final int originalIndex = index; + + // go to the next block boundary + final int valuesPerBlock = 64 / bitsPerValue; + final int offsetInBlock = index % valuesPerBlock; + if (offsetInBlock != 0) { + for (int i = offsetInBlock; i < valuesPerBlock && len > 0; ++i) { + set(index++, arr[off++]); + --len; + } + if (len == 0) { + return index - originalIndex; + } + } + + // bulk set + assert index % valuesPerBlock == 0; + @SuppressWarnings("deprecation") + final BulkOperation op = BulkOperation.of(PackedInts.Format.PACKED_SINGLE_BLOCK, bitsPerValue); + assert op.longBlockCount() == 1; + assert op.longValueCount() == valuesPerBlock; + final int blockIndex = index / valuesPerBlock; + final int nblocks = (index + len) / valuesPerBlock - blockIndex; + op.encode(arr, off, blocks, blockIndex, nblocks); + final int diff = nblocks * valuesPerBlock; + index += diff; + len -= diff; + + if (index > originalIndex) { + // stay at the block boundary + return index - originalIndex; + } else { + // no progress so far => already at a block boundary but no full block to + // set + assert index == originalIndex; + return super.set(index, arr, off, len); + } + } + + @Override + public void fill(int fromIndex, int toIndex, long val) { + assert fromIndex >= 0; + assert fromIndex <= toIndex; + assert PackedInts.unsignedBitsRequired(val) <= bitsPerValue; + + final int valuesPerBlock = 64 / bitsPerValue; + if (toIndex - fromIndex <= valuesPerBlock << 1) { + // there needs to be at least one full block to set for the block + // approach to be worth trying + super.fill(fromIndex, toIndex, val); + return; + } + + // set values naively until the next block start + int 
fromOffsetInBlock = fromIndex % valuesPerBlock; + if (fromOffsetInBlock != 0) { + for (int i = fromOffsetInBlock; i < valuesPerBlock; ++i) { + set(fromIndex++, val); + } + assert fromIndex % valuesPerBlock == 0; + } + + // bulk set of the inner blocks + final int fromBlock = fromIndex / valuesPerBlock; + final int toBlock = toIndex / valuesPerBlock; + assert fromBlock * valuesPerBlock == fromIndex; + + long blockValue = 0L; + for (int i = 0; i < valuesPerBlock; ++i) { + blockValue = blockValue | (val << (i * bitsPerValue)); + } + Arrays.fill(blocks, fromBlock, toBlock, blockValue); + + // fill the gap + for (int i = valuesPerBlock * toBlock; i < toIndex; ++i) { + set(i, val); + } + } + + @SuppressWarnings("deprecation") + protected PackedInts.Format getFormat() { + return PackedInts.Format.PACKED_SINGLE_BLOCK; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(bitsPerValue=" + bitsPerValue + ",size=" + size() + ",blocks=" + blocks.length + ")"; + } + + public static XPacked64SingleBlock create(DataInput in, int valueCount, int bitsPerValue) throws IOException { + XPacked64SingleBlock reader = create(valueCount, bitsPerValue); + for (int i = 0; i < reader.blocks.length; ++i) { + reader.blocks[i] = in.readLong(); + } + return reader; + } + + public static XPacked64SingleBlock create(int valueCount, int bitsPerValue) { + switch (bitsPerValue) { + case 1: + return new XPacked64SingleBlock1(valueCount); + case 2: + return new XPacked64SingleBlock2(valueCount); + case 3: + return new XPacked64SingleBlock3(valueCount); + case 4: + return new XPacked64SingleBlock4(valueCount); + case 5: + return new XPacked64SingleBlock5(valueCount); + case 6: + return new XPacked64SingleBlock6(valueCount); + case 7: + return new XPacked64SingleBlock7(valueCount); + case 8: + return new XPacked64SingleBlock8(valueCount); + case 9: + return new XPacked64SingleBlock9(valueCount); + case 10: + return new XPacked64SingleBlock10(valueCount); + case 12: + 
return new XPacked64SingleBlock12(valueCount); + case 16: + return new XPacked64SingleBlock16(valueCount); + case 21: + return new XPacked64SingleBlock21(valueCount); + case 32: + return new XPacked64SingleBlock32(valueCount); + default: + throw new IllegalArgumentException("Unsupported number of bits per value: " + 32); + } + } + + static class XPacked64SingleBlock1 extends XPacked64SingleBlock { + + XPacked64SingleBlock1(int valueCount) { + super(valueCount, 1); + } + + @Override + public long get(int index) { + final int o = index >>> 6; + final int b = index & 63; + final int shift = b << 0; + return (blocks[o] >>> shift) & 1L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 6; + final int b = index & 63; + final int shift = b << 0; + blocks[o] = (blocks[o] & ~(1L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock2 extends XPacked64SingleBlock { + + XPacked64SingleBlock2(int valueCount) { + super(valueCount, 2); + } + + @Override + public long get(int index) { + final int o = index >>> 5; + final int b = index & 31; + final int shift = b << 1; + return (blocks[o] >>> shift) & 3L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 5; + final int b = index & 31; + final int shift = b << 1; + blocks[o] = (blocks[o] & ~(3L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock3 extends XPacked64SingleBlock { + + XPacked64SingleBlock3(int valueCount) { + super(valueCount, 3); + } + + @Override + public long get(int index) { + final int o = index / 21; + final int b = index % 21; + final int shift = b * 3; + return (blocks[o] >>> shift) & 7L; + } + + @Override + public void set(int index, long value) { + final int o = index / 21; + final int b = index % 21; + final int shift = b * 3; + blocks[o] = (blocks[o] & ~(7L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock4 extends XPacked64SingleBlock { + + 
XPacked64SingleBlock4(int valueCount) { + super(valueCount, 4); + } + + @Override + public long get(int index) { + final int o = index >>> 4; + final int b = index & 15; + final int shift = b << 2; + return (blocks[o] >>> shift) & 15L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 4; + final int b = index & 15; + final int shift = b << 2; + blocks[o] = (blocks[o] & ~(15L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock5 extends XPacked64SingleBlock { + + XPacked64SingleBlock5(int valueCount) { + super(valueCount, 5); + } + + @Override + public long get(int index) { + final int o = index / 12; + final int b = index % 12; + final int shift = b * 5; + return (blocks[o] >>> shift) & 31L; + } + + @Override + public void set(int index, long value) { + final int o = index / 12; + final int b = index % 12; + final int shift = b * 5; + blocks[o] = (blocks[o] & ~(31L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock6 extends XPacked64SingleBlock { + + XPacked64SingleBlock6(int valueCount) { + super(valueCount, 6); + } + + @Override + public long get(int index) { + final int o = index / 10; + final int b = index % 10; + final int shift = b * 6; + return (blocks[o] >>> shift) & 63L; + } + + @Override + public void set(int index, long value) { + final int o = index / 10; + final int b = index % 10; + final int shift = b * 6; + blocks[o] = (blocks[o] & ~(63L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock7 extends XPacked64SingleBlock { + + XPacked64SingleBlock7(int valueCount) { + super(valueCount, 7); + } + + @Override + public long get(int index) { + final int o = index / 9; + final int b = index % 9; + final int shift = b * 7; + return (blocks[o] >>> shift) & 127L; + } + + @Override + public void set(int index, long value) { + final int o = index / 9; + final int b = index % 9; + final int shift = b * 7; + blocks[o] = (blocks[o] & ~(127L << shift)) | 
(value << shift); + } + } + + static class XPacked64SingleBlock8 extends XPacked64SingleBlock { + + XPacked64SingleBlock8(int valueCount) { + super(valueCount, 8); + } + + @Override + public long get(int index) { + final int o = index >>> 3; + final int b = index & 7; + final int shift = b << 3; + return (blocks[o] >>> shift) & 255L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 3; + final int b = index & 7; + final int shift = b << 3; + blocks[o] = (blocks[o] & ~(255L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock9 extends XPacked64SingleBlock { + + XPacked64SingleBlock9(int valueCount) { + super(valueCount, 9); + } + + @Override + public long get(int index) { + final int o = index / 7; + final int b = index % 7; + final int shift = b * 9; + return (blocks[o] >>> shift) & 511L; + } + + @Override + public void set(int index, long value) { + final int o = index / 7; + final int b = index % 7; + final int shift = b * 9; + blocks[o] = (blocks[o] & ~(511L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock10 extends XPacked64SingleBlock { + + XPacked64SingleBlock10(int valueCount) { + super(valueCount, 10); + } + + @Override + public long get(int index) { + final int o = index / 6; + final int b = index % 6; + final int shift = b * 10; + return (blocks[o] >>> shift) & 1023L; + } + + @Override + public void set(int index, long value) { + final int o = index / 6; + final int b = index % 6; + final int shift = b * 10; + blocks[o] = (blocks[o] & ~(1023L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock12 extends XPacked64SingleBlock { + + XPacked64SingleBlock12(int valueCount) { + super(valueCount, 12); + } + + @Override + public long get(int index) { + final int o = index / 5; + final int b = index % 5; + final int shift = b * 12; + return (blocks[o] >>> shift) & 4095L; + } + + @Override + public void set(int index, long value) { + final int o = index 
/ 5; + final int b = index % 5; + final int shift = b * 12; + blocks[o] = (blocks[o] & ~(4095L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock16 extends XPacked64SingleBlock { + + XPacked64SingleBlock16(int valueCount) { + super(valueCount, 16); + } + + @Override + public long get(int index) { + final int o = index >>> 2; + final int b = index & 3; + final int shift = b << 4; + return (blocks[o] >>> shift) & 65535L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 2; + final int b = index & 3; + final int shift = b << 4; + blocks[o] = (blocks[o] & ~(65535L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock21 extends XPacked64SingleBlock { + + XPacked64SingleBlock21(int valueCount) { + super(valueCount, 21); + } + + @Override + public long get(int index) { + final int o = index / 3; + final int b = index % 3; + final int shift = b * 21; + return (blocks[o] >>> shift) & 2097151L; + } + + @Override + public void set(int index, long value) { + final int o = index / 3; + final int b = index % 3; + final int shift = b * 21; + blocks[o] = (blocks[o] & ~(2097151L << shift)) | (value << shift); + } + } + + static class XPacked64SingleBlock32 extends XPacked64SingleBlock { + + XPacked64SingleBlock32(int valueCount) { + super(valueCount, 32); + } + + @Override + public long get(int index) { + final int o = index >>> 1; + final int b = index & 1; + final int shift = b << 5; + return (blocks[o] >>> shift) & 4294967295L; + } + + @Override + public void set(int index, long value) { + final int o = index >>> 1; + final int b = index & 1; + final int shift = b << 5; + blocks[o] = (blocks[o] & ~(4294967295L << shift)) | (value << shift); + } + } +} diff --git a/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java b/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java new file mode 100644 index 0000000000000..9a277a7b5f2f4 --- /dev/null +++ 
b/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java @@ -0,0 +1,740 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.util.packed; + +import java.io.EOFException; +import java.io.IOException; +import java.util.Arrays; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.packed.PackedInts.Decoder; +import org.apache.lucene.util.packed.PackedInts.Encoder; +import org.apache.lucene.util.packed.PackedInts.Format; +import org.apache.lucene.util.packed.PackedInts.FormatAndBits; +import org.apache.lucene.util.packed.PackedInts.Reader; +import org.apache.lucene.util.packed.PackedInts.ReaderIterator; +import org.apache.lucene.util.packed.PackedInts.Writer; + +/** + * Forked from Lucene 8.x; removed in Lucene 8.9 + * + * Todo: further investigate a better alternative + * + * Simplistic compression for array of unsigned long values. Each value is {@code >= 0} and {@code + * <=} a specified maximum value. 
The values are stored as packed ints, with each value consuming a + * fixed number of bits. + */ +public class XPackedInts { + + /** At most 700% memory overhead, always select a direct implementation. */ + public static final float FASTEST = 7f; + + /** At most 50% memory overhead, always select a reasonably fast implementation. */ + public static final float FAST = 0.5f; + + /** At most 25% memory overhead. */ + public static final float DEFAULT = 0.25f; + + /** No memory overhead at all, but the returned implementation may be slow. */ + public static final float COMPACT = 0f; + + /** Default amount of memory to use for bulk operations. */ + public static final int DEFAULT_BUFFER_SIZE = 1024; // 1K + + public static final String CODEC_NAME = "PackedInts"; + public static final int VERSION_MONOTONIC_WITHOUT_ZIGZAG = 2; + public static final int VERSION_START = VERSION_MONOTONIC_WITHOUT_ZIGZAG; + public static final int VERSION_CURRENT = VERSION_MONOTONIC_WITHOUT_ZIGZAG; + + /** Check the validity of a version number. */ + public static void checkVersion(int version) { + if (version < VERSION_START) { + throw new IllegalArgumentException("Version is too old, should be at least " + VERSION_START + " (got " + version + ")"); + } else if (version > VERSION_CURRENT) { + throw new IllegalArgumentException("Version is too new, should be at most " + VERSION_CURRENT + " (got " + version + ")"); + } + } + + /** + * Try to find the {@link Format} and number of bits per value that would restore from disk the + * fastest reader whose overhead is less than acceptableOverheadRatio. + * + *

The acceptableOverheadRatio parameter makes sense for random-access {@link + * Reader}s. In case you only plan to perform sequential access on this stream later on, you + * should probably use {@link PackedInts#COMPACT}. + * + *

If you don't know how many values you are going to write, use valueCount = -1. + */ + public static FormatAndBits fastestFormatAndBits(int valueCount, int bitsPerValue, float acceptableOverheadRatio) { + if (valueCount == -1) { + valueCount = Integer.MAX_VALUE; + } + + acceptableOverheadRatio = Math.max(COMPACT, acceptableOverheadRatio); + acceptableOverheadRatio = Math.min(FASTEST, acceptableOverheadRatio); + float acceptableOverheadPerValue = acceptableOverheadRatio * bitsPerValue; // in bits + + int maxBitsPerValue = bitsPerValue + (int) acceptableOverheadPerValue; + + int actualBitsPerValue = -1; + + // rounded number of bits per value are usually the fastest + if (bitsPerValue <= 8 && maxBitsPerValue >= 8) { + actualBitsPerValue = 8; + } else if (bitsPerValue <= 16 && maxBitsPerValue >= 16) { + actualBitsPerValue = 16; + } else if (bitsPerValue <= 32 && maxBitsPerValue >= 32) { + actualBitsPerValue = 32; + } else if (bitsPerValue <= 64 && maxBitsPerValue >= 64) { + actualBitsPerValue = 64; + } else { + actualBitsPerValue = bitsPerValue; + } + + return new FormatAndBits(Format.PACKED, actualBitsPerValue); + } + + final static class XPackedWriter extends XWriter { + + boolean finished; + final PackedInts.Format format; + final BulkOperation encoder; + final byte[] nextBlocks; + final long[] nextValues; + final int iterations; + int off; + int written; + + XPackedWriter(PackedInts.Format format, DataOutput out, int valueCount, int bitsPerValue, int mem) { + super(out, valueCount, bitsPerValue); + this.format = format; + encoder = BulkOperation.of(format, bitsPerValue); + iterations = encoder.computeIterations(valueCount, mem); + nextBlocks = new byte[iterations * encoder.byteBlockCount()]; + nextValues = new long[iterations * encoder.byteValueCount()]; + off = 0; + written = 0; + finished = false; + } + + @Override + protected PackedInts.Format getFormat() { + return format; + } + + @Override + public void add(long v) throws IOException { + assert 
PackedInts.unsignedBitsRequired(v) <= bitsPerValue; + assert !finished; + if (valueCount != -1 && written >= valueCount) { + throw new EOFException("Writing past end of stream"); + } + nextValues[off++] = v; + if (off == nextValues.length) { + flush(); + } + ++written; + } + + @Override + public void finish() throws IOException { + assert !finished; + if (valueCount != -1) { + while (written < valueCount) { + add(0L); + } + } + flush(); + finished = true; + } + + private void flush() throws IOException { + encoder.encode(nextValues, 0, nextBlocks, 0, iterations); + final int blockCount = (int) format.byteCount(PackedInts.VERSION_CURRENT, off, bitsPerValue); + out.writeBytes(nextBlocks, blockCount); + Arrays.fill(nextValues, 0L); + off = 0; + } + + @Override + public int ord() { + return written - 1; + } + } + + /** + * A packed integer array that can be modified. + * + */ + public abstract static class Mutable extends Reader { + + /** + * @return the number of bits used to store any given value. Note: This does not imply that + * memory usage is {@code bitsPerValue * #values} as implementations are free to use + * non-space-optimal packing of bits. + */ + public abstract int getBitsPerValue(); + + /** + * Set the value at the given index in the array. + * + * @param index where the value should be positioned. + * @param value a value conforming to the constraints set by the array. + */ + public abstract void set(int index, long value); + + /** + * Bulk set: set at least one and at most len longs starting at off in + * arr into this mutable, starting at index. Returns the actual number + * of values that have been set. 
+ */ + public int set(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < size(); + len = Math.min(len, size() - index); + assert off + len <= arr.length; + + for (int i = index, o = off, end = index + len; i < end; ++i, ++o) { + set(i, arr[o]); + } + return len; + } + + /** + * Fill the mutable from fromIndex (inclusive) to toIndex (exclusive) + * with val. + */ + public void fill(int fromIndex, int toIndex, long val) { + assert val <= maxValue(getBitsPerValue()); + assert fromIndex <= toIndex; + for (int i = fromIndex; i < toIndex; ++i) { + set(i, val); + } + } + + /** Sets all values to 0. */ + public void clear() { + fill(0, size(), 0); + } + + /** + * Save this mutable into out. Instantiating a reader from the generated data will + * return a reader with the same number of bits per value. + */ + public void save(DataOutput out) throws IOException { + XWriter writer = getWriterNoHeader(out, getFormat(), size(), getBitsPerValue(), DEFAULT_BUFFER_SIZE); + writer.writeHeader(); + for (int i = 0; i < size(); ++i) { + writer.add(get(i)); + } + writer.finish(); + } + + /** The underlying format. */ + Format getFormat() { + return Format.PACKED; + } + } + + /** + * A simple base for Readers that keeps track of valueCount and bitsPerValue. 
+ * + */ + abstract static class ReaderImpl extends Reader { + protected final int valueCount; + + protected ReaderImpl(int valueCount) { + this.valueCount = valueCount; + } + + @Override + public abstract long get(int index); + + @Override + public final int size() { + return valueCount; + } + } + + abstract static class MutableImpl extends Mutable { + + protected final int valueCount; + protected final int bitsPerValue; + + protected MutableImpl(int valueCount, int bitsPerValue) { + this.valueCount = valueCount; + assert bitsPerValue > 0 && bitsPerValue <= 64 : "bitsPerValue=" + bitsPerValue; + this.bitsPerValue = bitsPerValue; + } + + @Override + public final int getBitsPerValue() { + return bitsPerValue; + } + + @Override + public final int size() { + return valueCount; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(valueCount=" + valueCount + ",bitsPerValue=" + bitsPerValue + ")"; + } + } + + /** A {@link Reader} which has all its values equal to 0 (bitsPerValue = 0). */ + public static final class NullReader extends Reader { + + private final int valueCount; + + /** Sole constructor. */ + public NullReader(int valueCount) { + this.valueCount = valueCount; + } + + @Override + public long get(int index) { + return 0; + } + + @Override + public int get(int index, long[] arr, int off, int len) { + assert len > 0 : "len must be > 0 (got " + len + ")"; + assert index >= 0 && index < valueCount; + len = Math.min(len, valueCount - index); + Arrays.fill(arr, off, off + len, 0); + return len; + } + + @Override + public int size() { + return valueCount; + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.alignObjectSize(RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + Integer.BYTES); + } + } + + /** + * A write-once Writer. 
+ * + */ + public abstract static class XWriter extends Writer { + protected XWriter(DataOutput out, int valueCount, int bitsPerValue) { + super(out, valueCount, bitsPerValue); + } + + void writeHeader() throws IOException { + assert valueCount != -1; + CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT); + out.writeVInt(bitsPerValue); + out.writeVInt(valueCount); + out.writeVInt(getFormat().getId()); + } + } + + /** + * Get a {@link Decoder}. + * + * @param format the format used to store packed ints + * @param version the compatibility version + * @param bitsPerValue the number of bits per value + * @return a decoder + */ + public static Decoder getDecoder(Format format, int version, int bitsPerValue) { + checkVersion(version); + return BulkOperation.of(format, bitsPerValue); + } + + /** + * Get an {@link Encoder}. + * + * @param format the format used to store packed ints + * @param version the compatibility version + * @param bitsPerValue the number of bits per value + * @return an encoder + */ + public static Encoder getEncoder(Format format, int version, int bitsPerValue) { + checkVersion(version); + return BulkOperation.of(format, bitsPerValue); + } + + /** + * Expert: Restore a {@link Reader} from a stream without reading metadata at the beginning of the + * stream. This method is useful to restore data from streams which have been created using {@link + * XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int)}. 
+ * + * @param in the stream to read data from, positioned at the beginning of the packed values + * @param format the format used to serialize + * @param version the version used to serialize the data + * @param valueCount how many values the stream holds + * @param bitsPerValue the number of bits per value + * @return a Reader + * @throws IOException If there is a low-level I/O error + * @see XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int) + */ + public static Reader getReaderNoHeader(DataInput in, Format format, int version, int valueCount, int bitsPerValue) throws IOException { + checkVersion(version); + switch (format) { + case PACKED_SINGLE_BLOCK: + return XPacked64SingleBlock.create(in, valueCount, bitsPerValue); + case PACKED: + return new XPacked64(version, in, valueCount, bitsPerValue); + default: + throw new AssertionError("Unknown Writer format: " + format); + } + } + + /** + * Restore a {@link Reader} from a stream. + * + * @param in the stream to read data from + * @return a Reader + * @throws IOException If there is a low-level I/O error + */ + public static Reader getReader(DataInput in) throws IOException { + final int version = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT); + final int bitsPerValue = in.readVInt(); + assert bitsPerValue > 0 && bitsPerValue <= 64 : "bitsPerValue=" + bitsPerValue; + final int valueCount = in.readVInt(); + final Format format = Format.byId(in.readVInt()); + + return getReaderNoHeader(in, format, version, valueCount, bitsPerValue); + } + + /** + * Expert: Restore a {@link ReaderIterator} from a stream without reading metadata at the + * beginning of the stream. This method is useful to restore data from streams which have been + * created using {@link XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int)}. 
+ * + * @param in the stream to read data from, positioned at the beginning of the packed values + * @param format the format used to serialize + * @param version the version used to serialize the data + * @param valueCount how many values the stream holds + * @param bitsPerValue the number of bits per value + * @param mem how much memory the iterator is allowed to use to read-ahead (likely to speed up + * iteration) + * @return a ReaderIterator + * @see XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int) + */ + public static ReaderIterator getReaderIteratorNoHeader( + DataInput in, + Format format, + int version, + int valueCount, + int bitsPerValue, + int mem + ) { + checkVersion(version); + return new PackedReaderIterator(format, version, valueCount, bitsPerValue, in, mem); + } + + /** + * Retrieve PackedInts as a {@link ReaderIterator} + * + * @param in positioned at the beginning of a stored packed int structure. + * @param mem how much memory the iterator is allowed to use to read-ahead (likely to speed up + * iteration) + * @return an iterator to access the values + * @throws IOException if the structure could not be retrieved. + */ + public static ReaderIterator getReaderIterator(DataInput in, int mem) throws IOException { + final int version = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT); + final int bitsPerValue = in.readVInt(); + assert bitsPerValue > 0 && bitsPerValue <= 64 : "bitsPerValue=" + bitsPerValue; + final int valueCount = in.readVInt(); + final Format format = Format.byId(in.readVInt()); + return getReaderIteratorNoHeader(in, format, version, valueCount, bitsPerValue, mem); + } + + /** + * Expert: Construct a direct {@link Reader} from a stream without reading metadata at the + * beginning of the stream. This method is useful to restore data from streams which have been + * created using {@link XPackedInts#getWriterNoHeader(DataOutput, Format, int, int, int)}. + * + *

The returned reader will have very little memory overhead, but every call to {@link + * Reader#get(int)} is likely to perform a disk seek. + * + * @param in the stream to read data from + * @param format the format used to serialize + * @param version the version used to serialize the data + * @param valueCount how many values the stream holds + * @param bitsPerValue the number of bits per value + * @return a direct Reader + */ + public static Reader getDirectReaderNoHeader(final IndexInput in, Format format, int version, int valueCount, int bitsPerValue) { + checkVersion(version); + switch (format) { + case PACKED: + return new DirectPackedReader(bitsPerValue, valueCount, in); + case PACKED_SINGLE_BLOCK: + return new DirectPacked64SingleBlockReader(bitsPerValue, valueCount, in); + default: + throw new AssertionError("Unknown format: " + format); + } + } + + /** + * Construct a direct {@link Reader} from an {@link IndexInput}. This method is useful to restore + * data from streams which have been created using {@link XPackedInts#getWriter(DataOutput, int, + * int, float)}. + * + *

The returned reader will have very little memory overhead, but every call to {@link + * Reader#get(int)} is likely to perform a disk seek. + * + * @param in the stream to read data from + * @return a direct Reader + * @throws IOException If there is a low-level I/O error + */ + public static Reader getDirectReader(IndexInput in) throws IOException { + final int version = CodecUtil.checkHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT); + final int bitsPerValue = in.readVInt(); + assert bitsPerValue > 0 && bitsPerValue <= 64 : "bitsPerValue=" + bitsPerValue; + final int valueCount = in.readVInt(); + final Format format = Format.byId(in.readVInt()); + return getDirectReaderNoHeader(in, format, version, valueCount, bitsPerValue); + } + + /** + * Create a packed integer array with the given amount of values initialized to 0. the valueCount + * and the bitsPerValue cannot be changed after creation. All Mutables known by this factory are + * kept fully in RAM. + * + *

Positive values of acceptableOverheadRatio will trade space for speed by + * selecting a faster but potentially less memory-efficient implementation. An + * acceptableOverheadRatio of {@link PackedInts#COMPACT} will make sure that the most + * memory-efficient implementation is selected whereas {@link PackedInts#FASTEST} will make sure + * that the fastest implementation is selected. + * + * @param valueCount the number of elements + * @param bitsPerValue the number of bits available for any given value + * @param acceptableOverheadRatio an acceptable overhead ratio per value + * @return a mutable packed integer array + */ + public static Mutable getMutable(int valueCount, int bitsPerValue, float acceptableOverheadRatio) { + final FormatAndBits formatAndBits = fastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); + return getMutable(valueCount, formatAndBits.bitsPerValue, formatAndBits.format); + } + + /** + * Same as {@link #getMutable(int, int, float)} with a pre-computed number of bits per value and + * format. + * + */ + public static Mutable getMutable(int valueCount, int bitsPerValue, PackedInts.Format format) { + assert valueCount >= 0; + switch (format) { + case PACKED_SINGLE_BLOCK: + return XPacked64SingleBlock.create(valueCount, bitsPerValue); + case PACKED: + return new XPacked64(valueCount, bitsPerValue); + default: + throw new AssertionError(); + } + } + + /** + * Expert: Create a packed integer array writer for the given output, format, value count, and + * number of bits per value. + * + *

The resulting stream will be long-aligned. This means that depending on the format which is + * used, up to 63 bits will be wasted. An easy way to make sure that no space is lost is to always + * use a valueCount that is a multiple of 64. + * + *

This method does not write any metadata to the stream, meaning that it is your + * responsibility to store it somewhere else in order to be able to recover data from the stream + * later on: + * + *

    + *
  • format (using {@link Format#getId()}), + *
  • valueCount, + *
  • bitsPerValue, + *
  • {@link #VERSION_CURRENT}. + *
+ * + *

It is possible to start writing values without knowing how many of them you are actually + * going to write. To do this, just pass -1 as valueCount. On the other + * hand, for any positive value of valueCount, the returned writer will make sure + * that you don't write more values than expected and pad the end of stream with zeros in case you + * have written less than valueCount when calling {@link Writer#finish()}. + * + *

The mem parameter lets you control how much memory can be used to buffer + * changes in memory before flushing to disk. High values of mem are likely to + * improve throughput. On the other hand, if speed is not that important to you, a value of + * 0 will use as little memory as possible and should already offer reasonable throughput. + * + * @param out the data output + * @param format the format to use to serialize the values + * @param valueCount the number of values + * @param bitsPerValue the number of bits per value + * @param mem how much memory (in bytes) can be used to speed up serialization + * @return a Writer + * @see XPackedInts#getReaderIteratorNoHeader(DataInput, Format, int, int, int, int) + * @see XPackedInts#getReaderNoHeader(DataInput, Format, int, int, int) + */ + public static XWriter getWriterNoHeader(DataOutput out, Format format, int valueCount, int bitsPerValue, int mem) { + return new XPackedWriter(format, out, valueCount, bitsPerValue, mem); + } + + /** + * Create a packed integer array writer for the given output, format, value count, and number of + * bits per value. + * + *

The resulting stream will be long-aligned. This means that depending on the format which is + * used under the hoods, up to 63 bits will be wasted. An easy way to make sure that no space is + * lost is to always use a valueCount that is a multiple of 64. + * + *

This method writes metadata to the stream, so that the resulting stream is sufficient to + * restore a {@link Reader} from it. You don't need to track valueCount or + * bitsPerValue by yourself. In case this is a problem, you should probably look at {@link + * #getWriterNoHeader(DataOutput, Format, int, int, int)}. + * + *

The acceptableOverheadRatio parameter controls how readers that will be + * restored from this stream trade space for speed by selecting a faster but potentially less + * memory-efficient implementation. An acceptableOverheadRatio of {@link + * PackedInts#COMPACT} will make sure that the most memory-efficient implementation is selected + * whereas {@link PackedInts#FASTEST} will make sure that the fastest implementation is selected. + * In case you are only interested in reading this stream sequentially later on, you should + * probably use {@link PackedInts#COMPACT}. + * + * @param out the data output + * @param valueCount the number of values + * @param bitsPerValue the number of bits per value + * @param acceptableOverheadRatio an acceptable overhead ratio per value + * @return a Writer + * @throws IOException If there is a low-level I/O error + */ + public static Writer getWriter(DataOutput out, int valueCount, int bitsPerValue, float acceptableOverheadRatio) throws IOException { + assert valueCount >= 0; + + final FormatAndBits formatAndBits = fastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); + final XWriter writer = getWriterNoHeader(out, formatAndBits.format, valueCount, formatAndBits.bitsPerValue, DEFAULT_BUFFER_SIZE); + writer.writeHeader(); + return writer; + } + + /** + * Returns how many bits are required to hold values up to and including maxValue NOTE: This + * method returns at least 1. + * + * @param maxValue the maximum value that should be representable. + * @return the amount of bits needed to represent values from 0 to maxValue. + */ + public static int bitsRequired(long maxValue) { + if (maxValue < 0) { + throw new IllegalArgumentException("maxValue must be non-negative (got: " + maxValue + ")"); + } + return unsignedBitsRequired(maxValue); + } + + /** + * Returns how many bits are required to store bits, interpreted as an unsigned + * value. NOTE: This method returns at least 1. 
+ * + */ + public static int unsignedBitsRequired(long bits) { + return Math.max(1, 64 - Long.numberOfLeadingZeros(bits)); + } + + /** + * Calculates the maximum unsigned long that can be expressed with the given number of bits. + * + * @param bitsPerValue the number of bits available for any given value. + * @return the maximum value for the given bits. + */ + public static long maxValue(int bitsPerValue) { + return bitsPerValue == 64 ? Long.MAX_VALUE : ~(~0L << bitsPerValue); + } + + /** + * Copy src[srcPos:srcPos+len] into dest[destPos:destPos+len] using at + * most mem bytes. + */ + public static void copy(Reader src, int srcPos, Mutable dest, int destPos, int len, int mem) { + assert srcPos + len <= src.size(); + assert destPos + len <= dest.size(); + final int capacity = mem >>> 3; + if (capacity == 0) { + for (int i = 0; i < len; ++i) { + dest.set(destPos++, src.get(srcPos++)); + } + } else if (len > 0) { + // use bulk operations + final long[] buf = new long[Math.min(capacity, len)]; + copy(src, srcPos, dest, destPos, len, buf); + } + } + + /** + * Same as {@link #copy(Reader, int, Mutable, int, int, int)} but using a pre-allocated buffer. 
+ */ + static void copy(Reader src, int srcPos, Mutable dest, int destPos, int len, long[] buf) { + assert buf.length > 0; + int remaining = 0; + while (len > 0) { + final int read = src.get(srcPos, buf, remaining, Math.min(len, buf.length - remaining)); + assert read > 0; + srcPos += read; + len -= read; + remaining += read; + final int written = dest.set(destPos, buf, 0, remaining); + assert written > 0; + destPos += written; + if (written < remaining) { + System.arraycopy(buf, written, buf, 0, remaining - written); + } + remaining -= written; + } + while (remaining > 0) { + final int written = dest.set(destPos, buf, 0, remaining); + destPos += written; + remaining -= written; + System.arraycopy(buf, written, buf, 0, remaining); + } + } + + /** + * Check that the block size is a power of 2, in the right bounds, and return its log in base 2. + */ + static int checkBlockSize(int blockSize, int minBlockSize, int maxBlockSize) { + if (blockSize < minBlockSize || blockSize > maxBlockSize) { + throw new IllegalArgumentException("blockSize must be >= " + minBlockSize + " and <= " + maxBlockSize + ", got " + blockSize); + } + if ((blockSize & (blockSize - 1)) != 0) { + throw new IllegalArgumentException("blockSize must be a power of two, got " + blockSize); + } + return Integer.numberOfTrailingZeros(blockSize); + } + + /** + * Return the number of blocks required to store size values on blockSize + * . + */ + static int numBlocks(long size, int blockSize) { + final int numBlocks = (int) (size / blockSize) + (size % blockSize == 0 ? 
0 : 1); + if ((long) numBlocks * blockSize < size) { + throw new IllegalArgumentException("size is too large for this block size"); + } + return numBlocks; + } +} diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index 88e04a6c5dd77..e8a06af50f525 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -80,7 +80,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_1_2_5 = new Version(1020599, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_3_0 = new Version(1030099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_4_0 = new Version(1040099, org.apache.lucene.util.Version.LUCENE_8_10_1); - public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_8_10_1); + public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_9_0_0); public static final Version CURRENT = V_2_0_0; public static Version readVersion(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/segments/IndicesSegmentResponse.java b/server/src/main/java/org/opensearch/action/admin/indices/segments/IndicesSegmentResponse.java index ed9c086d0481c..82fe438236d0f 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -154,13 +154,6 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t if (segment.getSegmentSort() != null) { toXContent(builder, segment.getSegmentSort()); } - if (segment.ramTree != null) { - builder.startArray(Fields.RAM_TREE); - for (Accountable child : segment.ramTree.getChildResources()) { - toXContent(builder, child); - } - builder.endArray(); - } if 
(segment.attributes != null && segment.attributes.isEmpty() == false) { builder.field("attributes", segment.attributes); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java index 43e97d3e8c437..8c3b1d20b33a0 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java @@ -224,7 +224,6 @@ static TopDocs mergeTopDocs(Collection results, int topN, int from) { if (results.isEmpty()) { return null; } - final boolean setShardIndex = false; final TopDocs topDocs = results.stream().findFirst().get(); final TopDocs mergedTopDocs; final int numShards = results.size(); @@ -234,15 +233,15 @@ static TopDocs mergeTopDocs(Collection results, int topN, int from) { CollapseTopFieldDocs firstTopDocs = (CollapseTopFieldDocs) topDocs; final Sort sort = new Sort(firstTopDocs.fields); final CollapseTopFieldDocs[] shardTopDocs = results.toArray(new CollapseTopFieldDocs[numShards]); - mergedTopDocs = CollapseTopFieldDocs.merge(sort, from, topN, shardTopDocs, setShardIndex); + mergedTopDocs = CollapseTopFieldDocs.merge(sort, from, topN, shardTopDocs, false); } else if (topDocs instanceof TopFieldDocs) { TopFieldDocs firstTopDocs = (TopFieldDocs) topDocs; final Sort sort = new Sort(firstTopDocs.fields); final TopFieldDocs[] shardTopDocs = results.toArray(new TopFieldDocs[numShards]); - mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs, setShardIndex); + mergedTopDocs = TopDocs.merge(sort, from, topN, shardTopDocs); } else { final TopDocs[] shardTopDocs = results.toArray(new TopDocs[numShards]); - mergedTopDocs = TopDocs.merge(from, topN, shardTopDocs, setShardIndex); + mergedTopDocs = TopDocs.merge(from, topN, shardTopDocs); } return mergedTopDocs; } diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchHelper.java 
b/server/src/main/java/org/opensearch/action/search/TransportSearchHelper.java index 76770245a3dbe..7ddfdfec34cb1 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportSearchHelper.java +++ b/server/src/main/java/org/opensearch/action/search/TransportSearchHelper.java @@ -32,10 +32,11 @@ package org.opensearch.action.search; -import org.apache.lucene.store.RAMOutputStream; import org.opensearch.LegacyESVersion; import org.opensearch.Version; +import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamInput; +import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.util.concurrent.AtomicArray; import org.opensearch.search.SearchPhaseResult; import org.opensearch.search.SearchShardTarget; @@ -57,7 +58,8 @@ static InternalScrollSearchRequest internalScrollSearchRequest(ShardSearchContex static String buildScrollId(AtomicArray searchPhaseResults, Version version) { boolean includeContextUUID = version.onOrAfter(LegacyESVersion.V_7_7_0); - try (RAMOutputStream out = new RAMOutputStream()) { + try { + BytesStreamOutput out = new BytesStreamOutput(); if (includeContextUUID) { out.writeString(INCLUDE_CONTEXT_UUID); } @@ -77,8 +79,7 @@ static String buildScrollId(AtomicArray searchPhase out.writeString(searchShardTarget.getNodeId()); } } - byte[] bytes = new byte[(int) out.getFilePointer()]; - out.writeTo(bytes, 0); + byte[] bytes = BytesReference.toBytes(out.bytes()); return Base64.getUrlEncoder().encodeToString(bytes); } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/server/src/main/java/org/opensearch/common/bytes/BytesArray.java b/server/src/main/java/org/opensearch/common/bytes/BytesArray.java index 832e8a3e54f16..69f715856c696 100644 --- a/server/src/main/java/org/opensearch/common/bytes/BytesArray.java +++ b/server/src/main/java/org/opensearch/common/bytes/BytesArray.java @@ -33,11 +33,11 @@ package org.opensearch.common.bytes; import 
org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.FutureArrays; import org.opensearch.common.io.stream.StreamInput; import java.io.IOException; import java.io.OutputStream; +import java.util.Arrays; public final class BytesArray extends AbstractBytesReference { @@ -96,7 +96,7 @@ public boolean equals(Object other) { } if (other instanceof BytesArray) { final BytesArray that = (BytesArray) other; - return FutureArrays.equals(bytes, offset, offset + length, that.bytes, that.offset, that.offset + that.length); + return Arrays.equals(bytes, offset, offset + length, that.bytes, that.offset, that.offset + that.length); } return super.equals(other); } diff --git a/server/src/main/java/org/opensearch/common/bytes/CompositeBytesReference.java b/server/src/main/java/org/opensearch/common/bytes/CompositeBytesReference.java index 2656a1225a07a..2a989e33e918f 100644 --- a/server/src/main/java/org/opensearch/common/bytes/CompositeBytesReference.java +++ b/server/src/main/java/org/opensearch/common/bytes/CompositeBytesReference.java @@ -35,7 +35,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefIterator; -import org.apache.lucene.util.FutureObjects; import org.apache.lucene.util.RamUsageEstimator; import java.io.IOException; @@ -100,7 +99,7 @@ public byte get(int index) { @Override public int indexOf(byte marker, int from) { final int remainingBytes = Math.max(length - from, 0); - FutureObjects.checkFromIndexSize(from, remainingBytes, length); + Objects.checkFromIndexSize(from, remainingBytes, length); int result = -1; if (length == 0) { @@ -132,7 +131,7 @@ public int length() { @Override public BytesReference slice(int from, int length) { - FutureObjects.checkFromIndexSize(from, length, this.length); + Objects.checkFromIndexSize(from, length, this.length); if (length == 0) { return BytesArray.EMPTY; diff --git a/server/src/main/java/org/opensearch/common/geo/GeoUtils.java 
b/server/src/main/java/org/opensearch/common/geo/GeoUtils.java index 92010a5d7f57e..1585e6cf2ad60 100644 --- a/server/src/main/java/org/opensearch/common/geo/GeoUtils.java +++ b/server/src/main/java/org/opensearch/common/geo/GeoUtils.java @@ -625,8 +625,8 @@ public static double arcDistance(double lat1, double lon1, double lat2, double l * 4 decimal degrees */ public static double planeDistance(double lat1, double lon1, double lat2, double lon2) { - double x = (lon2 - lon1) * SloppyMath.TO_RADIANS * Math.cos((lat2 + lat1) / 2.0 * SloppyMath.TO_RADIANS); - double y = (lat2 - lat1) * SloppyMath.TO_RADIANS; + double x = Math.toRadians(lon2 - lon1) * Math.cos(Math.toRadians((lat2 + lat1) / 2.0d)); + double y = Math.toRadians(lat2 - lat1); return Math.sqrt(x * x + y * y) * EARTH_MEAN_RADIUS; } diff --git a/server/src/main/java/org/opensearch/common/lucene/Lucene.java b/server/src/main/java/org/opensearch/common/lucene/Lucene.java index bdfed94a94299..6e17aab92f24b 100644 --- a/server/src/main/java/org/opensearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/opensearch/common/lucene/Lucene.java @@ -69,6 +69,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Terms; +import org.apache.lucene.index.VectorValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.FieldDoc; @@ -119,7 +120,7 @@ import java.util.Map; public class Lucene { - public static final String LATEST_CODEC = "Lucene87"; + public static final String LATEST_CODEC = "Lucene90"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; @@ -217,7 +218,7 @@ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Direc * since checksums don's match anymore. that's why we prune the name here directly. * We also want the caller to know if we were not able to remove a segments_N file. 
*/ - if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) { + if (file.startsWith(IndexFileNames.SEGMENTS)) { foundSegmentFiles++; if (file.equals(si.getSegmentsFileName()) == false) { directory.deleteFile(file); // remove all segment_N files except of the one we wanna keep @@ -260,7 +261,7 @@ public static IndexCommit getIndexCommit(SegmentInfos si, Directory directory) t public static void cleanLuceneIndex(Directory directory) throws IOException { try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { for (final String file : directory.listAll()) { - if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) { + if (file.startsWith(IndexFileNames.SEGMENTS)) { directory.deleteFile(file); // remove all segment_N files } } @@ -1110,6 +1111,16 @@ public CacheHelper getCoreCacheHelper() { public CacheHelper getReaderCacheHelper() { return null; } + + @Override + public VectorValues getVectorValues(String field) throws IOException { + return null; + } + + @Override + public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException { + return null; + } }; } } diff --git a/server/src/main/java/org/opensearch/common/lucene/MinimumScoreCollector.java b/server/src/main/java/org/opensearch/common/lucene/MinimumScoreCollector.java index a00e4efe21fbb..81c98c862d2b2 100644 --- a/server/src/main/java/org/opensearch/common/lucene/MinimumScoreCollector.java +++ b/server/src/main/java/org/opensearch/common/lucene/MinimumScoreCollector.java @@ -58,7 +58,7 @@ public MinimumScoreCollector(Collector collector, float minimumScore) { @Override public void setScorer(Scorable scorer) throws IOException { if (!(scorer instanceof ScoreCachingWrappingScorer)) { - scorer = new ScoreCachingWrappingScorer(scorer); + scorer = ScoreCachingWrappingScorer.wrap(scorer); } this.scorer = scorer; leafCollector.setScorer(scorer); diff --git 
a/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java index 5d39b0440231b..bc83f07f74103 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/MoreLikeThisQuery.java @@ -43,6 +43,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.similarities.ClassicSimilarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; @@ -377,4 +378,9 @@ public float getBoostTermsFactor() { public void setBoostTermsFactor(float boostTermsFactor) { this.boostTermsFactor = boostTermsFactor; } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java index 14c66df36b79e..711ff9860a5ce 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -43,6 +43,7 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.StringHelper; @@ -320,4 +321,9 @@ private boolean termArraysEquals(List termArrays1, List termArra public String getField() { return field; } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } diff --git 
a/server/src/main/java/org/opensearch/common/lucene/search/Queries.java b/server/src/main/java/org/opensearch/common/lucene/search/Queries.java index d119611e02c6a..ef10d1eb0d221 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/Queries.java @@ -44,7 +44,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; -import org.opensearch.Version; import org.opensearch.common.Nullable; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.TypeFieldMapper; @@ -84,9 +83,8 @@ public static Query newNestedFilter() { /** * Creates a new non-nested docs query - * @param indexVersionCreated the index version created since newer indices can identify a parent field more efficiently */ - public static Query newNonNestedFilter(Version indexVersionCreated) { + public static Query newNonNestedFilter() { return new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME); } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java index 8ac8eb3c41ae2..4b770529af4a8 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -39,13 +39,13 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanQuery; import 
org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java index f7b91db2e712f..09239b0108422 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java @@ -34,7 +34,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.FilterScorer; @@ -59,7 +58,6 @@ import java.util.List; import java.util.Locale; import java.util.Objects; -import java.util.Set; /** * A query that allows for a pluggable boost function / filter. 
If it matches @@ -370,11 +368,6 @@ class CustomBoostFactorWeight extends Weight { this.needsScores = needsScores; } - @Override - public void extractTerms(Set terms) { - subQueryWeight.extractTerms(terms); - } - private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); if (subQueryScorer == null) { diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java index 44c76e74d5a41..846cfd4b6431e 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/ScriptScoreQuery.java @@ -34,7 +34,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; @@ -57,7 +56,6 @@ import java.io.IOException; import java.util.Objects; -import java.util.Set; /** * A query that uses a script to compute documents' scores. 
@@ -136,11 +134,6 @@ public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { } } - @Override - public void extractTerms(Set terms) { - subQueryWeight.extractTerms(terms); - } - @Override public Scorer scorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); diff --git a/server/src/main/java/org/opensearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/opensearch/common/settings/KeyStoreWrapper.java index 1980584982579..900eda6975526 100644 --- a/server/src/main/java/org/opensearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/opensearch/common/settings/KeyStoreWrapper.java @@ -32,13 +32,12 @@ package org.opensearch.common.settings; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; -import org.apache.lucene.store.BufferedChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IOContext; -import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.NIOFSDirectory; import org.apache.lucene.util.SetOnce; @@ -238,8 +237,7 @@ public static KeyStoreWrapper load(Path configDir, String keystoreFileName) thro } NIOFSDirectory directory = new NIOFSDirectory(configDir); - try (IndexInput indexInput = directory.openInput(keystoreFileName, IOContext.READONCE)) { - ChecksumIndexInput input = new BufferedChecksumIndexInput(indexInput); + try (ChecksumIndexInput input = EndiannessReverserUtil.openChecksumInput(directory, keystoreFileName, IOContext.READONCE)) { final int formatVersion; try { formatVersion = CodecUtil.checkHeader(input, keystoreFileName, MIN_FORMAT_VERSION, FORMAT_VERSION); @@ -521,7 +519,7 @@ public synchronized void save(Path configDir, char[] password) throws Exception 
NIOFSDirectory directory = new NIOFSDirectory(configDir); // write to tmp file first, then overwrite String tmpFile = KEYSTORE_FILENAME + ".tmp"; - try (IndexOutput output = directory.createOutput(tmpFile, IOContext.DEFAULT)) { + try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, tmpFile, IOContext.DEFAULT)) { CodecUtil.writeHeader(output, KEYSTORE_FILENAME, FORMAT_VERSION); output.writeByte(password.length == 0 ? (byte) 0 : (byte) 1); diff --git a/server/src/main/java/org/opensearch/common/util/CuckooFilter.java b/server/src/main/java/org/opensearch/common/util/CuckooFilter.java index 4c4b7ac5d9011..e23b21936dfe3 100644 --- a/server/src/main/java/org/opensearch/common/util/CuckooFilter.java +++ b/server/src/main/java/org/opensearch/common/util/CuckooFilter.java @@ -34,6 +34,7 @@ import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.util.packed.PackedInts; +import org.apache.lucene.util.packed.XPackedInts; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -80,7 +81,7 @@ public class CuckooFilter implements Writeable { private static final int MAX_EVICTIONS = 500; static final int EMPTY = 0; - private final PackedInts.Mutable data; + private final XPackedInts.Mutable data; private final int numBuckets; private final int bitsPerEntry; private final int fingerprintMask; @@ -107,7 +108,7 @@ public class CuckooFilter implements Writeable { "Attempted to create [" + numBuckets * entriesPerBucket + "] entries which is > Integer.MAX_VALUE" ); } - this.data = PackedInts.getMutable(numBuckets * entriesPerBucket, bitsPerEntry, PackedInts.COMPACT); + this.data = XPackedInts.getMutable(numBuckets * entriesPerBucket, bitsPerEntry, PackedInts.COMPACT); // puts the bits at the right side of the mask, e.g. 
`0000000000001111` for bitsPerEntry = 4 this.fingerprintMask = (0x80000000 >> (bitsPerEntry - 1)) >>> (Integer.SIZE - bitsPerEntry); @@ -132,7 +133,7 @@ public class CuckooFilter implements Writeable { ); } // TODO this is probably super slow, but just used for testing atm - this.data = PackedInts.getMutable(numBuckets * entriesPerBucket, bitsPerEntry, PackedInts.COMPACT); + this.data = XPackedInts.getMutable(numBuckets * entriesPerBucket, bitsPerEntry, PackedInts.COMPACT); for (int i = 0; i < other.data.size(); i++) { data.set(i, other.data.get(i)); } @@ -148,7 +149,7 @@ public class CuckooFilter implements Writeable { this.fingerprintMask = (0x80000000 >> (bitsPerEntry - 1)) >>> (Integer.SIZE - bitsPerEntry); - data = (PackedInts.Mutable) PackedInts.getReader(new DataInput() { + data = (XPackedInts.Mutable) XPackedInts.getReader(new DataInput() { @Override public byte readByte() throws IOException { return in.readByte(); @@ -158,6 +159,11 @@ public byte readByte() throws IOException { public void readBytes(byte[] b, int offset, int len) throws IOException { in.readBytes(b, offset, len); } + + @Override + public void skipBytes(long numBytes) throws IOException { + in.skip(numBytes); + } }); } diff --git a/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java b/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java index 53b297e0f99fb..fd1dee46815a8 100644 --- a/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java +++ b/server/src/main/java/org/opensearch/gateway/MetadataStateFormat.java @@ -34,6 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; @@ -117,7 +118,7 @@ private void 
writeStateToFirstLocation(final T state, Path stateLocation, Direct throws WriteStateException { try { deleteFileIfExists(stateLocation, stateDir, tmpFileName); - try (IndexOutput out = stateDir.createOutput(tmpFileName, IOContext.DEFAULT)) { + try (IndexOutput out = EndiannessReverserUtil.createOutput(stateDir, tmpFileName, IOContext.DEFAULT)) { CodecUtil.writeHeader(out, STATE_FILE_CODEC, STATE_FILE_VERSION); out.writeInt(FORMAT.index()); try (XContentBuilder builder = newXContentBuilder(FORMAT, new IndexOutputOutputStream(out) { @@ -306,7 +307,7 @@ protected XContentBuilder newXContentBuilder(XContentType type, OutputStream str */ public final T read(NamedXContentRegistry namedXContentRegistry, Path file) throws IOException { try (Directory dir = newDirectory(file.getParent())) { - try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) { + try (IndexInput indexInput = EndiannessReverserUtil.openInput(dir, file.getFileName().toString(), IOContext.DEFAULT)) { // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. 
CodecUtil.checksumEntireFile(indexInput); CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, STATE_FILE_VERSION); diff --git a/server/src/main/java/org/opensearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/opensearch/index/cache/bitset/BitsetFilterCache.java index eb14b902d1aef..0bc77f0f0079b 100644 --- a/server/src/main/java/org/opensearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/opensearch/index/cache/bitset/BitsetFilterCache.java @@ -33,6 +33,7 @@ package org.opensearch.index.cache.bitset; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; @@ -147,7 +148,7 @@ public void clear(String reason) { } private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws ExecutionException { - final IndexReader.CacheHelper cacheHelper = context.reader().getCoreCacheHelper(); + final IndexReader.CacheHelper cacheHelper = FilterLeafReader.unwrap(context.reader()).getCoreCacheHelper(); if (cacheHelper == null) { throw new IllegalArgumentException("Reader " + context.reader() + " does not support caching"); } @@ -273,7 +274,7 @@ public IndexWarmer.TerminationHandle warmReader(final IndexShard indexShard, fin } if (hasNested) { - warmUp.add(Queries.newNonNestedFilter(indexSettings.getIndexVersionCreated())); + warmUp.add(Queries.newNonNestedFilter()); } final CountDownLatch latch = new CountDownLatch(reader.leaves().size() * warmUp.size()); diff --git a/server/src/main/java/org/opensearch/index/codec/CodecService.java b/server/src/main/java/org/opensearch/index/codec/CodecService.java index 136810c8cc2e2..d22c7239922bc 100644 --- a/server/src/main/java/org/opensearch/index/codec/CodecService.java +++ 
b/server/src/main/java/org/opensearch/index/codec/CodecService.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene87.Lucene87Codec; -import org.apache.lucene.codecs.lucene87.Lucene87Codec.Mode; +import org.apache.lucene.codecs.lucene90.Lucene90Codec; +import org.apache.lucene.codecs.lucene90.Lucene90Codec.Mode; import org.opensearch.common.Nullable; import org.opensearch.common.collect.MapBuilder; import org.opensearch.index.mapper.MapperService; @@ -60,8 +60,8 @@ public class CodecService { public CodecService(@Nullable MapperService mapperService, Logger logger) { final MapBuilder codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene87Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene87Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene90Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene90Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_COMPRESSION, mapperService, logger)); diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index 06f2216a28812..20a8ff7ca9170 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -36,8 +36,8 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat; -import org.apache.lucene.codecs.lucene87.Lucene87Codec; +import org.apache.lucene.codecs.lucene90.Lucene90Codec; 
+import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.opensearch.common.lucene.Lucene; import org.opensearch.index.mapper.CompletionFieldMapper; import org.opensearch.index.mapper.MappedFieldType; @@ -51,10 +51,10 @@ * per index in real time via the mapping API. If no specific postings format is * configured for a specific field the default postings format is used. */ -public class PerFieldMappingPostingFormatCodec extends Lucene87Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene90Codec { private final Logger logger; private final MapperService mapperService; - private final DocValuesFormat dvFormat = new Lucene80DocValuesFormat(Lucene80DocValuesFormat.Mode.BEST_COMPRESSION); + private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); static { assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMappingPostingFormatCodec.class) diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index 825d71d6d1024..bba1d8c069c68 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -51,7 +51,6 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; -import org.apache.lucene.util.Accountables; import org.apache.lucene.util.SetOnce; import org.opensearch.ExceptionsHelper; import org.opensearch.action.index.IndexRequest; @@ -997,9 +996,6 @@ private void fillSegmentInfo(SegmentReader segmentReader, boolean verbose, boole logger.trace(() -> new ParameterizedMessage("failed to get size for [{}]", info.info.name), e); } segment.segmentSort = info.info.getIndexSort(); - if (verbose) { - segment.ramTree = Accountables.namedAccountable("root", segmentReader); - } segment.attributes = info.info.getAttributes(); // TODO: add more fine grained mem stats 
values to per segment info here segments.put(info.info.name, segment); diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 1c5f06e85cb88..84090047d68e8 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -53,7 +53,6 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ReferenceManager; @@ -77,6 +76,7 @@ import org.opensearch.common.lucene.LoggerInfoStream; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; +import org.opensearch.common.lucene.search.Queries; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver; import org.opensearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo; @@ -2978,7 +2978,7 @@ private void restoreVersionMapAndCheckpointTracker(DirectoryReader directoryRead BooleanClause.Occur.MUST ) // exclude non-root nested documents - .add(new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME), BooleanClause.Occur.MUST) + .add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST) .build(); final Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1.0f); for (LeafReaderContext leaf : directoryReader.leaves()) { diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index ae1dc9e647073..98da554a5a1b0 100644 --- 
a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -47,7 +47,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.util.ArrayUtil; -import org.opensearch.Version; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.search.Queries; @@ -247,7 +246,7 @@ private void fillParallelArray(ScoreDoc[] scoreDocs, ParallelArray parallelArray private static Query operationsRangeQuery(long fromSeqNo, long toSeqNo) { return new BooleanQuery.Builder().add(LongPoint.newRangeQuery(SeqNoFieldMapper.NAME, fromSeqNo, toSeqNo), BooleanClause.Occur.MUST) - .add(Queries.newNonNestedFilter(Version.CURRENT), BooleanClause.Occur.MUST) // exclude non-root nested docs + .add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST) // exclude non-root nested docs .build(); } diff --git a/server/src/main/java/org/opensearch/index/engine/PrunePostingsMergePolicy.java b/server/src/main/java/org/opensearch/index/engine/PrunePostingsMergePolicy.java index aee87dd7e1a42..18f2799edc244 100644 --- a/server/src/main/java/org/opensearch/index/engine/PrunePostingsMergePolicy.java +++ b/server/src/main/java/org/opensearch/index/engine/PrunePostingsMergePolicy.java @@ -151,11 +151,6 @@ public ImpactsEnum impacts(int flags) throws IOException { public int size() { return postingsReader.size(); } - - @Override - public long ramBytesUsed() { - return postingsReader.ramBytesUsed(); - } }; } diff --git a/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java index b1d08d2605640..0a885f23629e6 100644 --- a/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java +++ 
b/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -46,7 +46,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.StoredFieldVisitor; -import org.apache.lucene.search.ConjunctionDISI; +import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -119,7 +119,7 @@ public NumericDocValues getNumeric(FieldInfo field) throws IOException { // we can't return null here lucenes DocIdMerger expects an instance intersection = DocIdSetIterator.empty(); } else { - intersection = ConjunctionDISI.intersectIterators( + intersection = ConjunctionUtils.intersectIterators( Arrays.asList(numeric, new BitSetIterator(recoverySourceToKeep, recoverySourceToKeep.length())) ); } @@ -202,11 +202,6 @@ public void checkIntegrity() throws IOException { public void close() throws IOException { in.close(); } - - @Override - public long ramBytesUsed() { - return in.ramBytesUsed(); - } } private abstract static class FilterStoredFieldsReader extends StoredFieldsReader { @@ -217,11 +212,6 @@ private abstract static class FilterStoredFieldsReader extends StoredFieldsReade this.in = fieldsReader; } - @Override - public long ramBytesUsed() { - return in.ramBytesUsed(); - } - @Override public void close() throws IOException { in.close(); @@ -294,7 +284,7 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException { } @Override - public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException { + public void stringField(FieldInfo fieldInfo, String value) throws IOException { visitor.stringField(fieldInfo, value); } diff --git a/server/src/main/java/org/opensearch/index/engine/Segment.java b/server/src/main/java/org/opensearch/index/engine/Segment.java index 2b824c847f75f..4874d0a30196f 100644 --- 
a/server/src/main/java/org/opensearch/index/engine/Segment.java +++ b/server/src/main/java/org/opensearch/index/engine/Segment.java @@ -39,7 +39,6 @@ import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedNumericSelector; import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Accountables; import org.opensearch.Version; import org.opensearch.common.Nullable; import org.opensearch.common.io.stream.StreamInput; @@ -49,9 +48,7 @@ import org.opensearch.common.unit.ByteSizeValue; import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; -import java.util.List; import java.util.Map; import java.util.Objects; @@ -68,7 +65,6 @@ public class Segment implements Writeable { public Boolean compound = null; public String mergeId; public Sort segmentSort; - public Accountable ramTree = null; public Map attributes; private static final ByteSizeValue ZERO_BYTE_SIZE_VALUE = new ByteSizeValue(0L); @@ -91,7 +87,7 @@ public Segment(StreamInput in) throws IOException { } if (in.readBoolean()) { // verbose mode - ramTree = readRamTree(in); + readRamTree(in); } segmentSort = readSegmentSort(in); if (in.readBoolean()) { @@ -207,12 +203,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().before(Version.V_2_0_0)) { out.writeLong(0L); } - - boolean verbose = ramTree != null; - out.writeBoolean(verbose); - if (verbose) { - writeRamTree(out, ramTree); - } + out.writeBoolean(false); writeSegmentSort(out, segmentSort); boolean hasAttributes = attributes != null; out.writeBoolean(hasAttributes); @@ -312,18 +303,13 @@ private void writeSegmentSort(StreamOutput out, Sort sort) throws IOException { } } - private Accountable readRamTree(StreamInput in) throws IOException { - final String name = in.readString(); - final long bytes = in.readVLong(); + private static void readRamTree(StreamInput in) throws IOException { + in.readString(); + in.readVLong(); int numChildren = in.readVInt(); - 
if (numChildren == 0) { - return Accountables.namedAccountable(name, bytes); - } - List children = new ArrayList<>(numChildren); - while (numChildren-- > 0) { - children.add(readRamTree(in)); + for (int i = 0; i < numChildren; i++) { + readRamTree(in); } - return Accountables.namedAccountable(name, children, bytes); } // the ram tree is written recursively since the depth is fairly low (5 or 6) diff --git a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java index ddf40e55be4b3..07fe3f9230de4 100644 --- a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java +++ b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java @@ -46,6 +46,9 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Terms; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.index.VectorValues; +import org.apache.lucene.search.TopDocs; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.opensearch.common.util.set.Sets; @@ -56,7 +59,6 @@ import org.opensearch.index.translog.Translog; import java.io.IOException; -import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.Set; @@ -79,6 +81,8 @@ public final class TranslogLeafReader extends LeafReader { 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); private static final FieldInfo FAKE_ROUTING_FIELD = new FieldInfo( @@ -94,6 +98,8 @@ public final class TranslogLeafReader extends LeafReader { 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); private static final FieldInfo FAKE_ID_FIELD = new FieldInfo( @@ -109,6 +115,8 @@ public final class TranslogLeafReader extends LeafReader { 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); public static Set ALL_FIELD_NAMES = Sets.newHashSet(FAKE_SOURCE_FIELD.name, 
FAKE_ROUTING_FIELD.name, FAKE_ID_FIELD.name); @@ -208,7 +216,7 @@ public void document(int docID, StoredFieldVisitor visitor) throws IOException { visitor.binaryField(FAKE_SOURCE_FIELD, operation.source().toBytesRef().bytes); } if (operation.routing() != null && visitor.needsField(FAKE_ROUTING_FIELD) == StoredFieldVisitor.Status.YES) { - visitor.stringField(FAKE_ROUTING_FIELD, operation.routing().getBytes(StandardCharsets.UTF_8)); + visitor.stringField(FAKE_ROUTING_FIELD, operation.routing()); } if (visitor.needsField(FAKE_ID_FIELD) == StoredFieldVisitor.Status.YES) { BytesRef bytesRef = Uid.encodeId(operation.id()); @@ -227,4 +235,14 @@ protected void doClose() { public CacheHelper getReaderCacheHelper() { throw new UnsupportedOperationException(); } + + @Override + public VectorValues getVectorValues(String field) throws IOException { + return getVectorValues(field); + } + + @Override + public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException { + throw new UnsupportedOperationException(); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/IndexNumericFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/IndexNumericFieldData.java index 578657bea8818..dd987e9f79546 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/IndexNumericFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/IndexNumericFieldData.java @@ -129,6 +129,8 @@ public final SortField sortField( : SortedNumericSelector.Type.MIN; SortField sortField = new SortedNumericSortField(getFieldName(), getNumericType().sortFieldType, reverse, selectorType); sortField.setMissingValue(source.missingObject(missingValue, reverse)); + // todo: remove since deprecated + sortField.setOptimizeSortWithPoints(false); return sortField; } diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java 
b/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java index f680ceffc517d..e661b61548cd0 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -31,10 +31,6 @@ package org.opensearch.index.fielddata.plain; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.codecs.blocktree.FieldReader; -import org.apache.lucene.codecs.blocktree.Stats; import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -69,7 +65,6 @@ import java.io.IOException; public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { - private static final Logger logger = LogManager.getLogger(PagedBytesIndexFieldData.class); private final double minFrequency, maxFrequency; private final int minSegmentSize; @@ -226,36 +221,6 @@ public long bytesPerValue(BytesRef term) { return bytes; } - /** - * @return the estimate for loading the entire term set into field data, or 0 if unavailable - */ - public long estimateStringFieldData() { - try { - LeafReader reader = context.reader(); - Terms terms = reader.terms(getFieldName()); - - final Terms fieldTerms = reader.terms(getFieldName()); - - if (fieldTerms instanceof FieldReader) { - final Stats stats = ((FieldReader) fieldTerms).getStats(); - long totalTermBytes = stats.totalTermBytes; - if (logger.isTraceEnabled()) { - logger.trace( - "totalTermBytes: {}, terms.size(): {}, terms.getSumDocFreq(): {}", - totalTermBytes, - terms.size(), - terms.getSumDocFreq() - ); - } - long totalBytes = totalTermBytes + (2 * terms.size()) + (4 * terms.getSumDocFreq()); - return totalBytes; - } - } catch (Exception e) { - logger.warn("Unable to estimate memory overhead", e); - } - return 0; - } - /** * Determine whether the 
BlockTreeTermsReader.FieldReader can be used * for estimating the field data, adding the estimate to the circuit @@ -271,25 +236,7 @@ public TermsEnum beforeLoad(Terms terms) throws IOException { TermsEnum iterator = terms.iterator(); TermsEnum filteredIterator = filter(terms, iterator, reader); - final boolean filtered = iterator != filteredIterator; - iterator = filteredIterator; - - if (filtered) { - if (logger.isTraceEnabled()) { - logger.trace("Filter exists, can't circuit break normally, using RamAccountingTermsEnum"); - } - return new RamAccountingTermsEnum(iterator, breaker, this, this.fieldName); - } else { - estimatedBytes = this.estimateStringFieldData(); - // If we weren't able to estimate, wrap in the RamAccountingTermsEnum - if (estimatedBytes == 0) { - iterator = new RamAccountingTermsEnum(iterator, breaker, this, this.fieldName); - } else { - breaker.addEstimateBytesAndMaybeBreak(estimatedBytes, fieldName); - } - - return iterator; - } + return new RamAccountingTermsEnum(filteredIterator, breaker, this, this.fieldName); } private TermsEnum filter(Terms terms, TermsEnum iterator, LeafReader reader) throws IOException { diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java index b050e0d3dfa9f..714b762f81fb7 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -32,7 +32,6 @@ package org.opensearch.index.fielddata.plain; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.LeafReader; @@ -53,6 +52,7 @@ import org.opensearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; import org.opensearch.index.mapper.DocValueFetcher; import 
org.opensearch.indices.breaker.CircuitBreakerService; +import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.opensearch.search.DocValueFormat; import org.opensearch.search.MultiValueMode; import org.opensearch.search.aggregations.support.ValuesSourceType; diff --git a/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java b/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java index a51137b4a4f69..eb094ce1df9a4 100644 --- a/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java +++ b/server/src/main/java/org/opensearch/index/fieldvisitor/FieldsVisitor.java @@ -39,18 +39,17 @@ import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.IgnoredFieldMapper; import org.opensearch.index.mapper.MappedFieldType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.RoutingFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.mapper.Uid; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableSet; @@ -96,9 +95,9 @@ public Status needsField(FieldInfo fieldInfo) { return requiredFields.isEmpty() ? 
Status.STOP : Status.NO; } - public void postProcess(MapperService mapperService) { + public final void postProcess(Function fieldTypeLookup) { for (Map.Entry> entry : fields().entrySet()) { - MappedFieldType fieldType = mapperService.fieldType(entry.getKey()); + MappedFieldType fieldType = fieldTypeLookup.apply(entry.getKey()); if (fieldType == null) { throw new IllegalStateException("Field [" + entry.getKey() + "] exists in the index but not in mappings"); } @@ -125,10 +124,9 @@ public void binaryField(FieldInfo fieldInfo, BytesRef value) { } @Override - public void stringField(FieldInfo fieldInfo, byte[] bytes) { + public void stringField(FieldInfo fieldInfo, String value) { assert IdFieldMapper.NAME.equals(fieldInfo.name) == false : "_id field must go through binaryField"; assert sourceFieldName.equals(fieldInfo.name) == false : "source field must go through binaryField"; - final String value = new String(bytes, StandardCharsets.UTF_8); addValue(fieldInfo.name, value); } diff --git a/server/src/main/java/org/opensearch/index/fieldvisitor/SingleFieldsVisitor.java b/server/src/main/java/org/opensearch/index/fieldvisitor/SingleFieldsVisitor.java index a3aacd823fbb8..3214ec2780701 100644 --- a/server/src/main/java/org/opensearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/server/src/main/java/org/opensearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -38,7 +38,6 @@ import org.opensearch.index.mapper.Uid; import org.apache.lucene.util.BytesRef; -import java.nio.charset.StandardCharsets; import java.util.List; /** @@ -84,8 +83,8 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) { } @Override - public void stringField(FieldInfo fieldInfo, byte[] bytes) { - addValue(new String(bytes, StandardCharsets.UTF_8)); + public void stringField(FieldInfo fieldInfo, String value) { + addValue(value); } @Override diff --git a/server/src/main/java/org/opensearch/index/get/ShardGetService.java b/server/src/main/java/org/opensearch/index/get/ShardGetService.java 
index a877b0085816a..e63d80336bc7a 100644 --- a/server/src/main/java/org/opensearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/opensearch/index/get/ShardGetService.java @@ -39,6 +39,7 @@ import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; +import org.apache.lucene.index.VectorSimilarityFunction; import org.opensearch.OpenSearchException; import org.opensearch.common.Nullable; import org.opensearch.common.bytes.BytesReference; @@ -324,6 +325,8 @@ private GetResult innerGetLoadFromStoredFields( 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); StoredFieldVisitor.Status status = fieldVisitor.needsField(fieldInfo); @@ -347,7 +350,7 @@ private GetResult innerGetLoadFromStoredFields( // put stored fields into result objects if (!fieldVisitor.fields().isEmpty()) { - fieldVisitor.postProcess(mapperService); + fieldVisitor.postProcess(mapperService::fieldType); documentFields = new HashMap<>(); metadataFields = new HashMap<>(); for (Map.Entry> entry : fieldVisitor.fields().entrySet()) { diff --git a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java index 5d0e2349aca6a..2d0f59fc76ea4 100644 --- a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; -import org.apache.lucene.search.suggest.document.Completion84PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.CompletionQuery; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -311,7 
+311,7 @@ public ContextMappings getContextMappings() { */ public static synchronized PostingsFormat postingsFormat() { if (postingsFormat == null) { - postingsFormat = new Completion84PostingsFormat(); + postingsFormat = new Completion90PostingsFormat(); } return postingsFormat; } diff --git a/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java index 9aa7b019a8e61..e2239069603e6 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java @@ -37,9 +37,9 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.PointValues; +import org.apache.lucene.sandbox.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; -import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.Query; import org.opensearch.OpenSearchParseException; import org.opensearch.Version; diff --git a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java index 575cfc8ca424b..645115bfe26e2 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java @@ -38,6 +38,8 @@ import org.apache.lucene.index.PrefixCodedTerms.TermIterator; import org.apache.lucene.index.Term; import org.apache.lucene.queries.intervals.IntervalsSource; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -48,8 +50,6 @@ import 
org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchParseException; import org.opensearch.common.Nullable; diff --git a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java index 368f4ae4adea3..bf7b1ae70c711 100644 --- a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java @@ -37,14 +37,14 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StoredField; +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.apache.lucene.sandbox.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; -import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; diff --git a/server/src/main/java/org/opensearch/index/mapper/RangeType.java b/server/src/main/java/org/opensearch/index/mapper/RangeType.java index 9b0c374f8b54e..5f666dece7ed2 100644 --- a/server/src/main/java/org/opensearch/index/mapper/RangeType.java +++ b/server/src/main/java/org/opensearch/index/mapper/RangeType.java @@ -46,7 +46,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import 
org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.FutureArrays; import org.opensearch.common.Nullable; import org.opensearch.common.geo.ShapeRelation; import org.opensearch.common.network.InetAddresses; @@ -62,6 +61,7 @@ import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Set; import java.util.function.BiFunction; @@ -196,7 +196,7 @@ private Query createQuery( ) { byte[] lowerBytes = InetAddressPoint.encode((InetAddress) lower); byte[] upperBytes = InetAddressPoint.encode((InetAddress) upper); - if (FutureArrays.compareUnsigned(lowerBytes, 0, lowerBytes.length, upperBytes, 0, upperBytes.length) > 0) { + if (Arrays.compareUnsigned(lowerBytes, 0, lowerBytes.length, upperBytes, 0, upperBytes.length) > 0) { throw new IllegalArgumentException("Range query `from` value (" + lower + ") is greater than `to` value (" + upper + ")"); } InetAddress correctedFrom = includeLower ? 
(InetAddress) lower : nextUp(lower); @@ -204,7 +204,7 @@ private Query createQuery( ; lowerBytes = InetAddressPoint.encode(correctedFrom); upperBytes = InetAddressPoint.encode(correctedTo); - if (FutureArrays.compareUnsigned(lowerBytes, 0, lowerBytes.length, upperBytes, 0, upperBytes.length) > 0) { + if (Arrays.compareUnsigned(lowerBytes, 0, lowerBytes.length, upperBytes, 0, upperBytes.length) > 0) { return new MatchNoDocsQuery("float range didn't intersect anything"); } else { return querySupplier.apply(correctedFrom, correctedTo); diff --git a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java index bcb3134e532d7..049c85dc910ed 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java @@ -48,6 +48,12 @@ import org.apache.lucene.index.Term; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -59,12 +65,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import 
org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; @@ -1067,8 +1067,9 @@ public static Query createPhrasePrefixQuery( } if (terms.length == 1) { - Term[] newTerms = Arrays.stream(terms[0]).map(term -> new Term(prefixField, term.bytes())).toArray(Term[]::new); - return new SynonymQuery(newTerms); + SynonymQuery.Builder sb = new SynonymQuery.Builder(prefixField); + Arrays.stream(terms[0]).map(term -> new Term(prefixField, term.bytes())).forEach(sb::addTerm); + return sb.build(); } SpanNearQuery.Builder spanQuery = new SpanNearQuery.Builder(field, true); diff --git a/server/src/main/java/org/opensearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/AbstractQueryBuilder.java index 7088b914adc22..3ab3d099f0778 100644 --- a/server/src/main/java/org/opensearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/AbstractQueryBuilder.java @@ -35,8 +35,6 @@ import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanBoostQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; @@ -116,9 +114,7 @@ public final Query toQuery(QueryShardContext context) throws IOException { Query query = doToQuery(context); if (query != null) { if (boost != DEFAULT_BOOST) { - if (query instanceof SpanQuery) { - query = new SpanBoostQuery((SpanQuery) query, boost); - } else if (query instanceof MatchNoDocsQuery == false) { + if (query instanceof MatchNoDocsQuery == false) { query = new BoostQuery(query, boost); } } diff --git a/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java 
b/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java index ebc451a4493ed..1b095c6130a7c 100644 --- a/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; @@ -135,6 +135,7 @@ public static FieldMaskingSpanQueryBuilder fromXContent(XContentParser parser) t ); } inner = (SpanQueryBuilder) query; + SpanQueryBuilderUtil.checkNoBoost(SPAN_FIELD_MASKING_FIELD.getPreferredName(), currentFieldName, parser, inner); } else { throw new ParsingException( parser.getTokenLocation(), @@ -176,7 +177,7 @@ public static FieldMaskingSpanQueryBuilder fromXContent(XContentParser parser) t } @Override - protected SpanQuery doToQuery(QueryShardContext context) throws IOException { + protected Query doToQuery(QueryShardContext context) throws IOException { String fieldInQuery = fieldName; MappedFieldType fieldType = context.fieldMapper(fieldName); if (fieldType != null) { diff --git a/server/src/main/java/org/opensearch/index/query/InnerHitContextBuilder.java b/server/src/main/java/org/opensearch/index/query/InnerHitContextBuilder.java index 712f106545c41..10efb18dc0cfa 100644 --- a/server/src/main/java/org/opensearch/index/query/InnerHitContextBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/InnerHitContextBuilder.java @@ -109,13 +109,13 @@ protected void setupInnerHitsContext(QueryShardContext queryShardContext, InnerH } if (innerHitBuilder.getDocValueFields() != null) { 
FetchDocValuesContext docValuesContext = FetchDocValuesContext.create( - queryShardContext.getMapperService(), + queryShardContext::simpleMatchToIndexNames, + queryShardContext.getIndexSettings().getMaxDocvalueFields(), innerHitBuilder.getDocValueFields() ); innerHitsContext.docValuesContext(docValuesContext); } if (innerHitBuilder.getFetchFields() != null) { - String indexName = queryShardContext.index().getName(); FetchFieldsContext fieldsContext = new FetchFieldsContext(innerHitBuilder.getFetchFields()); innerHitsContext.fetchFieldsContext(fieldsContext); } diff --git a/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java index 0fada4f4a1363..52a7635d708f5 100644 --- a/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/NestedQueryBuilder.java @@ -301,7 +301,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { Query innerQuery; ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); if (objectMapper == null) { - parentFilter = context.bitsetFilter(Queries.newNonNestedFilter(context.indexVersionCreated())); + parentFilter = context.bitsetFilter(Queries.newNonNestedFilter()); } else { parentFilter = context.bitsetFilter(objectMapper.nestedTypeFilter()); } @@ -416,7 +416,7 @@ public TopDocsAndMaxScore topDocs(SearchHit hit) throws IOException { Query rawParentFilter; if (parentObjectMapper == null) { - rawParentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated()); + rawParentFilter = Queries.newNonNestedFilter(); } else { rawParentFilter = parentObjectMapper.nestedTypeFilter(); } diff --git a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java index 8739e48eb411b..1d781060dc6bc 100644 --- 
a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java @@ -38,6 +38,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; @@ -180,6 +181,11 @@ public String toString(String field) { return buffer.toString(); } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public boolean equals(Object obj) { if (sameClassAs(obj) == false) return false; diff --git a/server/src/main/java/org/opensearch/index/query/SpanContainingQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanContainingQueryBuilder.java index 131b27b6b6ad9..9168f7a29f2dc 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanContainingQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanContainingQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanContainingQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanContainingQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; @@ -48,7 +48,7 @@ import static org.opensearch.index.query.SpanQueryBuilder.SpanQueryBuilderUtil.checkNoBoost; /** - * Builder for {@link org.apache.lucene.search.spans.SpanContainingQuery}. + * Builder for {@link org.apache.lucene.queries.spans.SpanContainingQuery}. 
*/ public class SpanContainingQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { public static final String NAME = "span_containing"; diff --git a/server/src/main/java/org/opensearch/index/query/SpanFirstQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanFirstQueryBuilder.java index 087382d570411..e3d6315b5d18c 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanFirstQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanFirstQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanFirstQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanFirstQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanMultiTermQueryBuilder.java index 2783e0b9b8777..9411ba9c59087 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanMultiTermQueryBuilder.java @@ -32,13 +32,13 @@ package org.opensearch.index.query; import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopTermsRewrite; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import 
org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanNearQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanNearQueryBuilder.java index 264a3f87dd2dd..a648eedc9f3b5 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanNearQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanNearQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; diff --git a/server/src/main/java/org/opensearch/index/query/SpanNotQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanNotQueryBuilder.java index 4db989e29b841..c43430e00de98 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanNotQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanNotQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanNotQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanNotQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanOrQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanOrQueryBuilder.java index 1e4a3b45f0df8..0a08f778cf889 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanOrQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanOrQueryBuilder.java 
@@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanTermQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanTermQueryBuilder.java index 07091bf0eb1b6..02a0f55685ca4 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanTermQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanTermQueryBuilder.java @@ -33,9 +33,9 @@ package org.opensearch.index.query; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/opensearch/index/query/SpanWithinQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/SpanWithinQueryBuilder.java index 83bf3d1b90eea..a8ab2a8831f55 100644 --- a/server/src/main/java/org/opensearch/index/query/SpanWithinQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/SpanWithinQueryBuilder.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanWithinQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanWithinQuery; import 
org.opensearch.common.ParseField; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; @@ -48,7 +48,7 @@ import static org.opensearch.index.query.SpanQueryBuilder.SpanQueryBuilderUtil.checkNoBoost; /** - * Builder for {@link org.apache.lucene.search.spans.SpanWithinQuery}. + * Builder for {@link org.apache.lucene.queries.spans.SpanWithinQuery}. */ public class SpanWithinQueryBuilder extends AbstractQueryBuilder implements SpanQueryBuilder { public static final String NAME = "span_within"; diff --git a/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java index 55ea770c2e154..f7dd146f9f019 100644 --- a/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java @@ -34,8 +34,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.Term; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValues; diff --git a/server/src/main/java/org/opensearch/index/search/MatchQuery.java b/server/src/main/java/org/opensearch/index/search/MatchQuery.java index 75f8d9aa6ba9f..485715c430b3f 100644 --- a/server/src/main/java/org/opensearch/index/search/MatchQuery.java +++ b/server/src/main/java/org/opensearch/index/search/MatchQuery.java @@ -42,6 +42,11 @@ import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanNearQuery; 
+import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -50,11 +55,6 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.QueryBuilder; import org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; import org.opensearch.OpenSearchException; @@ -550,11 +550,6 @@ private SpanQuery newSpanQuery(Term[] terms, boolean isPrefix) { return new SpanOrQuery(spanQueries); } - @Override - protected SpanQuery createSpanQuery(TokenStream in, String field) throws IOException { - return createSpanQuery(in, field, false); - } - private SpanQuery createSpanQuery(TokenStream in, String field, boolean isPrefix) throws IOException { TermToBytesRefAttribute termAtt = in.getAttribute(TermToBytesRefAttribute.class); PositionIncrementAttribute posIncAtt = in.getAttribute(PositionIncrementAttribute.class); diff --git a/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java index 85c801ca43fe6..e9437f5704851 100644 --- a/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java @@ -37,6 +37,9 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import 
org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queryparser.classic.ParseException; import org.apache.lucene.queryparser.classic.Token; import org.apache.lucene.queryparser.classic.XQueryParser; @@ -52,9 +55,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.RegExp; import org.opensearch.common.lucene.search.Queries; @@ -646,11 +646,11 @@ private Query getPossiblyAnalyzedPrefixQuery(String field, String termStr, Mappe } } else if (isLastPos == false) { // build a synonym query for terms in the same position. 
- Term[] terms = new Term[plist.size()]; - for (int i = 0; i < plist.size(); i++) { - terms[i] = new Term(field, plist.get(i)); + SynonymQuery.Builder sb = new SynonymQuery.Builder(field); + for (String synonym : plist) { + sb.addTerm(new Term(field, synonym)); } - posQuery = new SynonymQuery(terms); + posQuery = sb.build(); } else { List innerClauses = new ArrayList<>(); for (String token : plist) { diff --git a/server/src/main/java/org/opensearch/index/search/SimpleQueryStringQueryParser.java b/server/src/main/java/org/opensearch/index/search/SimpleQueryStringQueryParser.java index 2173c53f8131a..f4f68634d0df5 100644 --- a/server/src/main/java/org/opensearch/index/search/SimpleQueryStringQueryParser.java +++ b/server/src/main/java/org/opensearch/index/search/SimpleQueryStringQueryParser.java @@ -287,11 +287,12 @@ private Query newPossiblyAnalyzedQuery(String field, String termStr, Analyzer an } } else if (isLastPos == false) { // build a synonym query for terms in the same position. - Term[] terms = new Term[plist.size()]; - for (int i = 0; i < plist.size(); i++) { - terms[i] = new Term(field, plist.get(i)); + SynonymQuery.Builder sb = new SynonymQuery.Builder(field); + for (BytesRef bytesRef : plist) { + sb.addTerm(new Term(field, bytesRef)); + } - posQuery = new SynonymQuery(terms); + posQuery = sb.build(); } else { BooleanQuery.Builder innerBuilder = new BooleanQuery.Builder(); for (BytesRef token : plist) { diff --git a/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java index a76e296c40681..16d76ece840a2 100644 --- a/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java @@ -45,6 +45,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; 
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; @@ -257,13 +258,10 @@ public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException { } @Override - public void stringField(FieldInfo fieldInfo, byte[] value) throws IOException { - spare.bytes = value; - spare.offset = 0; - spare.length = value.length; + public void stringField(FieldInfo fieldInfo, String value) throws IOException { switch (fieldInfo.name) { case RoutingFieldMapper.NAME: - routing = spare.utf8ToString(); + routing = value; break; default: throw new IllegalStateException("Unexpected field: " + fieldInfo.name); @@ -358,7 +356,7 @@ public float matchCost() { */ private static BitSetProducer newParentDocBitSetProducer(Version indexVersionCreated) { return context -> { - Query query = Queries.newNonNestedFilter(indexVersionCreated); + Query query = Queries.newNonNestedFilter(); final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context); final IndexSearcher searcher = new IndexSearcher(topLevelContext); searcher.setQueryCache(null); @@ -367,4 +365,9 @@ private static BitSetProducer newParentDocBitSetProducer(Version indexVersionCre return s == null ? 
null : BitSet.of(s.iterator(), context.reader().maxDoc()); }; } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index 20bb6e7060ca3..387f77a839d35 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -191,7 +191,7 @@ void addIndices( assert sources.length > 0; final int luceneIndexCreatedVersionMajor = Lucene.readSegmentInfos(sources[0]).getIndexCreatedVersionMajor(); - final Directory hardLinkOrCopyTarget = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target); + final Directory hardLinkOrCopyTarget = new org.apache.lucene.misc.store.HardlinkCopyDirectoryWrapper(target); IndexWriterConfig iwc = new IndexWriterConfig(null).setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setCommitOnClose(false) diff --git a/server/src/main/java/org/opensearch/index/similarity/SimilarityProviders.java b/server/src/main/java/org/opensearch/index/similarity/SimilarityProviders.java index 41d97b9d2a53d..3617c9607a3ab 100644 --- a/server/src/main/java/org/opensearch/index/similarity/SimilarityProviders.java +++ b/server/src/main/java/org/opensearch/index/similarity/SimilarityProviders.java @@ -32,6 +32,7 @@ package org.opensearch.index.similarity; +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.similarities.AfterEffect; import org.apache.lucene.search.similarities.AfterEffectB; import org.apache.lucene.search.similarities.AfterEffectL; @@ -62,7 +63,6 @@ import org.apache.lucene.search.similarities.NormalizationH2; import org.apache.lucene.search.similarities.NormalizationH3; import org.apache.lucene.search.similarities.NormalizationZ; -import org.apache.lucene.search.similarity.LegacyBM25Similarity; import 
org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.common.logging.DeprecationLogger; @@ -307,9 +307,7 @@ public static LegacyBM25Similarity createBM25Similarity(Settings settings, Versi float b = settings.getAsFloat("b", 0.75f); boolean discountOverlaps = settings.getAsBoolean(DISCOUNT_OVERLAPS, true); - LegacyBM25Similarity similarity = new LegacyBM25Similarity(k1, b); - similarity.setDiscountOverlaps(discountOverlaps); - return similarity; + return new LegacyBM25Similarity(k1, b, discountOverlaps); } public static BooleanSimilarity createBooleanSimilarity(Settings settings, Version indexCreatedVersion) { diff --git a/server/src/main/java/org/opensearch/index/similarity/SimilarityService.java b/server/src/main/java/org/opensearch/index/similarity/SimilarityService.java index a183457ffbc72..bb82d53de815a 100644 --- a/server/src/main/java/org/opensearch/index/similarity/SimilarityService.java +++ b/server/src/main/java/org/opensearch/index/similarity/SimilarityService.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TermStatistics; @@ -42,7 +43,6 @@ import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity.SimScorer; -import org.apache.lucene.search.similarity.LegacyBM25Similarity; import org.apache.lucene.util.BytesRef; import org.opensearch.LegacyESVersion; import org.opensearch.Version; diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java index 2b47c5845a394..f4a94023a8ac8 100644 --- a/server/src/main/java/org/opensearch/index/store/Store.java +++ 
b/server/src/main/java/org/opensearch/index/store/Store.java @@ -50,6 +50,7 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.BufferedChecksum; +import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; @@ -66,7 +67,6 @@ import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.Streams; -import org.opensearch.common.io.stream.BytesStreamInput; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -669,9 +669,7 @@ public void cleanupAndVerify(String reason, MetadataSnapshot sourceMetadata) thr directory.deleteFile(reason, existingFile); // FNF should not happen since we hold a write lock? } catch (IOException ex) { - if (existingFile.startsWith(IndexFileNames.SEGMENTS) - || existingFile.equals(IndexFileNames.OLD_SEGMENTS_GEN) - || existingFile.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { + if (existingFile.startsWith(IndexFileNames.SEGMENTS) || existingFile.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { // TODO do we need to also fail this if we can't delete the pending commit file? // if one of those files can't be deleted we better fail the cleanup otherwise we might leave an old commit // point around? 
@@ -1053,9 +1051,6 @@ public RecoveryDiff recoveryDiff(MetadataSnapshot recoveryTargetSnapshot) { final List perCommitStoreFiles = new ArrayList<>(); for (StoreFileMetadata meta : this) { - if (IndexFileNames.OLD_SEGMENTS_GEN.equals(meta.name())) { // legacy - continue; // we don't need that file at all - } final String segmentId = IndexFileNames.parseSegmentName(meta.name()); final String extension = IndexFileNames.getExtension(meta.name()); if (IndexFileNames.SEGMENTS.equals(segmentId) @@ -1095,14 +1090,11 @@ public RecoveryDiff recoveryDiff(MetadataSnapshot recoveryTargetSnapshot) { Collections.unmodifiableList(different), Collections.unmodifiableList(missing) ); - assert recoveryDiff.size() == this.metadata.size() - (metadata.containsKey(IndexFileNames.OLD_SEGMENTS_GEN) ? 1 : 0) - : "some files are missing recoveryDiff size: [" - + recoveryDiff.size() - + "] metadata size: [" - + this.metadata.size() - + "] contains segments.gen: [" - + metadata.containsKey(IndexFileNames.OLD_SEGMENTS_GEN) - + "]"; + assert recoveryDiff.size() == this.metadata.size() : "some files are missing recoveryDiff size: [" + + recoveryDiff.size() + + "] metadata size: [" + + this.metadata.size() + + "]"; return recoveryDiff; } @@ -1236,7 +1228,7 @@ public void verify() throws IOException { String footerDigest = null; if (metadata.checksum().equals(actualChecksum) && writtenBytes == metadata.length()) { ByteArrayIndexInput indexInput = new ByteArrayIndexInput("checksum", this.footerChecksum); - footerDigest = digestToString(indexInput.readLong()); + footerDigest = digestToString(CodecUtil.readBELong(indexInput)); if (metadata.checksum().equals(footerDigest)) { return; } @@ -1393,9 +1385,9 @@ public void seek(long pos) throws IOException { // skipping the verified portion input.seek(verifiedPosition); // and checking unverified - skipBytes(pos - verifiedPosition); + super.seek(pos); } else { - skipBytes(pos - getFilePointer()); + super.seek(pos); } } } @@ -1425,8 +1417,12 @@ public 
IndexInput slice(String sliceDescription, long offset, long length) throw throw new UnsupportedOperationException(); } - public long getStoredChecksum() throws IOException { - return new BytesStreamInput(checksum).readLong(); + public long getStoredChecksum() { + try { + return CodecUtil.readBELong(new ByteArrayDataInput(checksum)); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } public long verify() throws CorruptIndexException, IOException { diff --git a/server/src/main/java/org/opensearch/index/translog/Checkpoint.java b/server/src/main/java/org/opensearch/index/translog/Checkpoint.java index b8346a26da8e6..0cff884cabfaa 100644 --- a/server/src/main/java/org/opensearch/index/translog/Checkpoint.java +++ b/server/src/main/java/org/opensearch/index/translog/Checkpoint.java @@ -32,6 +32,7 @@ package org.opensearch.index.translog; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexFormatTooNewException; @@ -64,11 +65,12 @@ final class Checkpoint { final long minTranslogGeneration; final long trimmedAboveSeqNo; - private static final int CURRENT_VERSION = 3; // introduction of trimmed above seq# + private static final int VERSION_LUCENE_BIG_ENDIAN = 3; // big endian format (Lucene 9+ switches to little endian) + private static final int CURRENT_VERSION = 4; // introduction of trimmed above seq# private static final String CHECKPOINT_CODEC = "ckp"; - static final int V3_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC) + Integer.BYTES // ops + static final int V4_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC) + Integer.BYTES // ops + Long.BYTES // offset + Long.BYTES // generation + Long.BYTES // minimum sequence number @@ -153,6 +155,10 @@ static Checkpoint emptyTranslogCheckpoint( } static Checkpoint readCheckpointV3(final DataInput in) throws IOException { + return 
readCheckpointV4(EndiannessReverserUtil.wrapDataInput(in)); + } + + static Checkpoint readCheckpointV4(final DataInput in) throws IOException { final long offset = in.readLong(); final int numOps = in.readInt(); final long generation = in.readLong(); @@ -191,10 +197,10 @@ public static Checkpoint read(Path path) throws IOException { try (IndexInput indexInput = dir.openInput(path.getFileName().toString(), IOContext.DEFAULT)) { // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. CodecUtil.checksumEntireFile(indexInput); - final int fileVersion = CodecUtil.checkHeader(indexInput, CHECKPOINT_CODEC, CURRENT_VERSION, CURRENT_VERSION); - assert fileVersion == CURRENT_VERSION : fileVersion; - assert indexInput.length() == V3_FILE_SIZE : indexInput.length(); - return Checkpoint.readCheckpointV3(indexInput); + final int fileVersion = CodecUtil.checkHeader(indexInput, CHECKPOINT_CODEC, VERSION_LUCENE_BIG_ENDIAN, CURRENT_VERSION); + assert fileVersion == CURRENT_VERSION || fileVersion == VERSION_LUCENE_BIG_ENDIAN : fileVersion; + assert indexInput.length() == V4_FILE_SIZE : indexInput.length(); + return fileVersion == CURRENT_VERSION ? Checkpoint.readCheckpointV4(indexInput) : Checkpoint.readCheckpointV3(indexInput); } catch (CorruptIndexException | NoSuchFileException | IndexFormatTooOldException | IndexFormatTooNewException e) { throw new TranslogCorruptedException(path.toString(), e); } @@ -207,9 +213,8 @@ public static void write(ChannelFactory factory, Path checkpointFile, Checkpoint // now go and write to the channel, in one go. 
try (FileChannel channel = factory.open(checkpointFile, options)) { Channels.writeToChannel(bytes, channel); - // no need to force metadata, file size stays the same and we did the full fsync - // when we first created the file, so the directory entry doesn't change as well - channel.force(false); + // force fsync with metadata since this is used on file creation + channel.force(true); } } @@ -222,7 +227,7 @@ public static void write(FileChannel fileChannel, Path checkpointFile, Checkpoin } private static byte[] createCheckpointBytes(Path checkpointFile, Checkpoint checkpoint) throws IOException { - final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(V3_FILE_SIZE) { + final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(V4_FILE_SIZE) { @Override public synchronized byte[] toByteArray() { // don't clone @@ -235,17 +240,17 @@ public synchronized byte[] toByteArray() { resourceDesc, checkpointFile.toString(), byteOutputStream, - V3_FILE_SIZE + V4_FILE_SIZE ) ) { CodecUtil.writeHeader(indexOutput, CHECKPOINT_CODEC, CURRENT_VERSION); checkpoint.write(indexOutput); CodecUtil.writeFooter(indexOutput); - assert indexOutput.getFilePointer() == V3_FILE_SIZE : "get you numbers straight; bytes written: " + assert indexOutput.getFilePointer() == V4_FILE_SIZE : "get you numbers straight; bytes written: " + indexOutput.getFilePointer() + ", buffer size: " - + V3_FILE_SIZE; + + V4_FILE_SIZE; assert indexOutput.getFilePointer() < 512 : "checkpoint files have to be smaller than 512 bytes for atomic writes; size: " + indexOutput.getFilePointer(); } diff --git a/server/src/main/java/org/opensearch/index/translog/Translog.java b/server/src/main/java/org/opensearch/index/translog/Translog.java index 2586599d3ed59..25d84efbee38f 100644 --- a/server/src/main/java/org/opensearch/index/translog/Translog.java +++ b/server/src/main/java/org/opensearch/index/translog/Translog.java @@ -2018,7 +2018,6 @@ public static String createEmptyTranslog( final 
Checkpoint checkpoint = Checkpoint.emptyTranslogCheckpoint(0, generation, initialGlobalCheckpoint, minTranslogGeneration); Checkpoint.write(channelFactory, checkpointFile, checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW); - IOUtils.fsync(checkpointFile, false); final TranslogWriter writer = TranslogWriter.create( shardId, uuid, diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogReader.java b/server/src/main/java/org/opensearch/index/translog/TranslogReader.java index 6d1ae86ebccec..3c47ce2207e4b 100644 --- a/server/src/main/java/org/opensearch/index/translog/TranslogReader.java +++ b/server/src/main/java/org/opensearch/index/translog/TranslogReader.java @@ -111,7 +111,6 @@ TranslogReader closeIntoTrimmedReader(long aboveSeqNo, ChannelFactory channelFac ); Checkpoint.write(channelFactory, checkpointFile, newCheckpoint, StandardOpenOption.WRITE); - IOUtils.fsync(checkpointFile, false); IOUtils.fsync(checkpointFile.getParent(), true); newReader = new TranslogReader(newCheckpoint, channel, path, header); diff --git a/server/src/main/java/org/opensearch/index/translog/TruncateTranslogAction.java b/server/src/main/java/org/opensearch/index/translog/TruncateTranslogAction.java index cac4cdb610cda..7d30fe2e2ffe3 100644 --- a/server/src/main/java/org/opensearch/index/translog/TruncateTranslogAction.java +++ b/server/src/main/java/org/opensearch/index/translog/TruncateTranslogAction.java @@ -245,8 +245,6 @@ private static void writeEmptyCheckpoint(Path filename, int translogLength, long StandardOpenOption.READ, StandardOpenOption.CREATE_NEW ); - // fsync with metadata here to make sure. 
- IOUtils.fsync(filename, false); } /** diff --git a/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java b/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java index 9c7d66457ce15..2cbb83fde278d 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java @@ -35,7 +35,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.LRUQueryCache; @@ -154,11 +153,6 @@ protected CachingWeightWrapper(Weight in) { this.in = in; } - @Override - public void extractTerms(Set terms) { - in.extractTerms(terms); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { shardKeyMap.add(context.reader()); diff --git a/server/src/main/java/org/opensearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/opensearch/indices/analysis/AnalysisModule.java index 857c093a64e19..5d43bfa8876c8 100644 --- a/server/src/main/java/org/opensearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/opensearch/indices/analysis/AnalysisModule.java @@ -228,7 +228,7 @@ static Map setupPreConfiguredTokenFilters(List })); /* Note that "stop" is available in lucene-core but it's pre-built * version uses a set of English stop words that are in - * lucene-analyzers-common so "stop" is defined in the analysis-common + * lucene-analysis-common so "stop" is defined in the analysis-common * module. 
*/ for (AnalysisPlugin plugin : plugins) { diff --git a/server/src/main/java/org/opensearch/indices/analysis/PreBuiltAnalyzers.java b/server/src/main/java/org/opensearch/indices/analysis/PreBuiltAnalyzers.java index 21afbf7b15753..640100bbcc082 100644 --- a/server/src/main/java/org/opensearch/indices/analysis/PreBuiltAnalyzers.java +++ b/server/src/main/java/org/opensearch/indices/analysis/PreBuiltAnalyzers.java @@ -33,12 +33,12 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.classic.ClassicAnalyzer; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.analysis.en.EnglishAnalyzer; -import org.apache.lucene.analysis.standard.ClassicAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.opensearch.Version; import org.opensearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; @@ -50,9 +50,7 @@ public enum PreBuiltAnalyzers { STANDARD(CachingStrategy.OPENSEARCH) { @Override protected Analyzer create(Version version) { - final Analyzer a = new StandardAnalyzer(CharArraySet.EMPTY_SET); - a.setVersion(version.luceneVersion); - return a; + return new StandardAnalyzer(CharArraySet.EMPTY_SET); } }, @@ -75,36 +73,28 @@ protected Analyzer create(Version version) { STOP { @Override protected Analyzer create(Version version) { - Analyzer a = new StopAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); - a.setVersion(version.luceneVersion); - return a; + return new StopAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); } }, WHITESPACE { @Override protected Analyzer create(Version version) { - Analyzer a = new WhitespaceAnalyzer(); - a.setVersion(version.luceneVersion); - return a; + return new WhitespaceAnalyzer(); } }, SIMPLE { @Override protected Analyzer create(Version 
version) { - Analyzer a = new SimpleAnalyzer(); - a.setVersion(version.luceneVersion); - return a; + return new SimpleAnalyzer(); } }, CLASSIC { @Override protected Analyzer create(Version version) { - Analyzer a = new ClassicAnalyzer(); - a.setVersion(version.luceneVersion); - return a; + return new ClassicAnalyzer(); } }; diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java index 684c401716883..d7c3421b1de93 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java @@ -216,7 +216,6 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi final TransportRequest requestToSend; final StartRecoveryRequest startRequest; final RecoveryState.Timer timer; - CancellableThreads cancellableThreads; try (RecoveryRef recoveryRef = onGoingRecoveries.getRecovery(recoveryId)) { if (recoveryRef == null) { logger.trace("not running recovery with id [{}] - can not find it (probably finished)", recoveryId); @@ -224,7 +223,6 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi } final RecoveryTarget recoveryTarget = recoveryRef.get(); timer = recoveryTarget.state().getTimer(); - cancellableThreads = recoveryTarget.cancellableThreads(); if (preExistingRequest == null) { try { final IndexShard indexShard = recoveryTarget.indexShard(); @@ -256,21 +254,12 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi logger.trace("{} reestablishing recovery from {}", startRequest.shardId(), startRequest.sourceNode()); } } - RecoveryResponseHandler responseHandler = new RecoveryResponseHandler(startRequest, timer); - - try { - cancellableThreads.executeIO(() -> - // we still execute under cancelableThreads here to ensure we interrupt any blocking call to 
the network if any - // on the underlying transport. It's unclear if we need this here at all after moving to async execution but - // the issues that a missing call to this could cause are sneaky and hard to debug. If we don't need it on this - // call we can potentially remove it altogether which we should do it in a major release only with enough - // time to test. This shoudl be done for 7.0 if possible - transportService.sendRequest(startRequest.sourceNode(), actionName, requestToSend, responseHandler)); - } catch (CancellableThreads.ExecutionCancelledException e) { - logger.trace("recovery cancelled", e); - } catch (Exception e) { - responseHandler.onException(e); - } + transportService.sendRequest( + startRequest.sourceNode(), + actionName, + requestToSend, + new RecoveryResponseHandler(startRequest, timer) + ); } /** diff --git a/server/src/main/java/org/apache/lucene/queries/MinDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java similarity index 96% rename from server/src/main/java/org/apache/lucene/queries/MinDocQuery.java rename to server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java index ae68f93349ce5..a7dbadb32ccf2 100644 --- a/server/src/main/java/org/apache/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -39,6 +39,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; @@ -160,6 +161,11 @@ public long cost() { } } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "MinDocQuery(minDoc=" + minDoc + ")"; diff --git a/server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java similarity index 97% rename from server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java rename to server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java index 45df15651bfa2..fd4d84fabe9c7 100644 --- a/server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConstantScoreScorer; @@ -41,6 +41,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafFieldComparator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; @@ -114,6 +115,11 @@ public boolean isCacheable(LeafReaderContext ctx) { }; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "SearchAfterSortedDocQuery(sort=" + sort + ", afterDoc=" + after.toString() + ")"; diff --git a/server/src/main/java/org/opensearch/plugins/PluginsService.java b/server/src/main/java/org/opensearch/plugins/PluginsService.java index 8e1bb4c510042..4ef2dc4617de9 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginsService.java +++ b/server/src/main/java/org/opensearch/plugins/PluginsService.java @@ -34,9 +34,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.analysis.util.CharFilterFactory; -import org.apache.lucene.analysis.util.TokenFilterFactory; -import org.apache.lucene.analysis.util.TokenizerFactory; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; @@ -748,10 +745,6 @@ static void reloadLuceneSPI(ClassLoader loader) { PostingsFormat.reloadPostingsFormats(loader); DocValuesFormat.reloadDocValuesFormats(loader); Codec.reloadCodecs(loader); - // Analysis: - CharFilterFactory.reloadCharFilters(loader); - TokenFilterFactory.reloadTokenFilters(loader); - TokenizerFactory.reloadTokenizers(loader); } private Class loadPluginClass(String className, ClassLoader loader) { diff --git 
a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index a641f2e625e16..f6c76664c8988 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -324,7 +324,7 @@ public Query buildFilteredQuery(Query query) { if (mapperService().hasNested() && new NestedHelper(mapperService()).mightMatchNestedDocs(query) && (aliasFilter == null || new NestedHelper(mapperService()).mightMatchNestedDocs(aliasFilter))) { - filters.add(Queries.newNonNestedFilter(mapperService().getIndexSettings().getIndexVersionCreated())); + filters.add(Queries.newNonNestedFilter()); } if (aliasFilter != null) { diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index de4586efd60b1..0ffe859879453 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -1088,7 +1088,11 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc context.fetchSourceContext(source.fetchSource()); } if (source.docValueFields() != null) { - FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(context.mapperService(), source.docValueFields()); + FetchDocValuesContext docValuesContext = FetchDocValuesContext.create( + context.mapperService()::simpleMatchToFullName, + context.mapperService().getIndexSettings().getMaxDocvalueFields(), + source.docValueFields() + ); context.docValuesContext(docValuesContext); } if (source.fetchFields() != null) { diff --git a/server/src/main/java/org/opensearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/opensearch/search/aggregations/MultiBucketCollector.java index 4334afbe30454..6bb044b1d7ea8 100644 --- 
a/server/src/main/java/org/opensearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/opensearch/search/aggregations/MultiBucketCollector.java @@ -189,7 +189,7 @@ private MultiLeafBucketCollector(List collectors, boolean c @Override public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { - scorer = new ScoreCachingWrappingScorer(scorer); + scorer = ScoreCachingWrappingScorer.wrap(scorer); } for (int i = 0; i < numCollectors; ++i) { final LeafCollector c = collectors[i]; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java index 1d48850bee122..73dc838a36198 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.queries.SearchAfterSortedDocQuery; +import org.opensearch.lucene.queries.SearchAfterSortedDocQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.CollectionTerminatedException; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index d08f8d0d95931..8dbb902b96186 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -38,9 +38,9 @@ import org.apache.lucene.search.DocIdSet; import 
org.apache.lucene.search.Query; import org.apache.lucene.util.DocIdSetBuilder; -import org.apache.lucene.util.FutureArrays; import java.io.IOException; +import java.util.Arrays; import java.util.function.ToLongFunction; /** @@ -166,10 +166,9 @@ public void visit(int docID, byte[] packedValue) throws IOException { @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if ((upperPointQuery != null - && FutureArrays.compareUnsigned(minPackedValue, 0, bytesPerDim, upperPointQuery, 0, bytesPerDim) > 0) + if ((upperPointQuery != null && Arrays.compareUnsigned(minPackedValue, 0, bytesPerDim, upperPointQuery, 0, bytesPerDim) > 0) || (lowerPointQuery != null - && FutureArrays.compareUnsigned(maxPackedValue, 0, bytesPerDim, lowerPointQuery, 0, bytesPerDim) < 0)) { + && Arrays.compareUnsigned(maxPackedValue, 0, bytesPerDim, lowerPointQuery, 0, bytesPerDim) < 0)) { // does not match the query return PointValues.Relation.CELL_OUTSIDE_QUERY; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregator.java index 38371e5fcaeee..a003f1380253b 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -82,9 +82,7 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA ) throws IOException { super(name, factories, context, parent, cardinality, metadata); - Query parentFilter = parentObjectMapper != null - ? parentObjectMapper.nestedTypeFilter() - : Queries.newNonNestedFilter(context.mapperService().getIndexSettings().getIndexVersionCreated()); + Query parentFilter = parentObjectMapper != null ? 
parentObjectMapper.nestedTypeFilter() : Queries.newNonNestedFilter(); this.parentFilter = context.bitsetFilterCache().getBitSetProducer(parentFilter); this.childFilter = childObjectMapper.nestedTypeFilter(); this.collectsFromSingleBucket = cardinality.map(estimate -> estimate < 2); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java index 2d5f6cb324aaf..689304215e9c7 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java @@ -71,7 +71,7 @@ public ReverseNestedAggregator( ) throws IOException { super(name, factories, context, parent, cardinality, metadata); if (objectMapper == null) { - parentFilter = Queries.newNonNestedFilter(context.mapperService().getIndexSettings().getIndexVersionCreated()); + parentFilter = Queries.newNonNestedFilter(); } else { parentFilter = objectMapper.nestedTypeFilter(); } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java index 2c44bd5c059fd..c0b3536838430 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java @@ -34,8 +34,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.search.DiversifiedTopDocsCollector; -import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; +import 
org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java index 627753d7cdb71..fdb00eed24c0d 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java @@ -34,8 +34,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.search.DiversifiedTopDocsCollector; -import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java index 8535705a1c820..8b4bed89e678c 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java @@ -35,8 +35,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.search.DiversifiedTopDocsCollector; -import 
org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.opensearch.OpenSearchException; import org.opensearch.index.fielddata.AbstractNumericDocValues; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index 4d8237c5f42f8..62caa017eddba 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -37,8 +37,8 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.search.DiversifiedTopDocsCollector; -import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.opensearch.index.fielddata.AbstractNumericDocValues; import org.opensearch.search.aggregations.Aggregator; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 56d93e2e498f9..a7855c2b400f1 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -32,7 +32,7 @@ package 
org.opensearch.search.aggregations.bucket.sampler; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DiversifiedTopDocsCollector; +import org.apache.lucene.misc.search.DiversifiedTopDocsCollector; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.RamUsageEstimator; import org.opensearch.common.ParseField; diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java index de4ad1376be06..24e7875a6ade9 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java @@ -141,16 +141,22 @@ public InternalAggregation reduce(List aggregations, Reduce InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i); shardDocs[i] = topHitsAgg.topDocs.topDocs; shardHits[i] = topHitsAgg.searchHits; + for (ScoreDoc doc : shardDocs[i].scoreDocs) { + doc.shardIndex = i; + } } - reducedTopDocs = TopDocs.merge(sort, from, size, (TopFieldDocs[]) shardDocs, true); + reducedTopDocs = TopDocs.merge(sort, from, size, (TopFieldDocs[]) shardDocs); } else { shardDocs = new TopDocs[aggregations.size()]; for (int i = 0; i < shardDocs.length; i++) { InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i); shardDocs[i] = topHitsAgg.topDocs.topDocs; shardHits[i] = topHitsAgg.searchHits; + for (ScoreDoc doc : shardDocs[i].scoreDocs) { + doc.shardIndex = i; + } } - reducedTopDocs = TopDocs.merge(from, size, shardDocs, true); + reducedTopDocs = TopDocs.merge(from, size, shardDocs); } float maxScore = Float.NaN; diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java index e1f6273f62fb6..b755d54de0fe5 100644 --- 
a/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/MaxAggregator.java @@ -37,7 +37,6 @@ import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.Bits; -import org.apache.lucene.util.FutureArrays; import org.opensearch.common.lease.Releasables; import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.DoubleArray; @@ -54,6 +53,7 @@ import org.opensearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Arrays; import java.util.Map; import java.util.function.Function; @@ -200,7 +200,7 @@ public void visit(int docID, byte[] packedValue) { @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if (FutureArrays.equals(maxValue, 0, numBytes, maxPackedValue, 0, numBytes)) { + if (Arrays.equals(maxValue, 0, numBytes, maxPackedValue, 0, numBytes)) { // we only check leaves that contain the max value for the segment. 
return PointValues.Relation.CELL_CROSSES_QUERY; } else { diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorFactory.java index 81fa4b9f83d15..b9699964a611e 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorFactory.java @@ -127,7 +127,11 @@ public Aggregator createInternal( subSearchContext.storedFieldsContext(storedFieldsContext); } if (docValueFields != null) { - FetchDocValuesContext docValuesContext = FetchDocValuesContext.create(searchContext.mapperService(), docValueFields); + FetchDocValuesContext docValuesContext = FetchDocValuesContext.create( + searchContext.mapperService()::simpleMatchToFullName, + searchContext.mapperService().getIndexSettings().getMaxDocvalueFields(), + docValueFields + ); subSearchContext.docValuesContext(docValuesContext); } if (fetchFields != null) { diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java index a74497477099a..e50f903b22920 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java @@ -83,6 +83,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static java.util.Collections.emptyMap; @@ -273,9 +274,7 @@ private boolean sourceRequired(SearchContext context) { private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException { if (context.mapperService().hasNested()) { - BitSet bits = context.bitsetFilterCache() - .getBitSetProducer(Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated())) - 
.getBitSet(subReaderContext); + BitSet bits = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()).getBitSet(subReaderContext); if (!bits.get(subDocId)) { return bits.nextSetBit(subDocId); } @@ -333,7 +332,7 @@ private HitContext prepareNonNestedHitContext( return new HitContext(hit, subReaderContext, subDocId, lookup.source()); } else { SearchHit hit; - loadStoredFields(context.mapperService(), fieldReader, fieldsVisitor, subDocId); + loadStoredFields(context::fieldType, fieldReader, fieldsVisitor, subDocId); String id = fieldsVisitor.id(); if (fieldsVisitor.fields().isEmpty() == false) { Map docFields = new HashMap<>(); @@ -391,8 +390,8 @@ private HitContext prepareNestedHitContext( } } else { FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource); - loadStoredFields(context.mapperService(), storedFieldReader, rootFieldsVisitor, rootDocId); - rootFieldsVisitor.postProcess(context.mapperService()); + loadStoredFields(context::fieldType, storedFieldReader, rootFieldsVisitor, rootDocId); + rootFieldsVisitor.postProcess(context::fieldType); rootId = rootFieldsVisitor.id(); if (needSource) { @@ -410,7 +409,7 @@ private HitContext prepareNestedHitContext( Map metaFields = emptyMap(); if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) { FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false); - loadStoredFields(context.mapperService(), storedFieldReader, nestedFieldsVisitor, nestedDocId); + loadStoredFields(context::fieldType, storedFieldReader, nestedFieldsVisitor, nestedDocId); if (nestedFieldsVisitor.fields().isEmpty() == false) { docFields = new HashMap<>(); metaFields = new HashMap<>(); @@ -508,7 +507,7 @@ private SearchHit.NestedIdentity getInternalNestedIdentity( } parentFilter = nestedParentObjectMapper.nestedTypeFilter(); } else { - parentFilter = Queries.newNonNestedFilter(context.indexShard().indexSettings().getIndexVersionCreated()); + 
parentFilter = Queries.newNonNestedFilter(); } Query childFilter = nestedObjectMapper.nestedTypeFilter(); @@ -553,14 +552,14 @@ private SearchHit.NestedIdentity getInternalNestedIdentity( } private void loadStoredFields( - MapperService mapperService, + Function fieldTypeLookup, CheckedBiConsumer fieldReader, FieldsVisitor fieldVisitor, int docId ) throws IOException { fieldVisitor.reset(); fieldReader.accept(docId, fieldVisitor); - fieldVisitor.postProcess(mapperService); + fieldVisitor.postProcess(fieldTypeLookup); } private static void fillDocAndMetaFields( diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/FetchDocValuesContext.java b/server/src/main/java/org/opensearch/search/fetch/subphase/FetchDocValuesContext.java index df463a667b2e3..3bfb3365fe46e 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/FetchDocValuesContext.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/FetchDocValuesContext.java @@ -32,11 +32,12 @@ package org.opensearch.search.fetch.subphase; import org.opensearch.index.IndexSettings; -import org.opensearch.index.mapper.MapperService; import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.Set; +import java.util.function.Function; /** * All the required context to pull a field from the doc values. 
@@ -44,15 +45,18 @@ public class FetchDocValuesContext { private final List fields; - public static FetchDocValuesContext create(MapperService mapperService, List fieldPatterns) { + public static FetchDocValuesContext create( + Function> simpleMatchToFullName, + int maxAllowedDocvalueFields, + List fieldPatterns + ) { List fields = new ArrayList<>(); for (FieldAndFormat field : fieldPatterns) { - Collection fieldNames = mapperService.simpleMatchToFullName(field.field); + Collection fieldNames = simpleMatchToFullName.apply(field.field); for (String fieldName : fieldNames) { fields.add(new FieldAndFormat(fieldName, field.format)); } } - int maxAllowedDocvalueFields = mapperService.getIndexSettings().getMaxDocvalueFields(); if (fields.size() > maxAllowedDocvalueFields) { throw new IllegalArgumentException( "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [" diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java index d7ac7d21f1922..22acd599d18c6 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/InnerHitsContext.java @@ -35,7 +35,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.Collector; -import org.apache.lucene.search.ConjunctionDISI; +import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.ScoreMode; @@ -187,7 +187,7 @@ public static void intersect(Weight weight, Weight innerHitQueryWeight, Collecto try { Bits acceptDocs = ctx.reader().getLiveDocs(); - DocIdSetIterator iterator = ConjunctionDISI.intersectIterators( + DocIdSetIterator iterator = ConjunctionUtils.intersectIterators( 
Arrays.asList(innerHitQueryScorer.iterator(), scorer.iterator()) ); for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) { diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index 8f0c434674feb..8e97fc3a27ffb 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -48,7 +48,6 @@ import org.opensearch.common.text.Text; import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.IdFieldMapper; -import org.opensearch.index.mapper.KeywordFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.TextSearchInfo; import org.opensearch.index.query.QueryShardContext; @@ -134,14 +133,6 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) th ? 
HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; int maxAnalyzedOffset = fieldContext.context.getIndexSettings().getHighlightMaxAnalyzedOffset(); - int keywordIgnoreAbove = Integer.MAX_VALUE; - if (fieldContext.fieldType instanceof KeywordFieldMapper.KeywordFieldType) { - KeywordFieldMapper mapper = (KeywordFieldMapper) fieldContext.context.mapperService() - .documentMapper() - .mappers() - .getMapper(fieldContext.fieldName); - keywordIgnoreAbove = mapper.ignoreAbove(); - } int numberOfFragments = fieldContext.field.fieldOptions().numberOfFragments(); Analyzer analyzer = getAnalyzer(fieldContext.context.mapperService().documentMapper()); PassageFormatter passageFormatter = getPassageFormatter(fieldContext.hitContext, fieldContext.field, encoder); @@ -178,7 +169,6 @@ CustomUnifiedHighlighter buildHighlighter(FieldHighlightContext fieldContext) th fieldContext.field.fieldOptions().noMatchSize(), higlighterNumberOfFragments, fieldMatcher(fieldContext), - keywordIgnoreAbove, maxAnalyzedOffset ); } diff --git a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java index dad21d024ad49..2cc15d4c65b96 100644 --- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java @@ -41,7 +41,7 @@ import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.Collector; import org.apache.lucene.search.CollectorManager; -import org.apache.lucene.search.ConjunctionDISI; +import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -291,10 +291,6 @@ private void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collecto private Weight wrapWeight(Weight weight) { if (cancellable.isEnabled()) { 
return new Weight(weight.getQuery()) { - @Override - public void extractTerms(Set terms) { - throw new UnsupportedOperationException(); - } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { @@ -344,7 +340,7 @@ static void intersectScorerAndBitSet(Scorer scorer, BitSet acceptDocs, LeafColle collector.setScorer(scorer); // ConjunctionDISI uses the DocIdSetIterator#cost() to order the iterators, so if roleBits has the lowest cardinality it should // be used first: - DocIdSetIterator iterator = ConjunctionDISI.intersectIterators( + DocIdSetIterator iterator = ConjunctionUtils.intersectIterators( Arrays.asList(new BitSetIterator(acceptDocs, acceptDocs.approximateCardinality()), scorer.iterator()) ); int seen = 0; diff --git a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java index 30c36b70bc85c..56b69ab3fb265 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java @@ -33,7 +33,6 @@ package org.opensearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; @@ -44,7 +43,6 @@ import org.opensearch.search.profile.Timer; import java.io.IOException; -import java.util.Set; /** * Weight wrapper that will compute how much time it takes to build the @@ -128,8 +126,8 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio } @Override - public void extractTerms(Set set) { - subQueryWeight.extractTerms(set); + public int count(LeafReaderContext context) throws IOException { + return subQueryWeight.count(context); } @Override diff --git a/server/src/main/java/org/opensearch/search/query/QueryPhase.java 
b/server/src/main/java/org/opensearch/search/query/QueryPhase.java index e78741f48a223..7d4b8738c1800 100644 --- a/server/src/main/java/org/opensearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/opensearch/search/query/QueryPhase.java @@ -36,8 +36,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.queries.MinDocQuery; -import org.apache.lucene.queries.SearchAfterSortedDocQuery; +import org.opensearch.lucene.queries.MinDocQuery; +import org.opensearch.lucene.queries.SearchAfterSortedDocQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; @@ -47,7 +47,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; -import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; import org.opensearch.action.search.SearchShardTask; @@ -55,9 +54,6 @@ import org.opensearch.common.lucene.Lucene; import org.opensearch.common.lucene.search.TopDocsAndMaxScore; import org.opensearch.common.util.concurrent.QueueResizingOpenSearchThreadPoolExecutor; -import org.opensearch.index.IndexSortConfig; -import org.opensearch.index.mapper.DateFieldMapper.DateFieldType; -import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.search.DocValueFormat; import org.opensearch.search.SearchContextSourcePrinter; import org.opensearch.search.SearchService; @@ -235,10 +231,6 @@ static boolean executeInternal(SearchContext searchContext) throws QueryPhaseExe // this collector can filter documents during the collection hasFilterCollector = true; } - // optimizing sort on Numerics (long and date) - if ((searchContext.sort() != null) && SYS_PROP_REWRITE_SORT) { - enhanceSortOnNumeric(searchContext, searcher.getIndexReader()); - } boolean 
timeoutSet = scrollContext == null && searchContext.timeout() != null @@ -332,27 +324,6 @@ private static boolean searchWithCollector( return topDocsFactory.shouldRescore(); } - private static void enhanceSortOnNumeric(SearchContext searchContext, IndexReader reader) { - if (canEarlyTerminate(reader, searchContext.sort())) { - // disable this optimization if index sorting matches the query sort since it's already optimized by index searcher - return; - } - Sort sort = searchContext.sort().sort; - SortField sortField = sort.getSort()[0]; - if (SortField.Type.LONG.equals(IndexSortConfig.getSortFieldType(sortField)) == false) return; - - // check if this is a field of type Long or Date, that is indexed and has doc values - String fieldName = sortField.getField(); - if (fieldName == null) return; // happens when _score or _doc is the 1st sort field - if (searchContext.mapperService() == null) return; // mapperService can be null in tests - final MappedFieldType fieldType = searchContext.mapperService().fieldType(fieldName); - if (fieldType == null) return; // for unmapped fields, default behaviour depending on "unmapped_type" flag - if ((fieldType.typeName().equals("long") == false) && (fieldType instanceof DateFieldType == false)) return; - if (fieldType.isSearchable() == false) return; - if (fieldType.hasDocValues() == false) return; - sortField.setCanUsePoints(); - } - /** * Returns true if the provided query returns docs in index order (internal doc ids). 
* @param query The query to execute diff --git a/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java b/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java index a007969ffd108..9cf7dca3c4caf 100644 --- a/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java +++ b/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java @@ -40,6 +40,7 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Collector; @@ -63,7 +64,6 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.search.grouping.CollapsingTopDocsCollector; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.action.search.MaxScoreCollector; import org.opensearch.common.Nullable; import org.opensearch.common.lucene.Lucene; diff --git a/server/src/main/java/org/opensearch/search/slice/SliceQuery.java b/server/src/main/java/org/opensearch/search/slice/SliceQuery.java index 9dd1b557b34c7..5b2c97e44a521 100644 --- a/server/src/main/java/org/opensearch/search/slice/SliceQuery.java +++ b/server/src/main/java/org/opensearch/search/slice/SliceQuery.java @@ -33,6 +33,7 @@ package org.opensearch.search.slice; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import java.util.Objects; @@ -91,4 +92,10 @@ public String toString(String f) { return getClass().getSimpleName() + "[field=" + field + ", id=" + id + ", max=" + max + "]"; } + @Override + public void visit(QueryVisitor visitor) { + if (visitor.acceptField(field)) { + visitor.visitLeaf(this); + } + } } diff --git a/server/src/main/java/org/opensearch/search/sort/SortBuilder.java 
b/server/src/main/java/org/opensearch/search/sort/SortBuilder.java index 09470f0b2cb45..74a4d974c9555 100644 --- a/server/src/main/java/org/opensearch/search/sort/SortBuilder.java +++ b/server/src/main/java/org/opensearch/search/sort/SortBuilder.java @@ -201,7 +201,7 @@ protected static Nested resolveNested(QueryShardContext context, NestedSortBuild final ObjectMapper objectMapper = context.nestedScope().getObjectMapper(); final Query parentQuery; if (objectMapper == null) { - parentQuery = Queries.newNonNestedFilter(context.indexVersionCreated()); + parentQuery = Queries.newNonNestedFilter(); } else { parentQuery = objectMapper.nestedTypeFilter(); } diff --git a/server/src/main/resources/org/opensearch/bootstrap/security.policy b/server/src/main/resources/org/opensearch/bootstrap/security.policy index f51cfbd65a0f0..97b73aedf24bb 100644 --- a/server/src/main/resources/org/opensearch/bootstrap/security.policy +++ b/server/src/main/resources/org/opensearch/bootstrap/security.policy @@ -46,6 +46,8 @@ grant codeBase "${codebase.opensearch-secure-sm}" { grant codeBase "${codebase.opensearch}" { // needed for loading plugins which may expect the context class loader to be set permission java.lang.RuntimePermission "setContextClassLoader"; + // needed for SPI class loading + permission java.lang.RuntimePermission "accessDeclaredMembers"; }; //// Very special jar permissions: diff --git a/server/src/test/java/org/opensearch/LegacyESVersionTests.java b/server/src/test/java/org/opensearch/LegacyESVersionTests.java index aea6f2eebea16..8fb3636dd8b2c 100644 --- a/server/src/test/java/org/opensearch/LegacyESVersionTests.java +++ b/server/src/test/java/org/opensearch/LegacyESVersionTests.java @@ -195,9 +195,9 @@ public void testIsBeta() { } public void testIsAlpha() { - assertTrue(new LegacyESVersion(5000001, org.apache.lucene.util.Version.LUCENE_7_0_0).isAlpha()); - assertFalse(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_7_0_0).isAlpha()); - 
assertTrue(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_7_0_0).isBeta()); + assertTrue(new LegacyESVersion(5000001, org.apache.lucene.util.Version.LUCENE_8_0_0).isAlpha()); + assertFalse(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_8_0_0).isAlpha()); + assertTrue(new LegacyESVersion(4000002, org.apache.lucene.util.Version.LUCENE_8_0_0).isBeta()); assertTrue(LegacyESVersion.fromString("5.0.0-alpha14").isAlpha()); assertEquals(5000014, LegacyESVersion.fromString("5.0.0-alpha14").id); assertTrue(LegacyESVersion.fromId(5000015).isAlpha()); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java index ca3b1f3f3815d..402d6439c4838 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/segments/IndicesSegmentsRequestTests.java @@ -34,7 +34,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.common.settings.Settings; -import org.opensearch.index.engine.Segment; import org.opensearch.index.MergePolicyConfig; import org.opensearch.indices.IndexClosedException; import org.opensearch.plugins.Plugin; @@ -43,7 +42,6 @@ import org.junit.Before; import java.util.Collection; -import java.util.List; import static org.hamcrest.Matchers.is; @@ -71,18 +69,6 @@ public void setupIndex() { client().admin().indices().prepareRefresh().get(); } - public void testBasic() { - IndicesSegmentResponse rsp = client().admin().indices().prepareSegments("test").get(); - List segments = rsp.getIndices().get("test").iterator().next().getShards()[0].getSegments(); - assertNull(segments.get(0).toString(), segments.get(0).ramTree); - } - - public void testVerbose() { - IndicesSegmentResponse rsp = 
client().admin().indices().prepareSegments("test").setVerbose(true).get(); - List segments = rsp.getIndices().get("test").iterator().next().getShards()[0].getSegments(); - assertNotNull(segments.get(0).toString(), segments.get(0).ramTree); - } - /** * with the default IndicesOptions inherited from BroadcastOperationRequest this will raise an exception */ diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/IndexShardHotSpotTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/IndexShardHotSpotTests.java index 98fb9ae14dd5e..875432c9e6e11 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/IndexShardHotSpotTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/IndexShardHotSpotTests.java @@ -109,6 +109,7 @@ public void testClusterScaleIn() { /** * Test cluster scale in scenario with skewed shard distribution in remaining nodes. */ + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testClusterScaleInWithSkew() { setupInitialCluster(4, 100, 5, 1); buildAllocationService("node_0,node_1"); diff --git a/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java b/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java index 05b185beb57e9..69c431994ba7e 100644 --- a/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java @@ -56,6 +56,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -440,6 +441,11 @@ public void testAsSequentialAccessBits() throws Exception { private static class UnsupportedQuery extends Query { + @Override + public void visit(QueryVisitor visitor) { + 
visitor.visitLeaf(this); + } + @Override public String toString(String field) { return "Unsupported"; @@ -464,11 +470,6 @@ public boolean isCacheable(LeafReaderContext ctx) { return true; } - @Override - public void extractTerms(Set terms) { - throw new UnsupportedOperationException(); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { throw new UnsupportedOperationException(); diff --git a/server/src/test/java/org/opensearch/common/lucene/search/QueriesTests.java b/server/src/test/java/org/opensearch/common/lucene/search/QueriesTests.java index 25af50f02cd45..b9f128037a970 100644 --- a/server/src/test/java/org/opensearch/common/lucene/search/QueriesTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/search/QueriesTests.java @@ -48,9 +48,9 @@ public class QueriesTests extends OpenSearchTestCase { public void testNonNestedQuery() { for (Version version : VersionUtils.allVersions()) { // This is a custom query that extends AutomatonQuery and want to make sure the equals method works - assertEquals(Queries.newNonNestedFilter(version), Queries.newNonNestedFilter(version)); - assertEquals(Queries.newNonNestedFilter(version).hashCode(), Queries.newNonNestedFilter(version).hashCode()); - assertEquals(Queries.newNonNestedFilter(version), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME)); + assertEquals(Queries.newNonNestedFilter(), Queries.newNonNestedFilter()); + assertEquals(Queries.newNonNestedFilter().hashCode(), Queries.newNonNestedFilter().hashCode()); + assertEquals(Queries.newNonNestedFilter(), new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME)); } } diff --git a/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java index b9112b3674c82..26674189f3cd8 100644 --- 
a/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java @@ -33,7 +33,6 @@ package org.opensearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.MatchAllDocsQuery; @@ -83,10 +82,6 @@ public int advance(int target) throws IOException { private static Weight fakeWeight() { return new Weight(new MatchAllDocsQuery()) { - @Override - public void extractTerms(Set terms) { - - } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { diff --git a/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java b/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java index 4547313639909..70e1e8d73ef3a 100644 --- a/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java +++ b/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java @@ -243,7 +243,7 @@ public static void corruptFile(Path fileToCorrupt, Logger logger) throws IOExcep assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - 8); // one long is the checksum... 
8 bytes checksumAfterCorruption = input.getChecksum(); - actualChecksumAfterCorruption = input.readLong(); + actualChecksumAfterCorruption = CodecUtil.readBELong(input); } StringBuilder msg = new StringBuilder(); msg.append("Checksum before: [").append(checksumBeforeCorruption).append("]"); diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java index 745092678861c..66de4d03ebbbf 100644 --- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java @@ -34,17 +34,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat; -import org.apache.lucene.codecs.lucene87.Lucene87Codec; -import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat; -import org.apache.lucene.document.BinaryDocValuesField; +import org.apache.lucene.codecs.lucene90.Lucene90Codec; +import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; @@ -68,43 +65,21 @@ public class CodecTests extends OpenSearchTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene87Codec.class)); - assertThat(codecService.codec("Lucene87"), instanceOf(Lucene87Codec.class)); + 
assertThat(codecService.codec("default"), instanceOf(Lucene90Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertDVCompressionEquals(Lucene80DocValuesFormat.Mode.BEST_COMPRESSION, codec); - assertStoredFieldsFormatCompressionEquals(Lucene87StoredFieldsFormat.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene90Codec.Mode.BEST_SPEED, codec); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertDVCompressionEquals(Lucene80DocValuesFormat.Mode.BEST_COMPRESSION, codec); - assertStoredFieldsFormatCompressionEquals(Lucene87StoredFieldsFormat.Mode.BEST_COMPRESSION, codec); - } - - private void assertDVCompressionEquals(Lucene80DocValuesFormat.Mode expected, Codec actual) throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(null); - iwc.setCodec(actual); - IndexWriter iw = new IndexWriter(dir, iwc); - Document doc = new Document(); - doc.add(new BinaryDocValuesField("foo", new BytesRef("aaa"))); - iw.addDocument(doc); - iw.commit(); - iw.close(); - DirectoryReader ir = DirectoryReader.open(dir); - SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); - String v = sr.getFieldInfos().fieldInfo("foo").getAttribute(Lucene80DocValuesFormat.MODE_KEY); - assertNotNull(v); - assertEquals(expected, Lucene80DocValuesFormat.Mode.valueOf(v)); - ir.close(); - dir.close(); + assertStoredFieldsCompressionEquals(Lucene90Codec.Mode.BEST_COMPRESSION, codec); } // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsFormatCompressionEquals(Lucene87StoredFieldsFormat.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene90Codec.Mode expected, Codec actual) throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(null); 
iwc.setCodec(actual); @@ -114,9 +89,9 @@ private void assertStoredFieldsFormatCompressionEquals(Lucene87StoredFieldsForma iw.close(); DirectoryReader ir = DirectoryReader.open(dir); SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); - String v = sr.getSegmentInfo().info.getAttribute(Lucene87StoredFieldsFormat.MODE_KEY); + String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene87StoredFieldsFormat.Mode.valueOf(v)); + assertEquals(expected, Lucene90Codec.Mode.valueOf(v)); ir.close(); dir.close(); } diff --git a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java index 103c8c392be94..30285b1a3a014 100644 --- a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java @@ -32,14 +32,14 @@ package org.opensearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene87.Lucene87Codec; +import org.apache.lucene.codecs.lucene90.Lucene90Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.suggest.document.Completion84PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; import org.apache.lucene.search.suggest.document.SuggestField; import org.apache.lucene.store.Directory; import org.opensearch.OpenSearchException; @@ -69,8 +69,8 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); - 
final PostingsFormat postingsFormat = new Completion84PostingsFormat(); - indexWriterConfig.setCodec(new Lucene87Codec() { + final PostingsFormat postingsFormat = new Completion90PostingsFormat(); + indexWriterConfig.setCodec(new Lucene90Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 33f09a3e67db8..5202e04990f95 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -317,7 +317,6 @@ public void testVerboseSegments() throws Exception { segments = engine.segments(true); assertThat(segments.size(), equalTo(1)); - assertThat(segments.get(0).ramTree, notNullValue()); ParsedDocument doc2 = testParsedDocument("2", null, testDocumentWithTextField(), B_2, null); engine.index(indexForDoc(doc2)); @@ -328,9 +327,6 @@ public void testVerboseSegments() throws Exception { segments = engine.segments(true); assertThat(segments.size(), equalTo(3)); - assertThat(segments.get(0).ramTree, notNullValue()); - assertThat(segments.get(1).ramTree, notNullValue()); - assertThat(segments.get(2).ramTree, notNullValue()); } } diff --git a/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java index 95f6e7998b128..f9c6c065904d7 100644 --- a/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java @@ -68,7 +68,7 @@ public void testRamBytesUsed() throws Exception { map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue()); } } - long actualRamBytesUsed = RamUsageTester.sizeOf(map); + long actualRamBytesUsed = 
RamUsageTester.ramUsed(map); long estimatedRamBytesUsed = map.ramBytesUsed(); // less than 50% off assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, actualRamBytesUsed / 2); @@ -84,7 +84,7 @@ public void testRamBytesUsed() throws Exception { map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue()); } } - actualRamBytesUsed = RamUsageTester.sizeOf(map); + actualRamBytesUsed = RamUsageTester.ramUsed(map); estimatedRamBytesUsed = map.ramBytesUsed(); long tolerance; if (Constants.JRE_IS_MINIMUM_JAVA9) { diff --git a/server/src/test/java/org/opensearch/index/engine/SegmentTests.java b/server/src/test/java/org/opensearch/index/engine/SegmentTests.java index 744b0d0cb4733..7b0072f83f9cb 100644 --- a/server/src/test/java/org/opensearch/index/engine/SegmentTests.java +++ b/server/src/test/java/org/opensearch/index/engine/SegmentTests.java @@ -92,7 +92,7 @@ static Segment randomSegment() { segment.sizeInBytes = randomNonNegativeLong(); segment.docCount = randomIntBetween(1, Integer.MAX_VALUE); segment.delDocCount = randomIntBetween(0, segment.docCount); - segment.version = Version.LUCENE_7_0_0; + segment.version = Version.LUCENE_8_0_0; segment.compound = randomBoolean(); segment.mergeId = randomAlphaOfLengthBetween(1, 10); segment.segmentSort = randomIndexSort(); diff --git a/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java b/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java index 3ca7cdade3509..dc6511b9e2632 100644 --- a/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java +++ b/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java @@ -44,12 +44,12 @@ public void testIndexRamBytesUsed() { translogLoc = new Translog.Location(randomNonNegativeLong(), randomNonNegativeLong(), randomInt()); } IndexVersionValue versionValue = new IndexVersionValue(translogLoc, randomLong(), randomLong(), randomLong()); - assertEquals(RamUsageTester.sizeOf(versionValue), versionValue.ramBytesUsed()); 
+ assertEquals(RamUsageTester.ramUsed(versionValue), versionValue.ramBytesUsed()); } public void testDeleteRamBytesUsed() { DeleteVersionValue versionValue = new DeleteVersionValue(randomLong(), randomLong(), randomLong(), randomLong()); - assertEquals(RamUsageTester.sizeOf(versionValue), versionValue.ramBytesUsed()); + assertEquals(RamUsageTester.ramUsed(versionValue), versionValue.ramBytesUsed()); } } diff --git a/server/src/test/java/org/opensearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/DateFieldTypeTests.java index 20e0d5cfeec29..085343f4ff2f7 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DateFieldTypeTests.java @@ -40,9 +40,9 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.sandbox.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexOrDocValuesQuery; -import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.opensearch.Version; diff --git a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java index 1ac20f4d0dfe6..57f3f3693257b 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java @@ -36,16 +36,16 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import 
org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.apache.lucene.sandbox.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Sort; @@ -531,6 +531,7 @@ public void doTestDocValueRangeQueries(NumberType type, Supplier valueSu dir.close(); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testIndexSortIntRange() throws Exception { doTestIndexSortRangeQueries(NumberType.INTEGER, random()::nextInt); } diff --git a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java index 65776001381a0..b6e1818364328 100644 --- a/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/StoredNumericValuesTests.java @@ -155,7 +155,7 @@ public void testBytesAndNumericRepresentation() throws Exception { CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); searcher.doc(0, fieldsVisitor); - fieldsVisitor.postProcess(mapperService); + fieldsVisitor.postProcess(mapperService::fieldType); assertThat(fieldsVisitor.fields().size(), equalTo(10)); assertThat(fieldsVisitor.fields().get("field1").size(), equalTo(1)); assertThat(fieldsVisitor.fields().get("field1").get(0), equalTo((byte) 1)); diff --git a/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java 
b/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java index f33e59cb39208..2db5f2eea3596 100644 --- a/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java @@ -51,6 +51,10 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.MultiPhraseQuery; @@ -58,10 +62,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.common.Strings; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; diff --git a/server/src/test/java/org/opensearch/index/query/DisMaxQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/DisMaxQueryBuilderTests.java index 7bbebb82c03d0..bc8cd6c57b975 100644 --- a/server/src/test/java/org/opensearch/index/query/DisMaxQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/DisMaxQueryBuilderTests.java @@ -42,14 +42,9 @@ import java.io.IOException; import java.util.Collection; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; 
- public class DisMaxQueryBuilderTests extends AbstractQueryTestCase { /** * @return a {@link DisMaxQueryBuilder} with random inner queries @@ -70,14 +65,8 @@ protected DisMaxQueryBuilder doCreateTestQueryBuilder() { @Override protected void doAssertLuceneQuery(DisMaxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { Collection queries = AbstractQueryBuilder.toQueries(queryBuilder.innerQueries(), context); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; - assertThat(disjunctionMaxQuery.getTieBreakerMultiplier(), equalTo(queryBuilder.tieBreaker())); - assertThat(disjunctionMaxQuery.getDisjuncts().size(), equalTo(queries.size())); - Iterator queryIterator = queries.iterator(); - for (int i = 0; i < disjunctionMaxQuery.getDisjuncts().size(); i++) { - assertThat(disjunctionMaxQuery.getDisjuncts().get(i), equalTo(queryIterator.next())); - } + Query expected = new DisjunctionMaxQuery(queries, queryBuilder.tieBreaker()); + assertEquals(expected, query); } @Override @@ -114,20 +103,8 @@ public void testToQueryInnerPrefixQuery() throws Exception { + " }\n" + "}"; Query query = parseQuery(queryAsString).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query; - - List disjuncts = disjunctionMaxQuery.getDisjuncts(); - assertThat(disjuncts.size(), equalTo(1)); - - assertThat(disjuncts.get(0), instanceOf(BoostQuery.class)); - BoostQuery boostQuery = (BoostQuery) disjuncts.get(0); - assertThat((double) boostQuery.getBoost(), closeTo(1.2, 0.00001)); - assertThat(boostQuery.getQuery(), instanceOf(PrefixQuery.class)); - PrefixQuery firstQ = (PrefixQuery) boostQuery.getQuery(); - // since age is automatically registered in data, we encode it as numeric - assertThat(firstQ.getPrefix(), equalTo(new Term(TEXT_FIELD_NAME, "sh"))); - + Query expected = new 
DisjunctionMaxQuery(List.of(new BoostQuery(new PrefixQuery(new Term(TEXT_FIELD_NAME, "sh")), 1.2f)), 0); + assertEquals(expected, query); } public void testFromJson() throws IOException { diff --git a/server/src/test/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilderTests.java index 10d4a7918fb1e..402b44ed3df76 100644 --- a/server/src/test/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilderTests.java @@ -32,8 +32,12 @@ package org.opensearch.index.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanTermQuery; +import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -52,6 +56,7 @@ protected FieldMaskingSpanQueryBuilder doCreateTestQueryBuilder() { fieldName = randomAlphaOfLengthBetween(1, 10); } SpanTermQueryBuilder innerQuery = new SpanTermQueryBuilderTests().createTestQueryBuilder(); + innerQuery.boost(1f); return new FieldMaskingSpanQueryBuilder(innerQuery, fieldName); } @@ -62,7 +67,8 @@ protected void doAssertLuceneQuery(FieldMaskingSpanQueryBuilder queryBuilder, Qu assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); FieldMaskingSpanQuery fieldMaskingSpanQuery = (FieldMaskingSpanQuery) query; assertThat(fieldMaskingSpanQuery.getField(), equalTo(fieldInQuery)); - assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(queryBuilder.innerQuery().toQuery(context))); + Query sub = queryBuilder.innerQuery().toQuery(context); + assertThat(fieldMaskingSpanQuery.getMaskedQuery(), equalTo(sub)); } public void testIllegalArguments() { @@ -90,10 +96,35 
@@ public void testFromJson() throws IOException { + " \"_name\" : \"KPI\"\n" + " }\n" + "}"; - FieldMaskingSpanQueryBuilder parsed = (FieldMaskingSpanQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); - assertEquals(json, 42.0, parsed.boost(), 0.00001); - assertEquals(json, 0.23, parsed.innerQuery().boost(), 0.00001); + Exception exception = expectThrows(ParsingException.class, () -> parseQuery(json)); + assertThat( + exception.getMessage(), + equalTo( + SPAN_FIELD_MASKING_FIELD.getPreferredName() + " [query] as a nested span clause can't have non-default boost value [0.23]" + ) + ); + } + + public void testJsonSpanTermWithBoost() throws IOException { + String json = "{\n" + + " \"span_field_masking\" : {\n" + + " \"query\" : {\n" + + " \"span_term\" : {\n" + + " \"value\" : {\n" + + " \"value\" : \"term\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"field\" : \"mapped_geo_shape\",\n" + + " \"boost\" : 42.0,\n" + + " \"_name\" : \"KPI\"\n" + + " }\n" + + "}"; + Query query = parseQuery(json).toQuery(createShardContext()); + assertEquals( + new BoostQuery(new FieldMaskingSpanQuery(new SpanTermQuery(new Term("value", "term")), "mapped_geo_shape"), 42f), + query + ); } public void testDeprecatedName() throws IOException { diff --git a/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java index 758f69f715a4d..9ae95fd941a59 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -277,7 +277,9 @@ public void testAnalysisSynonym() throws Exception { query, asList( new TermQuery(new Term(TEXT_FIELD_NAME, "fox")), - new SynonymQuery(new Term(TEXT_FIELD_NAME, "dogs"), new Term(TEXT_FIELD_NAME, "dog")), + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "dogs")) + .addTerm(new 
Term(TEXT_FIELD_NAME, "dog")) + .build(), new PrefixQuery(new Term(TEXT_FIELD_NAME, "red")) ) ); diff --git a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java index bf42aca156805..354e932f6b9f9 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java @@ -37,6 +37,10 @@ import org.apache.lucene.analysis.MockSynonymAnalyzer; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; @@ -47,10 +51,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; import org.opensearch.common.ParsingException; diff --git a/server/src/test/java/org/opensearch/index/query/MultiMatchQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MultiMatchQueryBuilderTests.java index 294674ee01189..ec04ee8fd3d6d 100644 --- a/server/src/test/java/org/opensearch/index/query/MultiMatchQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MultiMatchQueryBuilderTests.java @@ -64,9 +64,9 @@ import static org.opensearch.index.query.QueryBuilders.multiMatchQuery; import 
static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBooleanSubQuery; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertDisjunctionSubQuery; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.either; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.CoreMatchers.instanceOf; @@ -241,34 +241,29 @@ public void testToQueryMultipleFieldsDisableDismax() throws Exception { .field(KEYWORD_FIELD_NAME) .tieBreaker(1.0f) .toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; - assertThat(dQuery.getTieBreakerMultiplier(), equalTo(1.0f)); - assertThat(dQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 1 + ); + assertEquals(expected, query); } public void testToQueryMultipleFieldsDisMaxQuery() throws Exception { Query query = multiMatchQuery("test").field(TEXT_FIELD_NAME).field(KEYWORD_FIELD_NAME).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; - assertThat(disMaxQuery.getTieBreakerMultiplier(), equalTo(0.0f)); - List disjuncts = disMaxQuery.getDisjuncts(); - assertThat(disjuncts.get(0), instanceOf(TermQuery.class)); - assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(disjuncts.get(1), instanceOf(TermQuery.class)); - 
assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 0 + ); + assertEquals(expected, query); } public void testToQueryFieldsWildcard() throws Exception { Query query = multiMatchQuery("test").field("mapped_str*").tieBreaker(1.0f).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; - assertThat(dQuery.getTieBreakerMultiplier(), equalTo(1.0f)); - assertThat(dQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 1 + ); + assertEquals(expected, query); } public void testToQueryFieldMissing() throws Exception { @@ -298,11 +293,22 @@ public void testToQueryBooleanPrefixMultipleFields() throws IOException { assertThat(query, instanceOf(DisjunctionMaxQuery.class)); final DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; assertThat(disMaxQuery.getDisjuncts(), hasSize(2)); - final BooleanQuery firstDisjunct = assertDisjunctionSubQuery(disMaxQuery, BooleanQuery.class, 0); - assertThat(firstDisjunct.clauses(), hasSize(2)); - assertThat(assertBooleanSubQuery(firstDisjunct, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "foo"))); - final PrefixQuery secondDisjunct = assertDisjunctionSubQuery(disMaxQuery, PrefixQuery.class, 1); - assertThat(secondDisjunct.getPrefix(), equalTo(new Term(KEYWORD_FIELD_NAME, "foo bar"))); + for (Query disjunct : 
disMaxQuery.getDisjuncts()) { + if (disjunct instanceof BooleanQuery) { + final BooleanQuery firstDisjunct = (BooleanQuery) disjunct; + assertThat(firstDisjunct.clauses(), hasSize(2)); + assertThat( + assertBooleanSubQuery(firstDisjunct, TermQuery.class, 0).getTerm(), + equalTo(new Term(TEXT_FIELD_NAME, "foo")) + ); + } else if (disjunct instanceof PrefixQuery) { + final PrefixQuery secondDisjunct = (PrefixQuery) disjunct; + assertThat(secondDisjunct.getPrefix(), equalTo(new Term(KEYWORD_FIELD_NAME, "foo bar"))); + } else { + throw new AssertionError(); + } + assertThat(disjunct, either(instanceOf(BooleanQuery.class)).or(instanceOf(PrefixQuery.class))); + } } } diff --git a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java index 8eaeaa17f7bb5..d08f2ef170bf2 100644 --- a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java @@ -37,6 +37,9 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; @@ -58,9 +61,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import 
org.apache.lucene.util.automaton.Automaton; @@ -95,7 +95,6 @@ import static org.opensearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.opensearch.index.query.QueryBuilders.queryStringQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertBooleanSubQuery; -import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertDisjunctionSubQuery; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItems; import static org.hamcrest.Matchers.containsString; @@ -503,29 +502,29 @@ public void testToQueryMultipleTermsBooleanQuery() throws Exception { public void testToQueryMultipleFieldsBooleanQuery() throws Exception { Query query = queryStringQuery("test").field(TEXT_FIELD_NAME).field(KEYWORD_FIELD_NAME).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery bQuery = (DisjunctionMaxQuery) query; - assertThat(bQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 0 + ); + assertEquals(expected, query); } public void testToQueryMultipleFieldsDisMaxQuery() throws Exception { Query query = queryStringQuery("test").field(TEXT_FIELD_NAME).field(KEYWORD_FIELD_NAME).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; - List disjuncts = disMaxQuery.getDisjuncts(); - assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new 
Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 0 + ); + assertEquals(expected, query); } public void testToQueryFieldsWildcard() throws Exception { Query query = queryStringQuery("test").field("mapped_str*").toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; - assertThat(dQuery.getDisjuncts().size(), equalTo(2)); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(TEXT_FIELD_NAME, "test"))); - assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(KEYWORD_FIELD_NAME, "test"))); + Query expected = new DisjunctionMaxQuery( + List.of(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), new TermQuery(new Term(KEYWORD_FIELD_NAME, "test"))), + 0 + ); + assertEquals(expected, query); } /** @@ -544,11 +543,14 @@ public void testAllowLeadingWildcard() throws Exception { public void testToQueryDisMaxQuery() throws Exception { Query query = queryStringQuery("test").field(TEXT_FIELD_NAME, 2.2f).field(KEYWORD_FIELD_NAME).toQuery(createShardContext()); - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) query; - List disjuncts = disMaxQuery.getDisjuncts(); - assertTermOrBoostQuery(disjuncts.get(0), TEXT_FIELD_NAME, "test", 2.2f); - assertTermOrBoostQuery(disjuncts.get(1), KEYWORD_FIELD_NAME, "test", 1.0f); + Query expected = new DisjunctionMaxQuery( + List.of( + new BoostQuery(new TermQuery(new Term(TEXT_FIELD_NAME, "test")), 2.2f), + new TermQuery(new Term(KEYWORD_FIELD_NAME, "test")) + ), + 0 + ); + assertEquals(expected, query); } public void testToQueryWildcardQuery() throws Exception { @@ -602,15 +604,27 @@ public void testToQueryWilcardQueryWithSynonyms() throws Exception { Query query = 
queryParser.parse("first foo-bar-foobar* last"); Query expectedQuery = new BooleanQuery.Builder().add( - new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "first"), new Term(TEXT_FIELD_NAME, "first")), defaultOp) + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "first")) + .addTerm(new Term(TEXT_FIELD_NAME, "first")) + .build(), + defaultOp + ) ) .add( new BooleanQuery.Builder().add( - new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "foo"), new Term(TEXT_FIELD_NAME, "foo")), defaultOp) + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "foo")) + .addTerm(new Term(TEXT_FIELD_NAME, "foo")) + .build(), + defaultOp + ) ) .add( new BooleanClause( - new SynonymQuery(new Term(TEXT_FIELD_NAME, "bar"), new Term(TEXT_FIELD_NAME, "bar")), + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "bar")) + .addTerm(new Term(TEXT_FIELD_NAME, "bar")) + .build(), defaultOp ) ) @@ -625,7 +639,14 @@ public void testToQueryWilcardQueryWithSynonyms() throws Exception { .build(), defaultOp ) - .add(new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "last"), new Term(TEXT_FIELD_NAME, "last")), defaultOp)) + .add( + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "last")) + .addTerm(new Term(TEXT_FIELD_NAME, "last")) + .build(), + defaultOp + ) + ) .build(); assertThat(query, Matchers.equalTo(expectedQuery)); } diff --git a/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java index 35a04e80e4511..b11e0cab76340 100644 --- a/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java @@ -35,6 +35,10 @@ import org.apache.lucene.analysis.MockSynonymAnalyzer; import 
org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; @@ -46,10 +50,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.TestUtil; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; @@ -509,15 +509,27 @@ public void testAnalyzerWildcardWithSynonyms() throws IOException { parser.setDefaultOperator(defaultOp); Query query = parser.parse("first foo-bar-foobar* last"); Query expectedQuery = new BooleanQuery.Builder().add( - new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "first"), new Term(TEXT_FIELD_NAME, "first")), defaultOp) + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "first")) + .addTerm(new Term(TEXT_FIELD_NAME, "first")) + .build(), + defaultOp + ) ) .add( new BooleanQuery.Builder().add( - new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "foo"), new Term(TEXT_FIELD_NAME, "foo")), defaultOp) + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "foo")) + .addTerm(new Term(TEXT_FIELD_NAME, "foo")) + .build(), + defaultOp + ) ) .add( new BooleanClause( - new SynonymQuery(new Term(TEXT_FIELD_NAME, "bar"), new Term(TEXT_FIELD_NAME, "bar")), + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new 
Term(TEXT_FIELD_NAME, "bar")) + .addTerm(new Term(TEXT_FIELD_NAME, "bar")) + .build(), defaultOp ) ) @@ -532,7 +544,14 @@ public void testAnalyzerWildcardWithSynonyms() throws IOException { .build(), defaultOp ) - .add(new BooleanClause(new SynonymQuery(new Term(TEXT_FIELD_NAME, "last"), new Term(TEXT_FIELD_NAME, "last")), defaultOp)) + .add( + new BooleanClause( + new SynonymQuery.Builder(TEXT_FIELD_NAME).addTerm(new Term(TEXT_FIELD_NAME, "last")) + .addTerm(new Term(TEXT_FIELD_NAME, "last")) + .build(), + defaultOp + ) + ) .build(); assertThat(query, equalTo(expectedQuery)); } diff --git a/server/src/test/java/org/opensearch/index/query/SpanContainingQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanContainingQueryBuilderTests.java index e43bd8e8d4f2b..33be236b45ab6 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanContainingQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanContainingQueryBuilderTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanContainingQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanContainingQuery; import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; diff --git a/server/src/test/java/org/opensearch/index/query/SpanFirstQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanFirstQueryBuilderTests.java index d2c1fd8aaa021..bbae35478c6b5 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanFirstQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanFirstQueryBuilderTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanFirstQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanFirstQuery; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; import 
org.opensearch.common.xcontent.XContentBuilder; diff --git a/server/src/test/java/org/opensearch/index/query/SpanGapQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanGapQueryBuilderTests.java index 5e85a7eb5da01..3b4755d651c50 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanGapQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanGapQueryBuilderTests.java @@ -32,11 +32,10 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanBoostQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -74,9 +73,7 @@ protected SpanNearQueryBuilder doCreateTestQueryBuilder() { protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { assertThat( query, - either(instanceOf(SpanNearQuery.class)).or(instanceOf(SpanTermQuery.class)) - .or(instanceOf(SpanBoostQuery.class)) - .or(instanceOf(MatchAllQueryBuilder.class)) + either(instanceOf(SpanNearQuery.class)).or(instanceOf(SpanTermQuery.class)).or(instanceOf(MatchAllQueryBuilder.class)) ); if (query instanceof SpanNearQuery) { SpanNearQuery spanNearQuery = (SpanNearQuery) query; @@ -89,7 +86,7 @@ protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query quer if (spanQB instanceof SpanGapQueryBuilder) continue; assertThat(spanQuery, equalTo(spanQB.toQuery(context))); } - } else if (query instanceof SpanTermQuery || query instanceof SpanBoostQuery) { + } else if (query instanceof SpanTermQuery) { assertThat(queryBuilder.clauses().size(), equalTo(1)); assertThat(query, 
equalTo(queryBuilder.clauses().get(0).toQuery(context))); } diff --git a/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java index a17f9e8c9d921..011d05aef1214 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -39,6 +39,10 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; +import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.IndexSearcher; @@ -47,10 +51,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopTermsRewrite; -import org.apache.lucene.search.spans.FieldMaskingSpanQuery; -import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; @@ -184,6 +184,11 @@ public String fieldName() { } } + @Override + protected boolean supportsBoost() { + return false; + } + /** * test checks that we throw an {@link UnsupportedOperationException} if the query wrapped * by {@link SpanMultiTermQueryBuilder} does not generate a lucene {@link MultiTermQuery}. 
diff --git a/server/src/test/java/org/opensearch/index/query/SpanNearQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanNearQueryBuilderTests.java index ae2848f442e35..416e37db764b3 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanNearQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanNearQueryBuilderTests.java @@ -33,11 +33,10 @@ package org.opensearch.index.query; import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanBoostQuery; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; @@ -66,7 +65,6 @@ protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query quer assertThat( query, either(instanceOf(SpanNearQuery.class)).or(instanceOf(SpanTermQuery.class)) - .or(instanceOf(SpanBoostQuery.class)) .or(instanceOf(SpanMatchNoDocsQuery.class)) .or(instanceOf(MatchAllQueryBuilder.class)) ); @@ -79,7 +77,7 @@ protected void doAssertLuceneQuery(SpanNearQueryBuilder queryBuilder, Query quer for (SpanQuery spanQuery : spanNearQuery.getClauses()) { assertThat(spanQuery, equalTo(spanQueryBuilderIterator.next().toQuery(context))); } - } else if (query instanceof SpanTermQuery || query instanceof SpanBoostQuery) { + } else if (query instanceof SpanTermQuery) { assertThat(queryBuilder.clauses().size(), equalTo(1)); assertThat(query, equalTo(queryBuilder.clauses().get(0).toQuery(context))); } diff --git a/server/src/test/java/org/opensearch/index/query/SpanNotQueryBuilderTests.java 
b/server/src/test/java/org/opensearch/index/query/SpanNotQueryBuilderTests.java index 552539934027d..4222e42af9043 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanNotQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanNotQueryBuilderTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanNotQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanNotQuery; import org.opensearch.common.ParsingException; import org.opensearch.common.Strings; import org.opensearch.common.xcontent.XContentBuilder; diff --git a/server/src/test/java/org/opensearch/index/query/SpanOrQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanOrQueryBuilderTests.java index ec2da8ab33779..45764708efb46 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanOrQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanOrQueryBuilderTests.java @@ -32,9 +32,9 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; diff --git a/server/src/test/java/org/opensearch/index/query/SpanTermQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanTermQueryBuilderTests.java index ae0da8101a7d1..30f2143d74f1d 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanTermQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanTermQueryBuilderTests.java @@ -33,10 +33,10 @@ package org.opensearch.index.query; import com.fasterxml.jackson.core.io.JsonStringEncoder; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.index.Term; import 
org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.opensearch.common.ParsingException; import org.opensearch.common.lucene.BytesRefs; import org.opensearch.index.mapper.MappedFieldType; diff --git a/server/src/test/java/org/opensearch/index/query/SpanWithinQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanWithinQueryBuilderTests.java index 2cb9d6ae5f91e..25fd137aac286 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanWithinQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanWithinQueryBuilderTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.query; +import org.apache.lucene.queries.spans.SpanWithinQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.spans.SpanWithinQuery; import org.opensearch.common.ParsingException; import org.opensearch.test.AbstractQueryTestCase; diff --git a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java index c6cd667338303..3c39773108830 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java @@ -42,7 +42,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; -import org.apache.lucene.search.CoveringQuery; +import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/index/query/plugin/DummyQueryParserPlugin.java b/server/src/test/java/org/opensearch/index/query/plugin/DummyQueryParserPlugin.java index 64d3a8c682163..37766153efd4c 100644 --- 
a/server/src/test/java/org/opensearch/index/query/plugin/DummyQueryParserPlugin.java +++ b/server/src/test/java/org/opensearch/index/query/plugin/DummyQueryParserPlugin.java @@ -35,6 +35,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.opensearch.plugins.Plugin; @@ -74,5 +75,10 @@ public boolean equals(Object obj) { public int hashCode() { return classHash(); } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } } diff --git a/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java b/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java index ef87a70b71594..a7f765fee23da 100644 --- a/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java +++ b/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java @@ -258,16 +258,15 @@ public void testMultiMatchCrossFieldsWithSynonyms() throws IOException { // check that synonym query is used for a single field Query parsedQuery = parser.parse(MultiMatchQueryBuilder.Type.CROSS_FIELDS, fieldNames, "dogs", null); - Term[] terms = new Term[2]; - terms[0] = new Term("name.first", "dog"); - terms[1] = new Term("name.first", "dogs"); - Query expectedQuery = new SynonymQuery(terms); + Query expectedQuery = new SynonymQuery.Builder("name.first").addTerm(new Term("name.first", "dog")) + .addTerm(new Term("name.first", "dogs")) + .build(); assertThat(parsedQuery, equalTo(expectedQuery)); // check that blended term query is used for multiple fields fieldNames.put("name.last", 1.0f); parsedQuery = parser.parse(MultiMatchQueryBuilder.Type.CROSS_FIELDS, fieldNames, "dogs", null); - terms = new Term[4]; + Term[] terms = new Term[4]; terms[0] = new Term("name.first", "dog"); terms[1] = new 
Term("name.first", "dogs"); terms[2] = new Term("name.last", "dog"); diff --git a/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java index 6ecc27c155d3d..ee0b99bdc102c 100644 --- a/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java @@ -57,7 +57,6 @@ import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; -import org.opensearch.Version; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.lucene.search.Queries; import org.opensearch.common.settings.Settings; @@ -826,7 +825,7 @@ private static TopFieldDocs search( IndexSearcher searcher ) throws IOException { Query query = new BooleanQuery.Builder().add(queryBuilder.toQuery(queryShardContext), Occur.MUST) - .add(Queries.newNonNestedFilter(Version.CURRENT), Occur.FILTER) + .add(Queries.newNonNestedFilter(), Occur.FILTER) .build(); Sort sort = new Sort(sortBuilder.build(queryShardContext).field); return searcher.search(query, 10, sort); diff --git a/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java index ed29939163dbd..7f1f4ade53c50 100644 --- a/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java @@ -74,8 +74,7 @@ public void testSameNormsAsBM25DiscountOverlaps() { private void doTestSameNormsAsBM25(boolean discountOverlaps) { ScriptedSimilarity sim1 = new ScriptedSimilarity("foobar", null, "foobaz", null, discountOverlaps); - BM25Similarity sim2 = new BM25Similarity(); - sim2.setDiscountOverlaps(discountOverlaps); + BM25Similarity sim2 = 
new BM25Similarity(discountOverlaps); for (int iter = 0; iter < 100; ++iter) { final int length = TestUtil.nextInt(random(), 1, 100); final int position = random().nextInt(length); diff --git a/server/src/test/java/org/opensearch/index/similarity/SimilarityServiceTests.java b/server/src/test/java/org/opensearch/index/similarity/SimilarityServiceTests.java index 4c183aae558bc..eb666f1206c26 100644 --- a/server/src/test/java/org/opensearch/index/similarity/SimilarityServiceTests.java +++ b/server/src/test/java/org/opensearch/index/similarity/SimilarityServiceTests.java @@ -32,11 +32,11 @@ package org.opensearch.index.similarity; import org.apache.lucene.index.FieldInvertState; +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.similarities.BooleanSimilarity; import org.apache.lucene.search.similarities.Similarity; -import org.apache.lucene.search.similarity.LegacyBM25Similarity; import org.opensearch.LegacyESVersion; import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexSettings; diff --git a/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java index 01850c3384e12..418b933558e63 100644 --- a/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java @@ -32,6 +32,7 @@ package org.opensearch.index.similarity; +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.similarities.AfterEffectL; import org.apache.lucene.search.similarities.BasicModelG; import org.apache.lucene.search.similarities.BooleanSimilarity; @@ -44,7 +45,6 @@ import org.apache.lucene.search.similarities.LMJelinekMercerSimilarity; import org.apache.lucene.search.similarities.LambdaTTF; 
import org.apache.lucene.search.similarities.NormalizationH2; -import org.apache.lucene.search.similarity.LegacyBM25Similarity; import org.opensearch.common.Strings; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; diff --git a/server/src/test/java/org/opensearch/index/store/StoreTests.java b/server/src/test/java/org/opensearch/index/store/StoreTests.java index 0a0f011ee7953..53ba689fbe011 100644 --- a/server/src/test/java/org/opensearch/index/store/StoreTests.java +++ b/server/src/test/java/org/opensearch/index/store/StoreTests.java @@ -40,7 +40,6 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexNotFoundException; @@ -237,10 +236,10 @@ public void testChecksumCorrupted() throws IOException { BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024)); output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length); } - output.writeInt(CodecUtil.FOOTER_MAGIC); - output.writeInt(0); + CodecUtil.writeBEInt(output, CodecUtil.FOOTER_MAGIC); + CodecUtil.writeBEInt(output, 0); String checksum = Store.digestToString(output.getChecksum()); - output.writeLong(output.getChecksum() + 1); // write a wrong checksum to the file + CodecUtil.writeBELong(output, output.getChecksum() + 1); // write a wrong checksum to the file output.close(); IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT); @@ -502,9 +501,7 @@ public void assertDeleteContent(Store store, Directory dir) throws IOException { public static void assertConsistent(Store store, Store.MetadataSnapshot metadata) throws IOException { for (String file : store.directory().listAll()) { - if 
(!IndexWriter.WRITE_LOCK_NAME.equals(file) - && !IndexFileNames.OLD_SEGMENTS_GEN.equals(file) - && file.startsWith("extra") == false) { + if (IndexWriter.WRITE_LOCK_NAME.equals(file) == false && file.startsWith("extra") == false) { assertTrue( file + " is not in the map: " + metadata.asMap().size() + " vs. " + store.directory().listAll().length, metadata.asMap().containsKey(file) diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java index 35fec28a1c798..f1a6ba84e8543 100644 --- a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java @@ -34,6 +34,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; @@ -43,6 +44,8 @@ import org.apache.lucene.mockfile.FilterFileChannel; import org.apache.lucene.mockfile.FilterFileSystemProvider; import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.store.ByteArrayDataOutput; +import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.MockDirectoryWrapper; import org.apache.lucene.util.LineFileDocs; import org.apache.lucene.util.LuceneTestCase; @@ -1401,7 +1404,8 @@ public void testTranslogWriter() throws IOException { final Set seenSeqNos = new HashSet<>(); boolean opsHaveValidSequenceNumbers = randomBoolean(); for (int i = 0; i < numOps; i++) { - BytesStreamOutput out = new BytesStreamOutput(4); + byte[] bytes = new byte[4]; + DataOutput out = EndiannessReverserUtil.wrapDataOutput(new ByteArrayDataOutput(bytes)); out.writeInt(i); long seqNo; do { @@ -1411,7 +1415,7 @@ public void testTranslogWriter() throws 
IOException { if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { seenSeqNos.add(seqNo); } - writer.add(ReleasableBytesReference.wrap(out.bytes()), seqNo); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), seqNo); } assertThat(persistedSeqNos, empty()); writer.sync(); @@ -1433,9 +1437,10 @@ public void testTranslogWriter() throws IOException { assertThat(reader.getCheckpoint().minSeqNo, equalTo(minSeqNo)); assertThat(reader.getCheckpoint().maxSeqNo, equalTo(maxSeqNo)); - BytesStreamOutput out = new BytesStreamOutput(4); + byte[] bytes = new byte[4]; + DataOutput out = EndiannessReverserUtil.wrapDataOutput(new ByteArrayDataOutput(bytes)); out.writeInt(2048); - writer.add(ReleasableBytesReference.wrap(out.bytes()), randomNonNegativeLong()); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), randomNonNegativeLong()); if (reader instanceof TranslogReader) { ByteBuffer buffer = ByteBuffer.allocate(4); @@ -1641,9 +1646,10 @@ ChannelFactory getChannelFactory() { ) { TranslogWriter writer = translog.getCurrent(); - BytesStreamOutput out = new BytesStreamOutput(4); + byte[] bytes = new byte[4]; + DataOutput out = EndiannessReverserUtil.wrapDataOutput(new ByteArrayDataOutput(new byte[4])); out.writeInt(1); - writer.add(ReleasableBytesReference.wrap(out.bytes()), 1); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), 1); assertThat(persistedSeqNos, empty()); startBlocking.set(true); Thread thread = new Thread(() -> { @@ -1657,7 +1663,7 @@ ChannelFactory getChannelFactory() { writeStarted.await(); // Add will not block even though we are currently writing/syncing - writer.add(ReleasableBytesReference.wrap(out.bytes()), 2); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), 2); blocker.countDown(); // Sync against so that both operations are written @@ -1672,10 +1678,10 @@ public void testCloseIntoReader() throws IOException { try (TranslogWriter writer = translog.createWriter(translog.currentFileGeneration() + 1)) 
{ final int numOps = randomIntBetween(8, 128); for (int i = 0; i < numOps; i++) { - final BytesStreamOutput out = new BytesStreamOutput(4); - out.reset(); + final byte[] bytes = new byte[4]; + final DataOutput out = EndiannessReverserUtil.wrapDataOutput(new ByteArrayDataOutput(bytes)); out.writeInt(i); - writer.add(ReleasableBytesReference.wrap(out.bytes()), randomNonNegativeLong()); + writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), randomNonNegativeLong()); } writer.sync(); final Checkpoint writerCheckpoint = writer.getCheckpoint(); diff --git a/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java index 383c0277e1c27..24d8d51042548 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.DocIdSetIterator; @@ -45,6 +44,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; @@ -58,7 +58,6 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; -import java.util.Set; public class IndicesQueryCacheTests extends OpenSearchTestCase { @@ -70,6 +69,11 @@ private static class DummyQuery extends Query { this.id = id; } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public boolean equals(Object 
obj) { return sameClassAs(obj) && id == ((DummyQuery) obj).id; @@ -374,11 +378,6 @@ private static class DummyWeight extends Weight { this.weight = weight; } - @Override - public void extractTerms(Set terms) { - weight.extractTerms(terms); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return weight.explain(context, doc); diff --git a/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java index fa927a58a2de1..bc2ecc2e62fae 100644 --- a/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java @@ -130,7 +130,6 @@ private Settings loadFromClasspath(String path) throws IOException { .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - } public void testSimpleConfigurationJson() throws IOException { diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoveryStatusTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoveryStatusTests.java index c73b802720547..73caa611dbcdb 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoveryStatusTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoveryStatusTests.java @@ -31,6 +31,7 @@ package org.opensearch.indices.recovery; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.store.IndexOutput; import org.opensearch.common.util.set.Sets; @@ -65,10 +66,10 @@ public void testRenameTempFiles() throws IOException { indexShard.store() ) ) { - indexOutput.writeInt(1); + EndiannessReverserUtil.wrapDataOutput(indexOutput).writeInt(1); IndexOutput openIndexOutput = multiFileWriter.getOpenIndexOutput("foo.bar"); assertSame(openIndexOutput, 
indexOutput); - openIndexOutput.writeInt(1); + EndiannessReverserUtil.wrapDataOutput(indexOutput).writeInt(1); CodecUtil.writeFooter(indexOutput); } diff --git a/server/src/test/java/org/apache/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java similarity index 95% rename from server/src/test/java/org/apache/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java rename to server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java index 5ecd590f907a7..38f1c23bfa1f3 100644 --- a/server/src/test/java/org/apache/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java +++ b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java @@ -30,12 +30,15 @@ * GitHub history for details. */ -package org.apache.lucene.analysis.miscellaneous; +package org.opensearch.lucene.analysis.miscellaneous; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.miscellaneous.DeDuplicatingTokenFilter; +import org.apache.lucene.analysis.miscellaneous.DuplicateByteSequenceSpotter; +import org.apache.lucene.analysis.miscellaneous.DuplicateSequenceAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java similarity index 95% rename from server/src/test/java/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java rename to server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java index 
d73ef1f624504..c4601a9053f54 100644 --- a/server/src/test/java/org/apache/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java +++ b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java @@ -30,12 +30,13 @@ * GitHub history for details. */ -package org.apache.lucene.analysis.miscellaneous; +package org.opensearch.lucene.analysis.miscellaneous; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java b/server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java rename to server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java index f5c98323d4c0e..514ff904e6ff3 100644 --- a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java +++ b/server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java @@ -29,7 +29,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.grouping; +package org.opensearch.lucene.grouping; import org.apache.lucene.document.Document; import org.apache.lucene.document.NumericDocValuesField; @@ -364,7 +364,7 @@ public SortField sortField(boolean multivalued) { if (multivalued) { return new SortedSetSortField("field", false); } else { - return new SortField("field", SortField.Type.STRING_VAL); + return new SortField("field", SortField.Type.STRING); } } }; @@ -435,7 +435,7 @@ public void testEmptySortedSegment() throws Exception { MappedFieldType fieldType = new MockFieldMapper.FakeFieldType("group"); - Sort sort = new Sort(new SortField("group", SortField.Type.STRING_VAL)); + Sort sort = new Sort(new SortField("group", SortField.Type.STRING)); final CollapsingTopDocsCollector collapsingCollector = CollapsingTopDocsCollector.createKeyword("group", fieldType, sort, 10); searcher.search(new MatchAllDocsQuery(), collapsingCollector); diff --git a/server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java b/server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java similarity index 89% rename from server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java rename to server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java index fd5b54aa685b6..fcce7819d6143 100644 --- a/server/src/test/java/org/apache/lucene/index/ShuffleForcedMergePolicyTests.java +++ b/server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java @@ -30,12 +30,21 @@ * GitHub history for details. 
*/ -package org.apache.lucene.index; +package org.opensearch.lucene.index; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StringField; +import org.apache.lucene.index.BaseMergePolicyTestCase; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.ShuffleForcedMergePolicy; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java b/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java new file mode 100644 index 0000000000000..7f89176c302df --- /dev/null +++ b/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java @@ -0,0 +1,121 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.opensearch.lucene.misc.search.similarity; + +import java.util.Random; + +import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; +import org.apache.lucene.search.similarities.BM25Similarity; +import org.apache.lucene.search.similarities.BaseSimilarityTestCase; +import org.apache.lucene.search.similarities.Similarity; + +@Deprecated +public class LegacyBM25SimilarityTests extends BaseSimilarityTestCase { + + public void testIllegalK1() { + IllegalArgumentException expected = expectThrows( + IllegalArgumentException.class, + () -> { new LegacyBM25Similarity(Float.POSITIVE_INFINITY, 0.75f); } + ); + assertTrue(expected.getMessage().contains("illegal k1 value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(-1, 0.75f); }); + assertTrue(expected.getMessage().contains("illegal k1 value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(Float.NaN, 0.75f); }); + assertTrue(expected.getMessage().contains("illegal k1 value")); + } + + public void testIllegalB() { + IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(1.2f, 2f); }); + assertTrue(expected.getMessage().contains("illegal b value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(1.2f, -1f); }); + assertTrue(expected.getMessage().contains("illegal b value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(1.2f, 
Float.POSITIVE_INFINITY); }); + assertTrue(expected.getMessage().contains("illegal b value")); + + expected = expectThrows(IllegalArgumentException.class, () -> { new LegacyBM25Similarity(1.2f, Float.NaN); }); + assertTrue(expected.getMessage().contains("illegal b value")); + } + + public void testDefaults() { + LegacyBM25Similarity legacyBM25Similarity = new LegacyBM25Similarity(); + BM25Similarity bm25Similarity = new BM25Similarity(); + assertEquals(bm25Similarity.getB(), legacyBM25Similarity.getB(), 0f); + assertEquals(bm25Similarity.getK1(), legacyBM25Similarity.getK1(), 0f); + } + + public void testToString() { + LegacyBM25Similarity legacyBM25Similarity = new LegacyBM25Similarity(); + BM25Similarity bm25Similarity = new BM25Similarity(); + assertEquals(bm25Similarity.toString(), legacyBM25Similarity.toString()); + } + + @Override + protected Similarity getSimilarity(Random random) { + return new LegacyBM25Similarity(randomK1(random), randomB(random)); + } + + private static float randomK1(Random random) { + // term frequency normalization parameter k1 + switch (random.nextInt(4)) { + case 0: + // minimum value + return 0; + case 1: + // tiny value + return Float.MIN_VALUE; + case 2: + // maximum value + // upper bounds on individual term's score is 43.262806 * (k1 + 1) * boost + // we just limit the test to "reasonable" k1 values but don't enforce this anywhere. + return Integer.MAX_VALUE; + default: + // random value + return Integer.MAX_VALUE * random.nextFloat(); + } + } + + private static float randomB(Random random) { + // length normalization parameter b [0 .. 
1] + switch (random.nextInt(4)) { + case 0: + // minimum value + return 0; + case 1: + // tiny value + return Float.MIN_VALUE; + case 2: + // maximum value + return 1; + default: + // random value + return random.nextFloat(); + } + } +} diff --git a/server/src/test/java/org/apache/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java b/server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java rename to server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java index d20a6ad081a39..29a826037770f 100644 --- a/server/src/test/java/org/apache/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; diff --git a/server/src/test/java/org/apache/lucene/queries/BinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/BinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java index f0096ff4220ae..70e3c7ca53995 100644 --- a/server/src/test/java/org/apache/lucene/queries/BinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; diff --git a/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java similarity index 98% rename from server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java index 9fb482880c9de..f46a8bbca0d2a 100644 --- a/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; @@ -39,17 +39,17 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermStates; +import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.BM25Similarity; @@ -241,8 +241,7 @@ public void testExtractTerms() throws IOException { BlendedTermQuery blendedTermQuery = 
BlendedTermQuery.dismaxBlendedQuery(terms.toArray(new Term[0]), random().nextFloat()); Set extracted = new HashSet<>(); - IndexSearcher searcher = new IndexSearcher(new MultiReader()); - searcher.createWeight(searcher.rewrite(blendedTermQuery), ScoreMode.COMPLETE_NO_SCORES, 1f).extractTerms(extracted); + blendedTermQuery.visit(QueryVisitor.termCollector(extracted)); assertThat(extracted.size(), equalTo(terms.size())); assertThat(extracted, containsInAnyOrder(terms.toArray(new Term[0]))); } diff --git a/server/src/test/java/org/apache/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java index 498342d48d65e..f4d0c827ada38 100644 --- a/server/src/test/java/org/apache/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/DoubleRandomBinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.opensearch.index.mapper.RangeType; diff --git a/server/src/test/java/org/apache/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java index 45fcf845e9f61..572c043442746 100644 --- a/server/src/test/java/org/apache/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/FloatRandomBinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.opensearch.index.mapper.RangeType; diff --git a/server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java similarity index 81% rename from server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java index 37b758abefe87..b321fc1ed7389 100644 --- a/server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java @@ -29,10 +29,9 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.InetAddressPoint; -import org.apache.lucene.util.FutureArrays; import org.opensearch.index.mapper.RangeType; import java.net.InetAddress; @@ -57,7 +56,7 @@ protected Range nextRange(int dimensions) throws Exception { byte[] bMin = InetAddressPoint.encode(min); InetAddress max = nextInetaddress(); byte[] bMax = InetAddressPoint.encode(max); - if (FutureArrays.compareUnsigned(bMin, 0, bMin.length, bMax, 0, bMin.length) > 0) { + if (Arrays.compareUnsigned(bMin, 0, bMin.length, bMax, 0, bMin.length) > 0) { return new IpRange(max, min); } return new IpRange(min, max); @@ -104,7 +103,7 @@ protected void setMin(int dim, Object val) { InetAddress v = (InetAddress) val; byte[] e = InetAddressPoint.encode(v); - if (FutureArrays.compareUnsigned(min, 0, e.length, e, 0, e.length) < 0) { + if (Arrays.compareUnsigned(min, 0, e.length, e, 0, e.length) < 0) { max = e; maxAddress = v; } else { @@ -124,7 +123,7 @@ protected void setMax(int dim, Object val) { InetAddress v = (InetAddress) val; byte[] e = InetAddressPoint.encode(v); - if (FutureArrays.compareUnsigned(max, 0, e.length, e, 0, e.length) > 0) { + if (Arrays.compareUnsigned(max, 0, e.length, e, 0, e.length) > 0) { min = e; minAddress = v; } else { @@ -136,22 +135,22 @@ protected void setMax(int dim, Object val) { @Override protected boolean isDisjoint(Range o) { IpRange other = (IpRange) o; - return FutureArrays.compareUnsigned(min, 0, min.length, other.max, 0, min.length) > 0 - || FutureArrays.compareUnsigned(max, 0, max.length, other.min, 0, max.length) < 0; + return Arrays.compareUnsigned(min, 0, min.length, other.max, 0, min.length) > 0 + || Arrays.compareUnsigned(max, 0, max.length, other.min, 0, max.length) < 0; } @Override protected boolean isWithin(Range o) { IpRange other = (IpRange) o; - return FutureArrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) >= 0 - && 
FutureArrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) <= 0; + return Arrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) >= 0 + && Arrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) <= 0; } @Override protected boolean contains(Range o) { IpRange other = (IpRange) o; - return FutureArrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) <= 0 - && FutureArrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) >= 0; + return Arrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) <= 0 + && Arrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) >= 0; } } diff --git a/server/src/test/java/org/apache/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java index bc5fc1b3df2f4..e4d3615277455 100644 --- a/server/src/test/java/org/apache/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.util.TestUtil; import org.opensearch.index.mapper.RangeType; diff --git a/server/src/test/java/org/apache/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java index 2dd49da45dc11..cd8457b828342 100644 --- a/server/src/test/java/org/apache/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java @@ -29,7 +29,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.util.TestUtil; import org.opensearch.index.mapper.RangeType; diff --git a/server/src/test/java/org/apache/lucene/queries/MinDocQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java similarity index 98% rename from server/src/test/java/org/apache/lucene/queries/MinDocQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java index 0640935481ce4..d5c789ae0aa89 100644 --- a/server/src/test/java/org/apache/lucene/queries/MinDocQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; diff --git a/server/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java similarity index 99% rename from server/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java index 2faa01cc569c5..b4e035443cd82 100644 --- a/server/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedDocValuesField; diff --git a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java similarity index 93% rename from server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java rename to server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java index e8ac8f62be1c4..110a64e102ed4 100644 --- a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java @@ -30,7 +30,7 @@ * GitHub history for details. 
*/ -package org.apache.lucene.queries; +package org.opensearch.lucene.queries; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; @@ -40,14 +40,15 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanOrQuery; +import org.apache.lucene.queries.spans.SpanQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryUtils; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanOrQuery; -import org.apache.lucene.search.spans.SpanQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java b/server/src/test/java/org/opensearch/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java similarity index 98% rename from server/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java rename to server/src/test/java/org/opensearch/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java index 8f1fa76facffc..d3706e9250271 100644 --- a/server/src/test/java/org/apache/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java +++ b/server/src/test/java/org/opensearch/lucene/search/uhighlight/BoundedBreakIteratorScannerTests.java @@ -30,8 +30,9 @@ * GitHub history for details. 
*/ -package org.apache.lucene.search.uhighlight; +package org.opensearch.lucene.search.uhighlight; +import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner; import org.opensearch.test.OpenSearchTestCase; import java.text.BreakIterator; diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomPassageFormatterTests.java similarity index 95% rename from server/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java rename to server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomPassageFormatterTests.java index 1d5695e7d54fa..10cdebaf69a36 100644 --- a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java +++ b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomPassageFormatterTests.java @@ -30,10 +30,13 @@ * GitHub history for details. */ -package org.apache.lucene.search.uhighlight; +package org.opensearch.lucene.search.uhighlight; import org.apache.lucene.search.highlight.DefaultEncoder; import org.apache.lucene.search.highlight.SimpleHTMLEncoder; +import org.apache.lucene.search.uhighlight.CustomPassageFormatter; +import org.apache.lucene.search.uhighlight.Passage; +import org.apache.lucene.search.uhighlight.Snippet; import org.apache.lucene.util.BytesRef; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java similarity index 82% rename from server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java rename to server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index 3265626c8a6e9..70a260837271c 100644 --- 
a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -30,7 +30,7 @@ * GitHub history for details. */ -package org.apache.lucene.search.uhighlight; +package org.opensearch.lucene.search.uhighlight; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.custom.CustomAnalyzer; @@ -57,6 +57,11 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.highlight.DefaultEncoder; +import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner; +import org.apache.lucene.search.uhighlight.CustomPassageFormatter; +import org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter; +import org.apache.lucene.search.uhighlight.Snippet; +import org.apache.lucene.search.uhighlight.UnifiedHighlighter; import org.apache.lucene.store.Directory; import org.opensearch.common.Strings; import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; @@ -79,49 +84,48 @@ private void assertHighlightOneDoc( int noMatchSize, String[] expectedPassages ) throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(analyzer); - iwc.setMergePolicy(newTieredMergePolicy(random())); - RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); - FieldType ft = new FieldType(TextField.TYPE_STORED); - ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); - ft.freeze(); - Document doc = new Document(); - for (String input : inputs) { - Field field = new Field(fieldName, "", ft); - field.setStringValue(input); - doc.add(field); + try (Directory dir = newDirectory()) { + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); + iwc.setMergePolicy(newTieredMergePolicy(random())); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); + FieldType ft = new FieldType(TextField.TYPE_STORED); + 
ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); + ft.freeze(); + Document doc = new Document(); + for (String input : inputs) { + Field field = new Field(fieldName, "", ft); + field.setStringValue(input); + doc.add(field); + } + iw.addDocument(doc); + try (DirectoryReader reader = iw.getReader()) { + IndexSearcher searcher = newSearcher(reader); + iw.close(); + TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); + assertThat(topDocs.totalHits.value, equalTo(1L)); + String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); + CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter( + searcher, + analyzer, + UnifiedHighlighter.OffsetSource.ANALYSIS, + new CustomPassageFormatter("", "", new DefaultEncoder()), + locale, + breakIterator, + "index", + "text", + query, + noMatchSize, + expectedPassages.length, + name -> "text".equals(name), + Integer.MAX_VALUE + ); + final Snippet[] snippets = highlighter.highlightField(getOnlyLeafReader(reader), topDocs.scoreDocs[0].doc, () -> rawValue); + assertEquals(snippets.length, expectedPassages.length); + for (int i = 0; i < snippets.length; i++) { + assertEquals(snippets[i].getText(), expectedPassages[i]); + } + } } - iw.addDocument(doc); - DirectoryReader reader = iw.getReader(); - IndexSearcher searcher = newSearcher(reader); - iw.close(); - TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); - String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); - CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter( - searcher, - analyzer, - null, - new CustomPassageFormatter("", "", new DefaultEncoder()), - locale, - breakIterator, - "index", - "text", - query, - noMatchSize, - expectedPassages.length, - name -> "text".equals(name), - Integer.MAX_VALUE, - Integer.MAX_VALUE - ); - final Snippet[] snippets = 
highlighter.highlightField(getOnlyLeafReader(reader), topDocs.scoreDocs[0].doc, () -> rawValue); - assertEquals(snippets.length, expectedPassages.length); - for (int i = 0; i < snippets.length; i++) { - assertEquals(snippets[i].getText(), expectedPassages[i]); - } - reader.close(); - dir.close(); } public void testSimple() throws Exception { diff --git a/server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java b/server/src/test/java/org/opensearch/lucene/util/CombinedBitSetTests.java similarity index 95% rename from server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java rename to server/src/test/java/org/opensearch/lucene/util/CombinedBitSetTests.java index 0b0d4263fcae4..722ae1a13e15f 100644 --- a/server/src/test/java/org/apache/lucene/util/CombinedBitSetTests.java +++ b/server/src/test/java/org/opensearch/lucene/util/CombinedBitSetTests.java @@ -30,9 +30,13 @@ * GitHub history for details. */ -package org.apache.lucene.util; +package org.opensearch.lucene.util; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.util.BitSet; +import org.apache.lucene.util.CombinedBitSet; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.SparseFixedBitSet; import org.opensearch.test.OpenSearchTestCase; public class CombinedBitSetTests extends OpenSearchTestCase { diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index bddc61211592e..572d2c322153f 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -648,6 +648,7 @@ public void testJarHellTransitiveMap() throws Exception { assertThat(deps, containsInAnyOrder(pluginJar.toUri().toURL(), dep1Jar.toUri().toURL(), dep2Jar.toUri().toURL())); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void 
testNonExtensibleDep() throws Exception { // This test opens a child classloader, reading a jar under the test temp // dir (a dummy plugin). Classloaders are closed by GC, so when test teardown @@ -790,6 +791,7 @@ public FakePlugin() { } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testExistingMandatoryInstalledPlugin() throws IOException { // This test opens a child classloader, reading a jar under the test temp // dir (a dummy plugin). Classloaders are closed by GC, so when test teardown @@ -823,6 +825,7 @@ public void testExistingMandatoryInstalledPlugin() throws IOException { newPluginsService(settings); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPluginFromParentClassLoader() throws IOException { final Path pathHome = createTempDir(); final Path plugins = pathHome.resolve("plugins"); @@ -860,6 +863,7 @@ public void testPluginFromParentClassLoader() throws IOException { ); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPluginLoadFailure() throws IOException { final Path pathHome = createTempDir(); final Path plugins = pathHome.resolve("plugins"); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index f81bd012bfa63..07c6e927c2030 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -313,11 +313,11 @@ public void testUnmappedFieldWithHistogram() throws Exception { final String mappedFieldName = "price"; dataset.addAll( Arrays.asList( - createDocument(mappedFieldName, 103L), - createDocument(mappedFieldName, 51L), - createDocument(mappedFieldName, 56L), - 
createDocument(mappedFieldName, 105L), - createDocument(mappedFieldName, 25L) + createDocument(mappedFieldName, 103), + createDocument(mappedFieldName, 51), + createDocument(mappedFieldName, 56), + createDocument(mappedFieldName, 105), + createDocument(mappedFieldName, 25) ) ); @@ -1908,10 +1908,10 @@ public void testWithHistogramBucketMissing() throws IOException { final List>> dataset = new ArrayList<>(); dataset.addAll( Arrays.asList( - createDocument("price", 50L, "long", 1L), - createDocument("price", 60L, "long", 2L), - createDocument("price", 70L, "long", 3L), - createDocument("price", 62L, "long", 4L), + createDocument("price", 50, "long", 1L), + createDocument("price", 60, "long", 2L), + createDocument("price", 70, "long", 3L), + createDocument("price", 62, "long", 4L), createDocument("long", 5L) ) ); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index 99cba603974d7..b394063033637 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -50,7 +50,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.opensearch.Version; import org.opensearch.common.CheckedConsumer; import org.opensearch.common.collect.Tuple; import org.opensearch.common.lucene.search.Queries; @@ -91,7 +90,6 @@ import org.opensearch.search.aggregations.pipeline.InternalSimpleValue; import org.opensearch.search.aggregations.support.AggregationInspectionHelper; import org.opensearch.search.aggregations.support.ValueType; -import org.opensearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; @@ -395,7 +393,7 @@ public void testResetRootDocId() throws Exception { 
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.add(Queries.newNonNestedFilter(VersionUtils.randomVersion(random())), BooleanClause.Occur.MUST); + bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); bq.add(new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2"))), BooleanClause.Occur.MUST_NOT); InternalNested nested = searchAndReduce( @@ -687,7 +685,7 @@ public void testPreGetChildLeafCollectors() throws IOException { Filter filter = searchAndReduce( newSearcher(indexReader, false, true), - Queries.newNonNestedFilter(Version.CURRENT), + Queries.newNonNestedFilter(), filterAggregationBuilder, fieldType1, fieldType2 diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java index 8623d26be5726..acff8305938e8 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java @@ -57,7 +57,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; -import org.apache.lucene.util.FutureArrays; import org.opensearch.common.CheckedConsumer; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; @@ -408,7 +407,7 @@ public void visit(int docID, byte[] packedValue) { @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if (FutureArrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) { + if (Arrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) { return PointValues.Relation.CELL_CROSSES_QUERY; } return PointValues.Relation.CELL_OUTSIDE_QUERY; diff --git 
a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java index eb87888530e94..de0a31b9dc04b 100644 --- a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java @@ -58,6 +58,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -419,11 +420,6 @@ private static class CreateScorerOnceWeight extends Weight { this.weight = weight; } - @Override - public void extractTerms(Set terms) { - weight.extractTerms(terms); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return weight.explain(context, doc); @@ -483,5 +479,10 @@ public boolean equals(Object obj) { public int hashCode() { return 31 * classHash() + query.hashCode(); } + + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } } } diff --git a/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java b/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java index cc7200bbf78c4..7deb6845af607 100644 --- a/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java +++ b/server/src/test/java/org/opensearch/search/lookup/LeafFieldsLookupTests.java @@ -36,6 +36,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.StoredFieldVisitor; +import org.apache.lucene.index.VectorSimilarityFunction; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import 
org.opensearch.test.OpenSearchTestCase; @@ -79,6 +80,8 @@ public void setUp() throws Exception { 0, 0, 0, + 0, + VectorSimilarityFunction.EUCLIDEAN, false ); diff --git a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java index 588d63bffb3bd..30bb29dbf2aa8 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java @@ -49,6 +49,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.RandomApproximationQuery; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -72,7 +73,6 @@ import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; @@ -202,7 +202,9 @@ public void testUseIndexStats() throws IOException { Query query = new TermQuery(new Term("foo", "bar")); searcher.count(query); // will use index stats List results = profiler.getTree(); - assertEquals(0, results.size()); + assertEquals(1, results.size()); + ProfileResult result = results.get(0); + assertEquals(0, (long) result.getTimeBreakdown().get("build_scorer_count")); long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); @@ -255,6 +257,11 @@ public String toString(String field) { return getClass().getSimpleName(); } + @Override + public void visit(QueryVisitor visitor) { + visitor.visitLeaf(this); + } + @Override public boolean equals(Object obj) { return this == obj; @@ -268,10 +275,6 @@ public int hashCode() { @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, 
float boost) throws IOException { return new Weight(this) { - @Override - public void extractTerms(Set terms) { - throw new UnsupportedOperationException(); - } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { diff --git a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java index 07650d3c2a3e2..3f7761f3f18a0 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java @@ -50,7 +50,9 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; -import org.apache.lucene.queries.MinDocQuery; +import org.opensearch.lucene.queries.MinDocQuery; +import org.apache.lucene.queries.spans.SpanNearQuery; +import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; @@ -77,8 +79,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.search.spans.SpanNearQuery; -import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; @@ -722,7 +722,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false); } @@ -735,7 +734,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); 
QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, true); } @@ -748,7 +746,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false); } @@ -761,7 +758,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, true); } @@ -775,7 +771,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.from(5); searchContext.setSize(0); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); assertSortResults(searchContext.queryResult().topDocs().topDocs, (long) numDocs, false); } @@ -803,7 +798,6 @@ public void testEnhanceSortOnNumeric() throws Exception { searchContext.setTask(task); searchContext.setSize(10); QueryPhase.executeInternal(searchContext); - assertTrue(searchContext.sort().sort.getSort()[0].getCanUsePoints()); final TopDocs topDocs = searchContext.queryResult().topDocs().topDocs; long topValue = (long) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]; assertThat(topValue, greaterThan(afterValue)); diff --git a/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java index e53a67ebc7708..070855481966f 100644 --- a/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java +++ 
b/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java @@ -37,7 +37,6 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatPoint; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -45,6 +44,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; +import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.AssertingIndexSearcher; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.SortField; diff --git a/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java index 1576a95a9a411..80c573f3cc9ae 100644 --- a/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java @@ -32,8 +32,8 @@ package org.opensearch.indices.analysis; -import org.apache.lucene.analysis.util.TokenFilterFactory; -import org.apache.lucene.analysis.util.TokenizerFactory; +import org.apache.lucene.analysis.TokenFilterFactory; +import org.apache.lucene.analysis.TokenizerFactory; import org.opensearch.common.collect.MapBuilder; import org.opensearch.index.analysis.HunspellTokenFilterFactory; import org.opensearch.index.analysis.ShingleTokenFilterFactory; @@ -134,6 +134,7 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { .put("ngram", MovedToAnalysisCommon.class) .put("norwegianlightstem", MovedToAnalysisCommon.class) .put("norwegianminimalstem", MovedToAnalysisCommon.class) + .put("norwegiannormalization", MovedToAnalysisCommon.class) 
.put("patterncapturegroup", MovedToAnalysisCommon.class) .put("patternreplace", MovedToAnalysisCommon.class) .put("persiannormalization", MovedToAnalysisCommon.class) @@ -155,8 +156,11 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { .put("stemmeroverride", MovedToAnalysisCommon.class) .put("stop", StopTokenFilterFactory.class) .put("swedishlightstem", MovedToAnalysisCommon.class) + .put("swedishminimalstem", MovedToAnalysisCommon.class) .put("synonym", MovedToAnalysisCommon.class) .put("synonymgraph", MovedToAnalysisCommon.class) + .put("telugunormalization", MovedToAnalysisCommon.class) + .put("telugustem", MovedToAnalysisCommon.class) .put("trim", MovedToAnalysisCommon.class) .put("truncate", MovedToAnalysisCommon.class) .put("turkishlowercase", MovedToAnalysisCommon.class) @@ -210,10 +214,9 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { .put("delimitedboost", Void.class) // LUCENE-9574: test flags on tokens vs a bitmask and drops tokens that have all specified flags .put("dropifflagged", Void.class) + .put("japanesecompletion", Void.class) // LUCENE-9575: recognize arbitrary patterns that include punctuation .put("patterntyping", Void.class) - .put("telugustem", Void.class) - .put("telugunormalization", Void.class) .immutableMap(); static final Map> KNOWN_CHARFILTERS = new MapBuilder>() @@ -291,7 +294,7 @@ public Map> getPreConfiguredCharFilters() { public void testTokenizers() { Set missing = new TreeSet(); missing.addAll( - org.apache.lucene.analysis.util.TokenizerFactory.availableTokenizers() + org.apache.lucene.analysis.TokenizerFactory.availableTokenizers() .stream() .map(key -> key.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()) @@ -303,7 +306,7 @@ public void testTokenizers() { public void testCharFilters() { Set missing = new TreeSet(); missing.addAll( - org.apache.lucene.analysis.util.CharFilterFactory.availableCharFilters() + 
org.apache.lucene.analysis.CharFilterFactory.availableCharFilters() .stream() .map(key -> key.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()) @@ -315,7 +318,7 @@ public void testCharFilters() { public void testTokenFilters() { Set missing = new TreeSet(); missing.addAll( - org.apache.lucene.analysis.util.TokenFilterFactory.availableTokenFilters() + org.apache.lucene.analysis.TokenFilterFactory.availableTokenFilters() .stream() .map(key -> key.toLowerCase(Locale.ROOT)) .collect(Collectors.toSet()) diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java index cbeefa7349e16..241ae1170817a 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java @@ -34,7 +34,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; -import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -47,6 +46,7 @@ import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.AssertingIndexSearcher; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; diff --git a/test/framework/src/main/java/org/opensearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/opensearch/test/AbstractQueryTestCase.java index 97f0dde027d6b..7ce7903296a5a 100644 --- a/test/framework/src/main/java/org/opensearch/test/AbstractQueryTestCase.java +++ 
b/test/framework/src/main/java/org/opensearch/test/AbstractQueryTestCase.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.spans.SpanBoostQuery; import org.opensearch.OpenSearchParseException; import org.opensearch.Version; import org.opensearch.action.support.PlainActionFuture; @@ -545,15 +544,8 @@ private void assertLuceneQuery(QB queryBuilder, Query query, QueryShardContext c } if (query != null) { if (queryBuilder.boost() != AbstractQueryBuilder.DEFAULT_BOOST) { - assertThat( - query, - either(instanceOf(BoostQuery.class)).or(instanceOf(SpanBoostQuery.class)).or(instanceOf(MatchNoDocsQuery.class)) - ); - if (query instanceof SpanBoostQuery) { - SpanBoostQuery spanBoostQuery = (SpanBoostQuery) query; - assertThat(spanBoostQuery.getBoost(), equalTo(queryBuilder.boost())); - query = spanBoostQuery.getQuery(); - } else if (query instanceof BoostQuery) { + assertThat(query, either(instanceOf(BoostQuery.class)).or(instanceOf(MatchNoDocsQuery.class))); + if (query instanceof BoostQuery) { BoostQuery boostQuery = (BoostQuery) query; if (boostQuery.getQuery() instanceof MatchNoDocsQuery == false) { assertThat(boostQuery.getBoost(), equalTo(queryBuilder.boost())); diff --git a/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java index e0007e224591f..1bcde48900364 100644 --- a/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java @@ -100,7 +100,7 @@ public static void corruptFile(Random random, Path... 
files) throws IOException input.seek(input.length() - CodecUtil.footerLength()); checksumAfterCorruption = input.getChecksum(); input.seek(input.length() - 8); - actualChecksumAfterCorruption = input.readLong(); + actualChecksumAfterCorruption = CodecUtil.readBELong(input); } // we need to add assumptions here that the checksums actually really don't match there is a small chance to get collisions // in the checksum which is ok though.... diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java index c37eb68a42836..421c022b38e9d 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java @@ -32,7 +32,6 @@ package org.opensearch.test.hamcrest; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TotalHits; import org.opensearch.OpenSearchException; @@ -564,14 +563,6 @@ public static T assertBooleanSubQuery(Query query, Class su return subqueryType.cast(q.clauses().get(i).getQuery()); } - public static T assertDisjunctionSubQuery(Query query, Class subqueryType, int i) { - assertThat(query, instanceOf(DisjunctionMaxQuery.class)); - DisjunctionMaxQuery q = (DisjunctionMaxQuery) query; - assertThat(q.getDisjuncts().size(), greaterThan(i)); - assertThat(q.getDisjuncts().get(i), instanceOf(subqueryType)); - return subqueryType.cast(q.getDisjuncts().get(i)); - } - /** * Run the request from a given builder and check that it throws an exception of the right type */ From 10b9986e1209da2aeec69181eea87702c01dfdfa Mon Sep 17 00:00:00 2001 From: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> Date: Tue, 15 Mar 2022 21:01:28 +0000 Subject: [PATCH 34/46] Override Default Distribution Download Url 
with Custom Distribution Url when it is passed from Plugin (#2420) * Override default Distribution Download URL with custom Distribution URL Signed-off-by: Rishikesh1159 * Accidently made commit to main branch, this revives it.Override default Distribution Download URL with custom Distribution URL Signed-off-by: Rishikesh1159 * Override Default DistributionDownloadUrl with customDistribution Url passed from Plugins Signed-off-by: Rishikesh1159 --- DEVELOPER_GUIDE.md | 12 +++++ .../gradle/DistributionDownloadPlugin.java | 25 +++++---- .../DistributionDownloadPluginTests.java | 54 +++++++++++++++++++ 3 files changed, 82 insertions(+), 9 deletions(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 58444441e3258..9b1bc933eb1e3 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -33,6 +33,8 @@ - [runtimeOnly](#runtimeonly) - [compileOnly](#compileonly) - [testImplementation](#testimplementation) + - [Gradle Plugins](#gradle-plugins) + - [Distribution Download Plugin](#distribution-download-plugin) - [Misc](#misc) - [git-secrets](#git-secrets) - [Installation](#installation) @@ -361,6 +363,16 @@ somehow. OpenSearch plugins use this configuration to include dependencies that Code that is on the classpath for compiling tests that are part of this project but not production code. The canonical example of this is `junit`. +### Gradle Plugins + +#### Distribution Download Plugin + +The Distribution Download plugin downloads the latest version of OpenSearch by default, and supports overriding this behavior by setting `customDistributionUrl`. 
+``` +./gradlew integTest -PcustomDistributionUrl="https://ci.opensearch.org/ci/dbc/bundle-build/1.2.0/1127/linux/x64/dist/opensearch-1.2.0-linux-x64.tar.gz" +``` + + ## Misc ### git-secrets diff --git a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java index 843a7f7d2716d..fccdc49ef6fc9 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java @@ -195,15 +195,22 @@ private static void setupDownloadServiceRepo(Project project) { if (project.getRepositories().findByName(DOWNLOAD_REPO_NAME) != null) { return; } - addIvyRepo( - project, - DOWNLOAD_REPO_NAME, - "https://artifacts.opensearch.org", - FAKE_IVY_GROUP, - "/releases" + RELEASE_PATTERN_LAYOUT, - "/release-candidates" + RELEASE_PATTERN_LAYOUT - ); - addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT); + Object customDistributionUrl = project.findProperty("customDistributionUrl"); + // checks if custom Distribution Url has been passed by user from plugins + if (customDistributionUrl != null) { + addIvyRepo(project, DOWNLOAD_REPO_NAME, customDistributionUrl.toString(), FAKE_IVY_GROUP, ""); + addIvyRepo(project, SNAPSHOT_REPO_NAME, customDistributionUrl.toString(), FAKE_SNAPSHOT_IVY_GROUP, ""); + } else { + addIvyRepo( + project, + DOWNLOAD_REPO_NAME, + "https://artifacts.opensearch.org", + FAKE_IVY_GROUP, + "/releases" + RELEASE_PATTERN_LAYOUT, + "/release-candidates" + RELEASE_PATTERN_LAYOUT + ); + addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT); + } addIvyRepo2(project, DOWNLOAD_REPO_NAME_ES, "https://artifacts-no-kpi.elastic.co", FAKE_IVY_GROUP_ES); addIvyRepo2(project, SNAPSHOT_REPO_NAME_ES, "https://snapshots-no-kpi.elastic.co", 
FAKE_SNAPSHOT_IVY_GROUP_ES); diff --git a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java index 98feb3ef2ac93..446c94acc7ad4 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java @@ -32,6 +32,7 @@ package org.opensearch.gradle; +import org.gradle.api.internal.artifacts.repositories.DefaultIvyArtifactRepository; import org.opensearch.gradle.OpenSearchDistribution.Platform; import org.opensearch.gradle.OpenSearchDistribution.Type; import org.opensearch.gradle.info.BuildParams; @@ -79,6 +80,59 @@ public void testVersionDefault() { assertEquals(distro.getVersion(), VersionProperties.getOpenSearch()); } + public void testCustomDistributionUrlWithUrl() { + Project project = ProjectBuilder.builder().build(); + String customUrl = "https://artifacts.opensearch.org/custom"; + project.getExtensions().getExtraProperties().set("customDistributionUrl", customUrl); + DistributionDownloadPlugin plugin = new DistributionDownloadPlugin(); + plugin.apply(project); + assertEquals(4, project.getRepositories().size()); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(), + customUrl + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(), + customUrl + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(), + "https://artifacts-no-kpi.elastic.co" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(), + "https://snapshots-no-kpi.elastic.co" + ); + + } + + public void testCustomDistributionUrlWithoutUrl() { + Project project = 
ProjectBuilder.builder().build(); + DistributionDownloadPlugin plugin = new DistributionDownloadPlugin(); + plugin.apply(project); + assertEquals(5, project.getRepositories().size()); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(), + "https://artifacts.opensearch.org" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads2")).getUrl().toString(), + "https://artifacts.opensearch.org" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(), + "https://artifacts.opensearch.org" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(), + "https://artifacts-no-kpi.elastic.co" + ); + assertEquals( + ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(), + "https://snapshots-no-kpi.elastic.co" + ); + } + public void testBadVersionFormat() { assertDistroError( createProject(null, false), From 77d106015fdc28ba29bb758ba76e0cf790f61da3 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Wed, 16 Mar 2022 11:17:33 -0400 Subject: [PATCH 35/46] Fixing PluginsServiceTests (post Lucene 9 update) (#2484) Signed-off-by: Andriy Redko --- .../test/java/org/opensearch/plugins/PluginsServiceTests.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index 572d2c322153f..bddc61211592e 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -648,7 +648,6 @@ public void testJarHellTransitiveMap() throws Exception { assertThat(deps, containsInAnyOrder(pluginJar.toUri().toURL(), dep1Jar.toUri().toURL(), 
dep2Jar.toUri().toURL())); } - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testNonExtensibleDep() throws Exception { // This test opens a child classloader, reading a jar under the test temp // dir (a dummy plugin). Classloaders are closed by GC, so when test teardown @@ -791,7 +790,6 @@ public FakePlugin() { } - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testExistingMandatoryInstalledPlugin() throws IOException { // This test opens a child classloader, reading a jar under the test temp // dir (a dummy plugin). Classloaders are closed by GC, so when test teardown @@ -825,7 +823,6 @@ public void testExistingMandatoryInstalledPlugin() throws IOException { newPluginsService(settings); } - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPluginFromParentClassLoader() throws IOException { final Path pathHome = createTempDir(); final Path plugins = pathHome.resolve("plugins"); @@ -863,7 +860,6 @@ public void testPluginFromParentClassLoader() throws IOException { ); } - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPluginLoadFailure() throws IOException { final Path pathHome = createTempDir(); final Path plugins = pathHome.resolve("plugins"); From 02ffd4c5483903d0a91f4196cabd320aebcc71d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Vl=C4=8Dek?= Date: Wed, 16 Mar 2022 18:03:22 +0100 Subject: [PATCH 36/46] Rename reference to project OpenSearch was forked from (#2483) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixing log message and renaming internal variable. Improving internal JavaDoc. 
Signed-off-by: Lukáš Vlček --- .../OpenSearchClientYamlSuiteTestCase.java | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java index ca2659e9523e6..f30b1bfdd1e6b 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java @@ -143,10 +143,15 @@ public void initAndResetContext() throws Exception { validateSpec(restSpec); final List hosts = getClusterHosts(); Tuple versionVersionTuple = readVersionsFromCatNodes(adminClient()); - final Version esVersion = versionVersionTuple.v1(); + final Version minVersion = versionVersionTuple.v1(); final Version masterVersion = versionVersionTuple.v2(); - logger.info("initializing client, minimum es version [{}], master version, [{}], hosts {}", esVersion, masterVersion, hosts); - clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, masterVersion); + logger.info( + "initializing client, minimum OpenSearch version [{}], master version, [{}], hosts {}", + minVersion, + masterVersion, + hosts + ); + clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, minVersion, masterVersion); restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, randomizeContentType()); adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, false); final String[] denylist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); @@ -321,6 +326,13 @@ private static void validateSpec(ClientYamlSuiteRestSpec restSpec) { } } + /** + * Detect minimal node version and master node version of cluster using REST Client. 
+ * + * @param restClient REST client used to discover cluster nodes + * @return {@link Tuple} of [minimal node version, master node version] + * @throws IOException When _cat API output parsing fails + */ private Tuple readVersionsFromCatNodes(RestClient restClient) throws IOException { // we simply go to the _cat/nodes API and parse all versions in the cluster final Request request = new Request("GET", "/_cat/nodes"); From f52f6f5052d8c610c38b8d6800821ea65fd61413 Mon Sep 17 00:00:00 2001 From: John Mazanec Date: Wed, 16 Mar 2022 13:28:24 -0400 Subject: [PATCH 37/46] Add default for EnginePlugin.getEngineFactory (#2419) Adds default implementation for getEngineFactory in EnginePlugin. The default just returns Optional.empty(), allowing plugin developers to implement this plugin without implementing this method. Signed-off-by: John Mazanec --- .../java/org/opensearch/plugins/EnginePlugin.java | 4 +++- .../index/engine/EngineConfigFactoryTests.java | 15 +++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/plugins/EnginePlugin.java b/server/src/main/java/org/opensearch/plugins/EnginePlugin.java index 4c3a07d7b98d9..31844dbdaec2b 100644 --- a/server/src/main/java/org/opensearch/plugins/EnginePlugin.java +++ b/server/src/main/java/org/opensearch/plugins/EnginePlugin.java @@ -56,7 +56,9 @@ public interface EnginePlugin { * * @return an optional engine factory */ - Optional getEngineFactory(IndexSettings indexSettings); + default Optional getEngineFactory(IndexSettings indexSettings) { + return Optional.empty(); + } /** * EXPERT: diff --git a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java index a6bc87d53c004..8030619500278 100644 --- a/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java +++ b/server/src/test/java/org/opensearch/index/engine/EngineConfigFactoryTests.java @@ 
-146,6 +146,21 @@ public void testCreateCodecServiceFromFactory() { assertNotNull(config.getCodec()); } + public void testGetEngineFactory() { + final EngineFactory engineFactory = config -> null; + EnginePlugin enginePluginThatImplementsGetEngineFactory = new EnginePlugin() { + @Override + public Optional getEngineFactory(IndexSettings indexSettings) { + return Optional.of(engineFactory); + } + }; + assertEquals(engineFactory, enginePluginThatImplementsGetEngineFactory.getEngineFactory(null).orElse(null)); + + EnginePlugin enginePluginThatDoesNotImplementsGetEngineFactory = new EnginePlugin() { + }; + assertFalse(enginePluginThatDoesNotImplementsGetEngineFactory.getEngineFactory(null).isPresent()); + } + private static class FooEnginePlugin extends Plugin implements EnginePlugin { @Override public Optional getEngineFactory(final IndexSettings indexSettings) { From bd21043d6536406a5b246f57d5c317fc51fd1ba3 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Wed, 16 Mar 2022 12:30:48 -0500 Subject: [PATCH 38/46] [Unmute] IndexPrimaryRelocationIT (#2488) Unmutes IndexPrimaryRelocationIT.testPrimaryRelocationWhileIndexing which was fixed by LuceneChangesSnapshot using accurate ops history. 
Signed-off-by: Nicholas Walter Knize --- .../opensearch/indices/recovery/IndexPrimaryRelocationIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java index 3f174dd0fdd6a..7fd2466647272 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -56,7 +56,6 @@ public class IndexPrimaryRelocationIT extends OpenSearchIntegTestCase { private static final int RELOCATION_COUNT = 15; - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2063") public void testPrimaryRelocationWhileIndexing() throws Exception { internalCluster().ensureAtLeastNumDataNodes(randomIntBetween(2, 3)); client().admin() From 05a58192433f6e007ade61b4b4f21d0f068d50e0 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Wed, 16 Mar 2022 14:47:25 -0500 Subject: [PATCH 39/46] [Upgrade] Lucene 9.1.0-snapshot-ea989fe8f30 (#2487) * [Upgrade] Lucene 9.1.0-snapshot-ea989fe8f30 Upgrades from Lucene 9.0.0 to 9.1.0-snapshot-ea989fe8f30 in preparation for 9.1.0 GA. 
Signed-off-by: Nicholas Walter Knize * Add spanishplural token filter Signed-off-by: Nicholas Walter Knize * fix KNOWN_TOKENIZERS Signed-off-by: Nicholas Walter Knize --- .../gradle/plugin/PluginBuildPlugin.groovy | 2 +- .../TestingConventionsPrecommitPlugin.java | 2 +- .../forbidden/opensearch-test-signatures.txt | 4 +- buildSrc/version.properties | 2 +- .../client/IndicesRequestConvertersTests.java | 2 +- .../settings/KeyStoreCommandTestCase.java | 2 +- .../plugins/InstallPluginCommandTests.java | 2 +- .../plugins/ListPluginsCommandTests.java | 2 +- .../plugins/RemovePluginCommandTests.java | 2 +- .../core/internal/io/IOUtilsTests.java | 4 +- .../stats/MatrixStatsAggregatorTests.java | 2 +- ...ncatenateGraphTokenFilterFactoryTests.java | 4 +- .../FlattenGraphTokenFilterFactoryTests.java | 4 +- .../common/NGramTokenizerFactoryTests.java | 2 +- .../RemoveDuplicatesFilterFactoryTests.java | 4 +- .../common/SynonymsAnalysisTests.java | 2 +- .../common/UniqueTokenFilterTests.java | 2 +- .../WhitespaceTokenizerFactoryTests.java | 2 +- .../lucene-expressions-9.0.0.jar.sha1 | 1 - ...ssions-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../mapper/TokenCountFieldMapperTests.java | 6 +- .../join/query/HasChildQueryBuilder.java | 2 +- .../ChildrenToParentAggregatorTests.java | 2 +- .../ParentToChildrenAggregatorTests.java | 2 +- ...rcolatorMatchedSlotSubFetchPhaseTests.java | 2 +- .../reindex/AsyncBulkByScrollActionTests.java | 2 +- .../ClientScrollableHitSourceTests.java | 2 +- .../index/reindex/RoundTripTests.java | 2 +- .../UpdateByQueryWhileModifyingTests.java | 2 +- .../netty4/Netty4ClientYamlTestSuiteIT.java | 2 +- .../lucene-analysis-icu-9.0.0.jar.sha1 | 1 - ...is-icu-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../index/analysis/IcuAnalyzerTests.java | 2 +- .../analysis/IcuTokenizerFactoryTests.java | 2 +- .../IndexableBinaryStringToolsTests.java | 4 +- .../lucene-analysis-kuromoji-9.0.0.jar.sha1 | 1 - ...romoji-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + 
.../index/analysis/KuromojiAnalysisTests.java | 2 +- .../lucene-analysis-nori-9.0.0.jar.sha1 | 1 - ...s-nori-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../lucene-analysis-phonetic-9.0.0.jar.sha1 | 1 - ...onetic-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../analysis/SimplePhoneticAnalysisTests.java | 2 +- .../lucene-analysis-smartcn-9.0.0.jar.sha1 | 1 - ...martcn-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../lucene-analysis-stempel-9.0.0.jar.sha1 | 1 - ...tempel-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../analysis/AnalysisPolishFactoryTests.java | 4 +- .../lucene-analysis-morfologik-9.0.0.jar.sha1 | 1 - ...ologik-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../analysis/UkrainianAnalyzerProvider.java | 2 +- .../ingest/attachment/TikaDocTests.java | 4 +- .../AnnotatedTextHighlighterTests.java | 2 +- .../repositories/hdfs/TestingFs.java | 2 +- .../cli/EvilEnvironmentAwareCommandTests.java | 2 +- .../settings/EvilKeyStoreWrapperTests.java | 2 +- .../MixedClusterClientYamlTestSuiteIT.java | 3 +- .../java/org/opensearch/search/CCSDuelIT.java | 2 +- .../MultiClusterSearchYamlTestSuiteIT.java | 2 +- .../bootstrap/SpawnerNoBootstrapTests.java | 2 +- .../UpgradeClusterClientYamlTestSuiteIT.java | 2 +- ...okeTestMultiNodeClientYamlTestSuiteIT.java | 3 +- qa/wildfly/build.gradle | 2 +- .../org/opensearch/wildfly/WildflyIT.java | 4 +- .../test/rest/ClientYamlTestSuiteIT.java | 2 +- server/build.gradle | 2 +- .../lucene-analysis-common-9.0.0.jar.sha1 | 1 - ...common-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../lucene-backward-codecs-9.0.0.jar.sha1 | 1 - ...codecs-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + server/licenses/lucene-core-9.0.0.jar.sha1 | 1 - ...e-core-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../licenses/lucene-grouping-9.0.0.jar.sha1 | 1 - ...ouping-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../lucene-highlighter-9.0.0.jar.sha1 | 1 - ...ighter-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + server/licenses/lucene-join-9.0.0.jar.sha1 | 1 - 
...e-join-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + server/licenses/lucene-memory-9.0.0.jar.sha1 | 1 - ...memory-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + server/licenses/lucene-misc-9.0.0.jar.sha1 | 1 - ...e-misc-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + server/licenses/lucene-queries-9.0.0.jar.sha1 | 1 - ...ueries-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../lucene-queryparser-9.0.0.jar.sha1 | 1 - ...parser-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + server/licenses/lucene-sandbox-9.0.0.jar.sha1 | 1 - ...andbox-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../lucene-spatial-extras-9.0.0.jar.sha1 | 1 - ...extras-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../licenses/lucene-spatial3d-9.0.0.jar.sha1 | 1 - ...tial3d-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + server/licenses/lucene-suggest-9.0.0.jar.sha1 | 1 - ...uggest-9.1.0-snapshot-ea989fe8f30.jar.sha1 | 1 + .../RecoveryWithUnsupportedIndicesIT.java | 4 +- .../cluster/allocation/ClusterRerouteIT.java | 2 +- .../decider/DiskThresholdDeciderIT.java | 6 +- .../discovery/DiskDisruptionIT.java | 2 +- .../index/engine/MaxDocsLimitIT.java | 6 +- .../indices/stats/IndexStatsIT.java | 2 +- .../org/opensearch/recovery/RelocationIT.java | 2 +- .../recovery/TruncatedRecoveryIT.java | 4 +- .../search/StressSearchServiceReaperIT.java | 2 +- .../basic/SearchWithRandomExceptionsIT.java | 2 +- .../basic/SearchWithRandomIOExceptionsIT.java | 2 +- .../highlight/HighlighterSearchIT.java | 4 +- .../search/functionscore/QueryRescorerIT.java | 2 +- .../ProfilerSingleNodeNetworkTest.java | 2 +- .../search/profile/query/QueryProfilerIT.java | 2 +- .../search/query/SearchQueryIT.java | 4 +- .../opensearch/search/sort/FieldSortIT.java | 2 +- .../suggest/CompletionSuggestSearchIT.java | 2 +- .../ContextCompletionSuggestSearchIT.java | 2 +- .../versioning/SimpleVersioningIT.java | 2 +- .../search/grouping/CollapseTopFieldDocs.java | 2 +- .../src/main/java/org/opensearch/Version.java | 2 +- .../search/BottomSortValuesCollector.java | 2 +- 
.../org/opensearch/common/lucene/Lucene.java | 4 +- .../lucene/search/MultiPhrasePrefixQuery.java | 2 +- .../common/lucene/search/Queries.java | 62 ++++++++++++ .../opensearch/index/codec/CodecService.java | 8 +- .../PerFieldMappingPostingFormatCodec.java | 4 +- .../index/engine/TranslogLeafReader.java | 2 +- .../BytesRefFieldComparatorSource.java | 2 +- .../DoubleValuesComparatorSource.java | 4 +- .../FloatValuesComparatorSource.java | 4 +- .../LongValuesComparatorSource.java | 4 +- .../queries/SearchAfterSortedDocQuery.java | 2 +- .../bucket/composite/CompositeAggregator.java | 4 +- .../internal/ExitableDirectoryReader.java | 98 ++++++++++++++++--- .../search/sort/GeoDistanceSortBuilder.java | 4 +- .../indices/TransportAnalyzeActionTests.java | 4 +- .../action/bulk/BulkShardRequestTests.java | 2 +- .../BottomSortValuesCollectorTests.java | 2 +- .../action/search/DfsQueryPhaseTests.java | 2 +- .../action/search/FetchSearchPhaseTests.java | 2 +- .../termvectors/GetTermVectorsTests.java | 2 +- .../BalanceUnbalancedClusterTests.java | 2 +- .../java/org/opensearch/common/UUIDTests.java | 2 +- .../blobstore/fs/FsBlobContainerTests.java | 6 +- .../common/compress/DeflateCompressTests.java | 4 +- .../DeflateCompressedXContentTests.java | 2 +- .../common/geo/GeoWKTShapeParserTests.java | 2 +- .../common/io/FileSystemUtilsTests.java | 2 +- .../opensearch/common/lucene/LuceneTests.java | 6 +- .../common/lucene/ShardCoreKeyMapTests.java | 2 +- .../search/function/MinScoreScorerTests.java | 4 +- .../morelikethis/XMoreLikeThisTests.java | 6 +- .../common/util/BytesRefHashTests.java | 2 +- .../opensearch/env/NodeEnvironmentTests.java | 2 +- .../GatewayMetaStatePersistedStateTests.java | 2 +- .../IncrementalClusterStateWriterTests.java | 2 +- .../gateway/MetadataStateFormatTests.java | 4 +- .../opensearch/index/IndexModuleTests.java | 2 +- .../java/org/opensearch/index/IndexTests.java | 2 +- .../index/analysis/AnalysisRegistryTests.java | 2 +- 
.../index/analysis/CustomNormalizerTests.java | 4 +- .../opensearch/index/codec/CodecTests.java | 14 +-- .../engine/CompletionStatsCacheTests.java | 4 +- .../index/engine/InternalEngineTests.java | 11 +-- .../index/engine/LiveVersionMapTests.java | 4 +- .../index/engine/ReadOnlyEngineTests.java | 2 +- .../RecoverySourcePruneMergePolicyTests.java | 2 +- .../index/engine/VersionValueTests.java | 2 +- .../AbstractStringFieldDataTestCase.java | 2 +- .../plain/HalfFloatFielddataTests.java | 2 +- .../mapper/DocumentFieldMapperTests.java | 2 +- .../index/mapper/KeywordFieldMapperTests.java | 6 +- .../index/mapper/NumberFieldTypeTests.java | 2 +- .../index/mapper/TextFieldMapperTests.java | 6 +- .../org/opensearch/index/mapper/UidTests.java | 2 +- .../index/query/IntervalBuilderTests.java | 4 +- .../MatchBoolPrefixQueryBuilderTests.java | 2 +- .../index/query/MatchQueryBuilderTests.java | 4 +- .../index/query/QueryShardContextTests.java | 2 +- .../query/QueryStringQueryBuilderTests.java | 2 +- .../query/SimpleQueryStringBuilderTests.java | 4 +- .../query/SpanMultiTermQueryBuilderTests.java | 2 +- .../FunctionScoreEquivalenceTests.java | 4 +- .../functionscore/FunctionScoreTests.java | 2 +- .../reindex/BulkByScrollResponseTests.java | 2 +- .../reindex/BulkByScrollTaskStatusTests.java | 4 +- .../reindex/DeleteByQueryRequestTests.java | 2 +- .../reindex/UpdateByQueryRequestTests.java | 2 +- .../index/search/MultiMatchQueryTests.java | 2 +- .../search/nested/NestedSortingTests.java | 2 +- .../index/shard/IndexReaderWrapperTests.java | 2 +- .../index/shard/NewPathForShardTests.java | 2 +- .../RemoveCorruptedShardDataCommandTests.java | 2 +- .../index/shard/ShardSplittingQueryTests.java | 2 +- .../index/shard/ShardUtilsTests.java | 2 +- .../similarity/ScriptedSimilarityTests.java | 2 +- .../store/ByteSizeCachingDirectoryTests.java | 2 +- .../opensearch/index/store/StoreTests.java | 6 +- .../index/translog/TestTranslog.java | 2 +- .../index/translog/TranslogTests.java | 10 +- 
.../indices/analysis/AnalysisModuleTests.java | 4 +- .../recovery/RecoverySourceHandlerTests.java | 4 +- .../DeDuplicatingTokenFilterTests.java | 2 +- .../TruncateTokenFilterTests.java | 2 +- .../CollapsingTopDocsCollectorTests.java | 4 +- .../index/ShuffleForcedMergePolicyTests.java | 2 +- .../similarity/LegacyBM25SimilarityTests.java | 2 +- ...ndomBinaryDocValuesRangeQueryTestCase.java | 2 +- .../BinaryDocValuesRangeQueryTests.java | 2 +- .../lucene/queries/BlendedTermQueryTests.java | 4 +- ...rRandomBinaryDocValuesRangeQueryTests.java | 2 +- ...gRandomBinaryDocValuesRangeQueryTests.java | 2 +- .../lucene/queries/MinDocQueryTests.java | 4 +- .../SearchAfterSortedDocQueryTests.java | 4 +- .../queries/SpanMatchNoDocsQueryTests.java | 4 +- .../CustomUnifiedHighlighterTests.java | 2 +- .../monitor/fs/FsHealthServiceTests.java | 4 +- .../java/org/opensearch/node/NodeTests.java | 2 +- .../plugins/PluginsServiceTests.java | 2 +- .../BlobStoreRepositoryRestoreTests.java | 2 +- .../repositories/fs/FsRepositoryTests.java | 4 +- .../search/DefaultSearchContextTests.java | 2 +- .../search/SearchCancellationTests.java | 4 +- .../opensearch/search/SearchHitsTests.java | 2 +- .../MultiBucketCollectorTests.java | 2 +- .../BestBucketsDeferringCollectorTests.java | 2 +- .../bucket/BucketsAggregatorTests.java | 2 +- .../bucket/GlobalAggregatorTests.java | 2 +- .../composite/CompositeAggregatorTests.java | 6 +- .../CompositeValuesCollectorQueueTests.java | 2 +- .../bucket/filter/FilterAggregatorTests.java | 2 +- .../bucket/filter/FiltersAggregatorTests.java | 2 +- .../geogrid/GeoGridAggregatorTestCase.java | 2 +- .../AutoDateHistogramAggregatorTests.java | 2 +- .../DateHistogramAggregatorTestCase.java | 2 +- .../DateHistogramAggregatorTests.java | 2 +- .../DateRangeHistogramAggregatorTests.java | 2 +- .../histogram/InternalHistogramTests.java | 2 +- .../InternalVariableWidthHistogramTests.java | 2 +- .../NumericHistogramAggregatorTests.java | 2 +- 
.../RangeHistogramAggregatorTests.java | 2 +- ...VariableWidthHistogramAggregatorTests.java | 2 +- .../missing/MissingAggregatorTests.java | 2 +- .../bucket/nested/NestedAggregatorTests.java | 2 +- .../nested/ReverseNestedAggregatorTests.java | 2 +- .../range/BinaryRangeAggregatorTests.java | 2 +- .../range/DateRangeAggregatorTests.java | 2 +- .../bucket/range/IpRangeAggregatorTests.java | 2 +- .../bucket/range/RangeAggregatorTests.java | 2 +- .../BestDocsDeferringCollectorTests.java | 2 +- .../sampler/DiversifiedSamplerTests.java | 2 +- .../terms/BinaryTermsAggregatorTests.java | 2 +- .../terms/KeywordTermsAggregatorTests.java | 2 +- .../terms/NumericTermsAggregatorTests.java | 2 +- .../terms/RareTermsAggregatorTests.java | 2 +- .../SignificantTermsAggregatorTests.java | 2 +- .../bucket/terms/TermsAggregatorTests.java | 2 +- .../metrics/AvgAggregatorTests.java | 2 +- .../metrics/CardinalityAggregatorTests.java | 2 +- .../metrics/ExtendedStatsAggregatorTests.java | 2 +- .../metrics/GeoBoundsAggregatorTests.java | 2 +- .../metrics/GeoCentroidAggregatorTests.java | 2 +- .../HDRPercentileRanksAggregatorTests.java | 2 +- .../HDRPercentilesAggregatorTests.java | 2 +- .../metrics/InternalTopHitsTests.java | 2 +- .../metrics/MaxAggregatorTests.java | 2 +- ...edianAbsoluteDeviationAggregatorTests.java | 2 +- .../metrics/MinAggregatorTests.java | 2 +- .../ScriptedMetricAggregatorTests.java | 2 +- .../metrics/StatsAggregatorTests.java | 2 +- .../metrics/SumAggregatorTests.java | 2 +- ...TDigestPercentileRanksAggregatorTests.java | 2 +- .../TDigestPercentilesAggregatorTests.java | 2 +- .../metrics/TopHitsAggregatorTests.java | 2 +- .../metrics/ValueCountAggregatorTests.java | 2 +- .../metrics/WeightedAvgAggregatorTests.java | 2 +- .../pipeline/AvgBucketAggregatorTests.java | 2 +- .../pipeline/BucketScriptAggregatorTests.java | 2 +- .../CumulativeSumAggregatorTests.java | 2 +- .../pipeline/DerivativeAggregatorTests.java | 2 +- .../pipeline/MovFnAggrgatorTests.java | 2 +- 
.../support/MissingValuesTests.java | 2 +- .../search/collapse/CollapseBuilderTests.java | 2 +- .../highlight/PlainHighlighterTests.java | 4 +- .../search/geo/GeoShapeQueryTests.java | 2 +- .../profile/query/QueryProfilerTests.java | 6 +- .../profile/query/RandomQueryGenerator.java | 2 +- .../search/query/QueryPhaseTests.java | 7 +- .../searchafter/SearchAfterBuilderTests.java | 2 +- .../slice/DocValuesSliceQueryTests.java | 4 +- .../search/slice/SliceBuilderTests.java | 2 +- .../search/slice/TermsSliceQueryTests.java | 4 +- .../search/sort/FieldSortBuilderTests.java | 4 +- .../opensearch/watcher/FileWatcherTests.java | 2 +- .../bootstrap/BootstrapForTesting.java | 2 +- .../cluster/DataStreamTestHelper.java | 2 +- .../CoordinationStateTestCluster.java | 2 +- .../cluster/routing/TestShardRouting.java | 2 +- .../service/FakeThreadPoolMasterService.java | 2 +- .../common/io/PathUtilsForTesting.java | 2 +- .../opensearch/common/util/MockBigArrays.java | 2 +- .../common/util/MockPageCacheRecycler.java | 2 +- .../org/opensearch/geo/GeometryTestUtils.java | 2 +- .../org/opensearch/index/MapperTestUtils.java | 2 +- .../index/MockEngineFactoryPlugin.java | 2 +- .../index/mapper/MapperServiceTestCase.java | 2 +- .../OpenSearchBaseDirectoryTestCase.java | 6 +- .../analysis/AnalysisFactoryTestCase.java | 8 +- ...chMockAPIBasedRepositoryIntegTestCase.java | 2 +- .../aggregations/AggregatorTestCase.java | 10 +- .../org/opensearch/test/CorruptionUtils.java | 2 +- .../opensearch/test/FieldMaskingReader.java | 2 +- .../opensearch/test/InternalTestCluster.java | 4 +- .../opensearch/test/MockKeywordPlugin.java | 2 +- .../test/OpenSearchIntegTestCase.java | 2 +- .../opensearch/test/OpenSearchTestCase.java | 10 +- .../test/OpenSearchTokenStreamTestCase.java | 6 +- .../test/client/RandomizingClient.java | 2 +- .../test/engine/MockEngineSupport.java | 4 +- .../test/hamcrest/OpenSearchAssertions.java | 4 +- .../OpenSearchClientYamlSuiteTestCase.java | 2 +- 
.../test/store/MockFSDirectoryFactory.java | 8 +- .../test/transport/MockTransport.java | 2 +- .../test/test/InternalTestClusterTests.java | 2 +- 320 files changed, 555 insertions(+), 417 deletions(-) delete mode 100644 modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-analysis-common-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-analysis-common-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 
100644 server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-core-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-core-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-grouping-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-join-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-join-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-memory-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-memory-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-misc-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-misc-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-queries-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-queries-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-9.1.0-snapshot-ea989fe8f30.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-9.0.0.jar.sha1 create mode 100644 server/licenses/lucene-suggest-9.1.0-snapshot-ea989fe8f30.jar.sha1 diff --git 
a/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy index 427e3f2740a89..31677965ab0d3 100644 --- a/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy @@ -123,7 +123,7 @@ class PluginBuildPlugin implements Plugin { naming.clear() naming { Tests { - baseClass 'org.apache.lucene.util.LuceneTestCase' + baseClass 'org.apache.lucene.tests.util.LuceneTestCase' } IT { baseClass 'org.opensearch.test.OpenSearchIntegTestCase' diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/TestingConventionsPrecommitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/TestingConventionsPrecommitPlugin.java index 4c965d6a0fe90..08376e76c9c20 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/TestingConventionsPrecommitPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/TestingConventionsPrecommitPlugin.java @@ -43,7 +43,7 @@ public TaskProvider createTask(Project project) { .register("testingConventions", TestingConventionsTasks.class); testingConventions.configure(t -> { TestingConventionRule testsRule = t.getNaming().maybeCreate("Tests"); - testsRule.baseClass("org.apache.lucene.util.LuceneTestCase"); + testsRule.baseClass("org.apache.lucene.tests.util.LuceneTestCase"); TestingConventionRule itRule = t.getNaming().maybeCreate("IT"); itRule.baseClass("org.opensearch.test.OpenSearchIntegTestCase"); itRule.baseClass("org.opensearch.test.rest.OpenSearchRestTestCase"); diff --git a/buildSrc/src/main/resources/forbidden/opensearch-test-signatures.txt b/buildSrc/src/main/resources/forbidden/opensearch-test-signatures.txt index 766e13878cc25..aeb5e25decf62 100644 --- a/buildSrc/src/main/resources/forbidden/opensearch-test-signatures.txt +++ 
b/buildSrc/src/main/resources/forbidden/opensearch-test-signatures.txt @@ -19,9 +19,9 @@ com.carrotsearch.randomizedtesting.annotations.Seed @ Don't commit hardcoded see com.carrotsearch.randomizedtesting.annotations.Repeat @ Don't commit hardcoded repeats org.apache.lucene.codecs.Codec#setDefault(org.apache.lucene.codecs.Codec) @ Use the SuppressCodecs("*") annotation instead -org.apache.lucene.util.LuceneTestCase$Slow @ Don't write slow tests +org.apache.lucene.tests.util.LuceneTestCase$Slow @ Don't write slow tests org.junit.Ignore @ Use AwaitsFix instead -org.apache.lucene.util.LuceneTestCase$Nightly @ We don't run nightly tests at this point! +org.apache.lucene.tests.util.LuceneTestCase$Nightly @ We don't run nightly tests at this point! com.carrotsearch.randomizedtesting.annotations.Nightly @ We don't run nightly tests at this point! org.junit.Test @defaultMessage Just name your test method testFooBar diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 7682a982e8186..5202b60cbdc20 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ opensearch = 2.0.0 -lucene = 9.0.0 +lucene = 9.1.0-snapshot-ea989fe8f30 bundled_jdk_vendor = adoptium bundled_jdk = 17.0.2+8 diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java index 7276cbb44b030..f853378e789fa 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java @@ -37,7 +37,7 @@ import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import 
org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; diff --git a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreCommandTestCase.java b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreCommandTestCase.java index b88eae3005c87..aa31e07368fc2 100644 --- a/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreCommandTestCase.java +++ b/distribution/tools/keystore-cli/src/test/java/org/opensearch/common/settings/KeyStoreCommandTestCase.java @@ -43,7 +43,7 @@ import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; import org.opensearch.core.internal.io.IOUtils; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.cli.CommandTestCase; import org.opensearch.common.io.PathUtilsForTesting; import org.opensearch.env.Environment; diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java index e0e5cbc54276e..c1b4568759f4d 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java @@ -35,7 +35,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.bouncycastle.bcpg.ArmoredOutputStream; import org.bouncycastle.bcpg.BCPGOutputStream; import org.bouncycastle.bcpg.HashAlgorithmTags; diff --git 
a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java index 46a439fcbc8ac..376e470159731 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java @@ -40,7 +40,7 @@ import java.util.Map; import java.util.stream.Collectors; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.cli.ExitCodes; diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/RemovePluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/RemovePluginCommandTests.java index bae64dfcfc42a..8f9aa27be7e84 100644 --- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/RemovePluginCommandTests.java +++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/RemovePluginCommandTests.java @@ -32,7 +32,7 @@ package org.opensearch.plugins; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.Version; import org.opensearch.cli.ExitCodes; import org.opensearch.cli.MockTerminal; diff --git a/libs/core/src/test/java/org/opensearch/core/internal/io/IOUtilsTests.java b/libs/core/src/test/java/org/opensearch/core/internal/io/IOUtilsTests.java index 85632ae406a1b..f1c8642b73044 100644 --- a/libs/core/src/test/java/org/opensearch/core/internal/io/IOUtilsTests.java +++ b/libs/core/src/test/java/org/opensearch/core/internal/io/IOUtilsTests.java @@ -30,8 +30,8 @@ package org.opensearch.core.internal.io; -import org.apache.lucene.mockfile.FilterFileSystemProvider; -import org.apache.lucene.mockfile.FilterPath; +import 
org.apache.lucene.tests.mockfile.FilterFileSystemProvider; +import org.apache.lucene.tests.mockfile.FilterPath; import org.apache.lucene.util.Constants; import org.opensearch.common.CheckedConsumer; import org.opensearch.common.io.PathUtils; diff --git a/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java b/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java index c07956d9d8f5c..da3228829816b 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java +++ b/modules/aggs-matrix-stats/src/test/java/org/opensearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java index ef4146b65872d..509010e209088 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactoryTests.java @@ -8,8 +8,8 @@ package org.opensearch.analysis.common; -import org.apache.lucene.analysis.CannedTokenStream; -import org.apache.lucene.analysis.Token; +import org.apache.lucene.tests.analysis.CannedTokenStream; +import org.apache.lucene.tests.analysis.Token; 
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/FlattenGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/FlattenGraphTokenFilterFactoryTests.java index 8fbcc9ea4ed49..7d9555923e4ef 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/FlattenGraphTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/FlattenGraphTokenFilterFactoryTests.java @@ -32,8 +32,8 @@ package org.opensearch.analysis.common; -import org.apache.lucene.analysis.CannedTokenStream; -import org.apache.lucene.analysis.Token; +import org.apache.lucene.tests.analysis.CannedTokenStream; +import org.apache.lucene.tests.analysis.Token; import org.apache.lucene.analysis.TokenStream; import org.opensearch.common.settings.Settings; import org.opensearch.index.Index; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenizerFactoryTests.java index c23b2c6082e19..49ccc742a355c 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/NGramTokenizerFactoryTests.java @@ -32,7 +32,7 @@ package org.opensearch.analysis.common; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java 
b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java index cb22835c364db..9de6f2a5eb2fd 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java @@ -32,8 +32,8 @@ package org.opensearch.analysis.common; -import org.apache.lucene.analysis.CannedTokenStream; -import org.apache.lucene.analysis.Token; +import org.apache.lucene.tests.analysis.CannedTokenStream; +import org.apache.lucene.tests.analysis.Token; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.analysis.AnalysisTestsHelper; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java index 81d59ef3349d5..8094e24b9adc8 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java @@ -33,7 +33,7 @@ package org.opensearch.analysis.common; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.tests.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.KeywordTokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/UniqueTokenFilterTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/UniqueTokenFilterTests.java index 0ab9919ad4813..a321fd4a5879c 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/UniqueTokenFilterTests.java 
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/UniqueTokenFilterTests.java @@ -33,7 +33,7 @@ package org.opensearch.analysis.common; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WhitespaceTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WhitespaceTokenizerFactoryTests.java index d98e358621d2e..c0034c0e8ef20 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WhitespaceTokenizerFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/WhitespaceTokenizerFactoryTests.java @@ -47,7 +47,7 @@ import java.io.Reader; import java.io.StringReader; -import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; public class WhitespaceTokenizerFactoryTests extends OpenSearchTestCase { diff --git a/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 deleted file mode 100644 index 21edcc44b664e..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0a3d818d6f6fb113831ed34553b24763fbda1e84 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..fb85ff4827c36 --- /dev/null +++ 
b/modules/lang-expression/licenses/lucene-expressions-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +c7317bb4e72b820a516e0c8a90beac5acc82c2e2 \ No newline at end of file diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/TokenCountFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/TokenCountFieldMapperTests.java index c3850b151dde8..e9d3767373b95 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/TokenCountFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/TokenCountFieldMapperTests.java @@ -33,9 +33,9 @@ package org.opensearch.index.mapper; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.CannedTokenStream; -import org.apache.lucene.analysis.MockTokenizer; -import org.apache.lucene.analysis.Token; +import org.apache.lucene.tests.analysis.CannedTokenStream; +import org.apache.lucene.tests.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.Token; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; diff --git a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java index be957146da21d..b25c1fd45e56e 100644 --- a/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/opensearch/join/query/HasChildQueryBuilder.java @@ -443,7 +443,7 @@ public Query rewrite(IndexReader reader) throws IOException { // blow up since for this query to work we have to have a DirectoryReader otherwise // we can't load global ordinals - for this to work we simply check if the reader has no leaves // and rewrite to match nothing - return new MatchNoDocsQuery(); + return Queries.newMatchNoDocsQueryWithoutRewrite("unable 
to load global ordinals with an empty directory reader"); } throw new IllegalStateException( "can't load global ordinals for reader of type: " + reader.getClass() + " must be a DirectoryReader" diff --git a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ChildrenToParentAggregatorTests.java index ad57770eefd65..0d134592fa678 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -38,7 +38,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ParentToChildrenAggregatorTests.java index 4373a74b17215..2ed06ee0c0ea9 100644 --- a/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/opensearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -39,7 +39,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git 
a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java index 7eb9f95aa89af..efa5a7a3d5095 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java index 9c2e44f580628..145aaaf24fbee 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -121,7 +121,7 @@ import static java.util.Collections.singleton; import static java.util.Collections.singletonList; import static java.util.Collections.synchronizedSet; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; import static org.opensearch.action.bulk.BackoffPolicy.constantBackoff; import static org.opensearch.common.unit.TimeValue.timeValueMillis; import static org.opensearch.common.unit.TimeValue.timeValueSeconds; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java 
b/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java index 8af217e5140e1..9333e5bd3dcc9 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ClientScrollableHitSourceTests.java @@ -69,7 +69,7 @@ import java.util.stream.IntStream; import static java.util.Collections.emptyMap; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; import static org.opensearch.common.unit.TimeValue.timeValueSeconds; import static org.hamcrest.Matchers.instanceOf; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java index 204f4ea49a22a..6239946852cf8 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java @@ -50,7 +50,7 @@ import java.util.HashMap; import java.util.Map; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; import static org.opensearch.common.unit.TimeValue.parseTimeValue; /** diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java index 3c2e302cb85e7..f203625dcfc44 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryWhileModifyingTests.java @@ -40,7 +40,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import static 
org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; diff --git a/modules/transport-netty4/src/yamlRestTest/java/org/opensearch/http/netty4/Netty4ClientYamlTestSuiteIT.java b/modules/transport-netty4/src/yamlRestTest/java/org/opensearch/http/netty4/Netty4ClientYamlTestSuiteIT.java index 4cdd836a602db..45693078174a8 100644 --- a/modules/transport-netty4/src/yamlRestTest/java/org/opensearch/http/netty4/Netty4ClientYamlTestSuiteIT.java +++ b/modules/transport-netty4/src/yamlRestTest/java/org/opensearch/http/netty4/Netty4ClientYamlTestSuiteIT.java @@ -36,7 +36,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; -import org.apache.lucene.util.TimeUnits; +import org.apache.lucene.tests.util.TimeUnits; import org.opensearch.test.rest.yaml.ClientYamlTestCandidate; import org.opensearch.test.rest.yaml.OpenSearchClientYamlSuiteTestCase; import org.junit.BeforeClass; diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 deleted file mode 100644 index a0df1a4b7cb2e..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a23a2c1c9baad61b6fb5380f072e41534c275875 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..2f0a6ad50e337 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +77930f802430648980eded22ca6ed47fedaeaba4 \ No newline 
at end of file diff --git a/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IcuAnalyzerTests.java b/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IcuAnalyzerTests.java index 40d503b1b3756..e490248fc8122 100644 --- a/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IcuAnalyzerTests.java +++ b/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IcuAnalyzerTests.java @@ -33,7 +33,7 @@ package org.opensearch.index.analysis; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.tests.analysis.BaseTokenStreamTestCase; import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; diff --git a/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IcuTokenizerFactoryTests.java b/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IcuTokenizerFactoryTests.java index 3ce9bc918bad1..115171aec184f 100644 --- a/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IcuTokenizerFactoryTests.java +++ b/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IcuTokenizerFactoryTests.java @@ -48,7 +48,7 @@ import java.nio.file.Files; import java.nio.file.Path; -import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; public class IcuTokenizerFactoryTests extends OpenSearchTestCase { diff --git a/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IndexableBinaryStringToolsTests.java b/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IndexableBinaryStringToolsTests.java index a39dc76c36619..63e21b2f7903b 100644 --- a/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IndexableBinaryStringToolsTests.java +++ 
b/plugins/analysis-icu/src/test/java/org/opensearch/index/analysis/IndexableBinaryStringToolsTests.java @@ -35,8 +35,8 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TimeUnits; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TimeUnits; import org.opensearch.test.junit.listeners.ReproduceInfoPrinter; import org.junit.BeforeClass; diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 deleted file mode 100644 index 7eb72638fd6d2..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -55f00abe01e51181d687c6bbceca8544f319b97d \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..a0d112dd733ab --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +c66f568fa9138c6ab6f3abf1efbfab3c7b5991d4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/src/test/java/org/opensearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/opensearch/index/analysis/KuromojiAnalysisTests.java index 591cbbec555a6..e17658d83a085 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/opensearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/opensearch/index/analysis/KuromojiAnalysisTests.java @@ -53,7 +53,7 @@ import java.nio.file.Files; import java.nio.file.Path; -import 
static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 deleted file mode 100644 index 4d787ad04791f..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5258e674ad9c189338b026710869c2955d8e11d \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..a3f939bfe9e05 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +e8c47600ea859b999a5f5647341b0350b03dafcd \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 deleted file mode 100644 index 8d915a28087e6..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -437960fac10a9f8327fbd87be4e408eb140988b3 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..e2006546433fd --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +6f0f5c71052beee26e4ce99e1147ce406234f417 \ No 
newline at end of file diff --git a/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/SimplePhoneticAnalysisTests.java b/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/SimplePhoneticAnalysisTests.java index 73e6b814afaf5..69d9fe9762c32 100644 --- a/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/SimplePhoneticAnalysisTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/SimplePhoneticAnalysisTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.analysis; -import org.apache.lucene.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.tests.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.analysis.phonetic.DaitchMokotoffSoundexFilter; diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 deleted file mode 100644 index d57bf6b3ab80d..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fe96c0b4609be5f7450773c2d7f099c51f4b1f7a \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..e675c5774f5a4 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +32aad8b8491df3c9862e7fe75e98bccdb6a25bda \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 deleted file mode 100644 index ade92c37c5865..0000000000000 --- 
a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b92e86dd451d225e68ee4abac5b00bf883b6ea00 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..053f5c97d65dc --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +ef546cfaaf727d93c4e86ddc7f77b525af135623 \ No newline at end of file diff --git a/plugins/analysis-stempel/src/test/java/org/opensearch/index/analysis/AnalysisPolishFactoryTests.java b/plugins/analysis-stempel/src/test/java/org/opensearch/index/analysis/AnalysisPolishFactoryTests.java index fb9c9d0ac3d0a..da6699fc1e95e 100644 --- a/plugins/analysis-stempel/src/test/java/org/opensearch/index/analysis/AnalysisPolishFactoryTests.java +++ b/plugins/analysis-stempel/src/test/java/org/opensearch/index/analysis/AnalysisPolishFactoryTests.java @@ -33,8 +33,8 @@ package org.opensearch.index.analysis; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.BaseTokenStreamTestCase; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.Tokenizer; import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 deleted file mode 100644 index 433ce1f0552c8..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -048fddf601c6de7dd296f6da3f394544618f7cea \ No newline at end of file diff --git 
a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..e5a2a0b0b4ab3 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +21c3511469f67019804e41a8d83ffc5c36de6479 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/src/main/java/org/opensearch/index/analysis/UkrainianAnalyzerProvider.java b/plugins/analysis-ukrainian/src/main/java/org/opensearch/index/analysis/UkrainianAnalyzerProvider.java index 9beced7d465a6..e752807c49f07 100644 --- a/plugins/analysis-ukrainian/src/main/java/org/opensearch/index/analysis/UkrainianAnalyzerProvider.java +++ b/plugins/analysis-ukrainian/src/main/java/org/opensearch/index/analysis/UkrainianAnalyzerProvider.java @@ -45,7 +45,7 @@ public class UkrainianAnalyzerProvider extends AbstractIndexAnalyzerProvider parameters() throws Exception { return OpenSearchClientYamlSuiteTestCase.createParameters(); } } - diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle index 8f6afefe97e5c..7cb08a9de6f08 100644 --- a/qa/wildfly/build.gradle +++ b/qa/wildfly/build.gradle @@ -102,7 +102,7 @@ testingConventions { // We only have one "special" integration test here to connect to wildfly naming { IT { - baseClass 'org.apache.lucene.util.LuceneTestCase' + baseClass 'org.apache.lucene.tests.util.LuceneTestCase' } } } diff --git a/qa/wildfly/src/test/java/org/opensearch/wildfly/WildflyIT.java b/qa/wildfly/src/test/java/org/opensearch/wildfly/WildflyIT.java index 1c448362bbead..7961ca69c2d29 100644 --- a/qa/wildfly/src/test/java/org/opensearch/wildfly/WildflyIT.java +++ b/qa/wildfly/src/test/java/org/opensearch/wildfly/WildflyIT.java @@ -42,8 +42,8 @@ import org.apache.http.util.EntityUtils; import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestRuleLimitSysouts; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TestRuleLimitSysouts; import org.opensearch.cluster.ClusterModule; import org.opensearch.common.xcontent.DeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; diff --git a/rest-api-spec/src/yamlRestTest/java/org/opensearch/test/rest/ClientYamlTestSuiteIT.java b/rest-api-spec/src/yamlRestTest/java/org/opensearch/test/rest/ClientYamlTestSuiteIT.java index 86b4567c3ad4d..9a1973e9d5aeb 100644 --- a/rest-api-spec/src/yamlRestTest/java/org/opensearch/test/rest/ClientYamlTestSuiteIT.java +++ b/rest-api-spec/src/yamlRestTest/java/org/opensearch/test/rest/ClientYamlTestSuiteIT.java @@ -35,7 +35,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; -import org.apache.lucene.util.TimeUnits; +import org.apache.lucene.tests.util.TimeUnits; import org.opensearch.test.rest.yaml.ClientYamlTestCandidate; import org.opensearch.test.rest.yaml.OpenSearchClientYamlSuiteTestCase; diff --git a/server/build.gradle b/server/build.gradle index dcf4d43c60192..45ec2300008f2 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -182,7 +182,7 @@ tasks.named("testingConventions").configure { naming.clear() naming { Tests { - baseClass "org.apache.lucene.util.LuceneTestCase" + baseClass "org.apache.lucene.tests.util.LuceneTestCase" } IT { baseClass "org.opensearch.test.OpenSearchIntegTestCase" diff --git a/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 deleted file mode 100644 index 2ed9dbcbe22f6..0000000000000 --- a/server/licenses/lucene-analysis-common-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f78890829c3d6f15de48fdbc2c77ef4c0e3f005c \ No newline at end of file diff --git 
a/server/licenses/lucene-analysis-common-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-analysis-common-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..6ef0f1eafc345 --- /dev/null +++ b/server/licenses/lucene-analysis-common-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +bafd720282a371efe7b0e7238f9dee7e2ad3a586 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 deleted file mode 100644 index acf5a2b543199..0000000000000 --- a/server/licenses/lucene-backward-codecs-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9fb48d0244799e18299449ee62459caab0728490 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-backward-codecs-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..017333945a866 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +597fe288a252a14c0876451c97afee2b4529f85a \ No newline at end of file diff --git a/server/licenses/lucene-core-9.0.0.jar.sha1 b/server/licenses/lucene-core-9.0.0.jar.sha1 deleted file mode 100644 index c874382fc8355..0000000000000 --- a/server/licenses/lucene-core-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -be679fd274f264e4e8b02bc032d2788cd4076ab4 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-core-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..a2ba0f0ffa43c --- /dev/null +++ b/server/licenses/lucene-core-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +80cd2fff33ced89924771c7079d42bf82f1266f6 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.0.0.jar.sha1 b/server/licenses/lucene-grouping-9.0.0.jar.sha1 deleted file mode 100644 index 18a81b5fa97ff..0000000000000 --- 
a/server/licenses/lucene-grouping-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -27ebe235d427b4e392fabab9b6bfa09524ca7f8b \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-grouping-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..ac0c1be0f952b --- /dev/null +++ b/server/licenses/lucene-grouping-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +7059f47668a2942c60ad03b1d58eca8dcb010e4e \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.0.0.jar.sha1 b/server/licenses/lucene-highlighter-9.0.0.jar.sha1 deleted file mode 100644 index 5503495c2f86c..0000000000000 --- a/server/licenses/lucene-highlighter-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a3cb395c2e8c672e6eec951b2b02371a4a883f73 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-highlighter-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..fa08ed63f7c44 --- /dev/null +++ b/server/licenses/lucene-highlighter-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +3c841ca23eb08a939fa49ba4af249c3b6d849c42 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.0.0.jar.sha1 b/server/licenses/lucene-join-9.0.0.jar.sha1 deleted file mode 100644 index dcbaa17875435..0000000000000 --- a/server/licenses/lucene-join-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -94a855b5d09a6601289aeaeba0f11d5539552590 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-join-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..2a3e2a9107a60 --- /dev/null +++ b/server/licenses/lucene-join-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +4984e041ae68f5939c01e41b2c9648ae2c021340 \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.0.0.jar.sha1 
b/server/licenses/lucene-memory-9.0.0.jar.sha1 deleted file mode 100644 index 157597ce9878f..0000000000000 --- a/server/licenses/lucene-memory-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2371c95031422bc1f501d43ffcc7311baed4b35b \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-memory-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..eefd08d222ef8 --- /dev/null +++ b/server/licenses/lucene-memory-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +fead9467ce65469579168eb0f47e014fdb3c63d9 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.0.0.jar.sha1 b/server/licenses/lucene-misc-9.0.0.jar.sha1 deleted file mode 100644 index ef031d34305a2..0000000000000 --- a/server/licenses/lucene-misc-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -25c6170f4fa2f707908dfb92fbafc76727f901e0 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-misc-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..226f97cf6f3bc --- /dev/null +++ b/server/licenses/lucene-misc-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +d98ab1966b8ca53b70fe071281bcea27d602ec30 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.0.0.jar.sha1 b/server/licenses/lucene-queries-9.0.0.jar.sha1 deleted file mode 100644 index 4b43c9e6b709a..0000000000000 --- a/server/licenses/lucene-queries-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -87b4c7833d30895baf7091f9cb0db878e970b604 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-queries-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..c151e6b76e21a --- /dev/null +++ b/server/licenses/lucene-queries-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +895e27127ae55031e35e152da8be941bd55f7f6a \ No newline at end of file 
diff --git a/server/licenses/lucene-queryparser-9.0.0.jar.sha1 b/server/licenses/lucene-queryparser-9.0.0.jar.sha1 deleted file mode 100644 index 62a4650a168c7..0000000000000 --- a/server/licenses/lucene-queryparser-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bf13395ad2033bca3182fcbc83204e8ae1951945 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-queryparser-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..b73b7152aed05 --- /dev/null +++ b/server/licenses/lucene-queryparser-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +1433392237ea01ef35f4e2ffc52f496b0669624c \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.0.0.jar.sha1 b/server/licenses/lucene-sandbox-9.0.0.jar.sha1 deleted file mode 100644 index 4396efda1a83b..0000000000000 --- a/server/licenses/lucene-sandbox-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3c153a1dc1da3f98083cc932c9476df4b77b0ca5 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-sandbox-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..d441dd2f8cb31 --- /dev/null +++ b/server/licenses/lucene-sandbox-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +b0688963ca8288f5a3e47ca6e4b38bc2fde780e7 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 deleted file mode 100644 index a742934def499..0000000000000 --- a/server/licenses/lucene-spatial-extras-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -91535ef6512c45c7e2b113b04cab7738ee774893 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-spatial-extras-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..5ffa78a6e7d87 --- /dev/null +++ 
b/server/licenses/lucene-spatial-extras-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +206e8918a726710c8a6fb927e59adf26c6ad5bed \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 deleted file mode 100644 index 0722795c260ad..0000000000000 --- a/server/licenses/lucene-spatial3d-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6b4ee47f218ed3d123c1b07671677a2e4f3c133b \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-spatial3d-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..8c4bb08303c34 --- /dev/null +++ b/server/licenses/lucene-spatial3d-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +3d1e26c37b45bdf2ef598d16468220ab33983a8f \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.0.0.jar.sha1 b/server/licenses/lucene-suggest-9.0.0.jar.sha1 deleted file mode 100644 index 7eb41e758379e..0000000000000 --- a/server/licenses/lucene-suggest-9.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a7d0e7279737114c039f5214082da948732096a6 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.1.0-snapshot-ea989fe8f30.jar.sha1 b/server/licenses/lucene-suggest-9.1.0-snapshot-ea989fe8f30.jar.sha1 new file mode 100644 index 0000000000000..3c8d9b87da0e5 --- /dev/null +++ b/server/licenses/lucene-suggest-9.1.0-snapshot-ea989fe8f30.jar.sha1 @@ -0,0 +1 @@ +69ab05339614766c732fef7c037cc5b676bd40dc \ No newline at end of file diff --git a/server/src/internalClusterTest/java/org/opensearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/server/src/internalClusterTest/java/org/opensearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index 01eedd3be5078..047584c1f13cb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -39,8 +39,8 @@ import java.util.ArrayList; import java.util.List; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.ExceptionsHelper; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java index cdf853c2ad9ae..ee2a8784fa0ed 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/ClusterRerouteIT.java @@ -35,7 +35,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.opensearch.action.admin.cluster.reroute.TransportClusterRerouteAction; diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java index 96f059695e719..eb3e61d83a948 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java @@ -32,9 +32,9 @@ package org.opensearch.cluster.routing.allocation.decider; 
-import org.apache.lucene.mockfile.FilterFileStore; -import org.apache.lucene.mockfile.FilterFileSystemProvider; -import org.apache.lucene.mockfile.FilterPath; +import org.apache.lucene.tests.mockfile.FilterFileStore; +import org.apache.lucene.tests.mockfile.FilterFileSystemProvider; +import org.apache.lucene.tests.mockfile.FilterPath; import org.apache.lucene.util.Constants; import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; diff --git a/server/src/internalClusterTest/java/org/opensearch/discovery/DiskDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/discovery/DiskDisruptionIT.java index cd35670279bc6..ef00150b7c814 100644 --- a/server/src/internalClusterTest/java/org/opensearch/discovery/DiskDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/discovery/DiskDisruptionIT.java @@ -32,7 +32,7 @@ package org.opensearch.discovery; import com.carrotsearch.randomizedtesting.RandomizedTest; -import org.apache.lucene.mockfile.FilterFileSystemProvider; +import org.apache.lucene.tests.mockfile.FilterFileSystemProvider; import org.opensearch.action.admin.indices.stats.ShardStats; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.io.PathUtils; diff --git a/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java index da3b30030581f..b548d4061475c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/engine/MaxDocsLimitIT.java @@ -32,8 +32,6 @@ package org.opensearch.index.engine; -import org.apache.lucene.index.IndexWriterMaxDocsChanger; - import org.opensearch.action.index.IndexResponse; import org.opensearch.action.search.SearchResponse; import org.opensearch.cluster.metadata.IndexMetadata; @@ -94,12 +92,12 @@ protected Collection> nodePlugins() { 
@Before public void setMaxDocs() { maxDocs.set(randomIntBetween(10, 100)); // Do not set this too low as we can fail to write the cluster state - IndexWriterMaxDocsChanger.setMaxDocs(maxDocs.get()); + setIndexWriterMaxDocs(maxDocs.get()); } @After public void restoreMaxDocs() { - IndexWriterMaxDocsChanger.restoreMaxDocs(); + restoreIndexWriterMaxDocs(); } public void testMaxDocsLimit() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java index c503dd9f83273..a24f55be3f010 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java @@ -32,7 +32,7 @@ package org.opensearch.indices.stats; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; +import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.opensearch.action.DocWriteResponse; import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.opensearch.action.admin.indices.create.CreateIndexRequest; diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java index d17761f62eb53..06475f1e7ac9d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/RelocationIT.java @@ -34,7 +34,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.index.IndexFileNames; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.action.ActionFuture; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.cluster.reroute.ClusterRerouteResponse; diff --git 
a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java index 45fb1a8fc58c0..71da9168c6205 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java @@ -32,8 +32,8 @@ package org.opensearch.recovery; -import org.apache.lucene.util.English; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; +import org.apache.lucene.tests.util.English; +import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.opensearch.action.admin.cluster.node.stats.NodeStats; import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.opensearch.action.index.IndexRequestBuilder; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java index 96f3f710e4b7d..42e515cca9b6b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java @@ -31,7 +31,7 @@ package org.opensearch.search; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java index 9efb07fc7e581..0b55ea9119d89 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java @@ -35,7 +35,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.LeafReader; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.OpenSearchException; import org.opensearch.action.DocWriteResponse; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java index 094ab8a19c88b..b0adc00f37fee 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -32,7 +32,7 @@ package org.opensearch.search.basic; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.OpenSearchException; import org.opensearch.action.DocWriteResponse; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index d1b3895ff40e1..27513b575d6f0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -33,8 +33,8 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockAnalyzer; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockTokenizer; import 
org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java index a21363e58949b..e216e92c63bad 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java @@ -33,7 +33,7 @@ package org.opensearch.search.functionscore; import org.apache.lucene.search.Explanation; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java index 24885c1f853ef..12d68c9c38ca1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/ProfilerSingleNodeNetworkTest.java @@ -7,7 +7,7 @@ package org.opensearch.search.profile; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchType; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java index 65d3ee2779de8..a74f359f2542e 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java @@ -32,7 +32,7 @@ package org.opensearch.search.profile.query; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.*; import org.opensearch.common.settings.Settings; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index c9bb746973226..fb744ccfc4655 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -32,13 +32,13 @@ package org.opensearch.search.query; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.pattern.PatternReplaceCharFilter; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.AttributeSource; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchPhaseExecutionException; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java index 92dfedeb99a23..38aac2850dc56 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java @@ -33,7 +33,7 @@ package org.opensearch.search.sort; import org.apache.lucene.util.BytesRef; 
-import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.UnicodeUtil; import org.opensearch.OpenSearchException; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java index 099ffbc278f81..e85eff8450ca4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java @@ -34,7 +34,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; +import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.opensearch.action.admin.indices.segments.IndexShardSegments; import org.opensearch.action.admin.indices.segments.ShardSegments; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java index c9b14993d6e49..acc4350d149cd 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -32,7 +32,7 @@ package org.opensearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; +import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.opensearch.action.index.IndexRequestBuilder; import 
org.opensearch.action.index.IndexResponse; diff --git a/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java b/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java index 629b20edbb44d..5898bba9762ad 100644 --- a/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/versioning/SimpleVersioningIT.java @@ -31,7 +31,7 @@ package org.opensearch.versioning; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.action.ActionResponse; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.DocWriteResponse; diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java index eee710892312a..fe26c313d72b2 100644 --- a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java +++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java @@ -144,7 +144,7 @@ private static class MergeSortQueue extends PriorityQueue { reverseMul = new int[sortFields.length]; for (int compIDX = 0; compIDX < sortFields.length; compIDX++) { final SortField sortField = sortFields[compIDX]; - comparators[compIDX] = sortField.getComparator(1, compIDX); + comparators[compIDX] = sortField.getComparator(1, false); reverseMul[compIDX] = sortField.getReverse() ? 
-1 : 1; } } diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index e8a06af50f525..860bb2c5ada69 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -80,7 +80,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_1_2_5 = new Version(1020599, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_3_0 = new Version(1030099, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_1_4_0 = new Version(1040099, org.apache.lucene.util.Version.LUCENE_8_10_1); - public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_9_0_0); + public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_9_1_0); public static final Version CURRENT = V_2_0_0; public static Version readVersion(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java index 0ee2bba73f009..64f50d2f37cdb 100644 --- a/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java @@ -57,7 +57,7 @@ class BottomSortValuesCollector { this.reverseMuls = new int[sortFields.length]; this.sortFields = sortFields; for (int i = 0; i < sortFields.length; i++) { - comparators[i] = sortFields[i].getComparator(1, i); + comparators[i] = sortFields[i].getComparator(1, false); reverseMuls[i] = sortFields[i].getReverse() ? 
-1 : 1; } } diff --git a/server/src/main/java/org/opensearch/common/lucene/Lucene.java b/server/src/main/java/org/opensearch/common/lucene/Lucene.java index 6e17aab92f24b..4cbc7a6668dee 100644 --- a/server/src/main/java/org/opensearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/opensearch/common/lucene/Lucene.java @@ -120,7 +120,7 @@ import java.util.Map; public class Lucene { - public static final String LATEST_CODEC = "Lucene90"; + public static final String LATEST_CODEC = "Lucene91"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; @@ -1118,7 +1118,7 @@ public VectorValues getVectorValues(String field) throws IOException { } @Override - public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException { + public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException { return null; } }; diff --git a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java index 711ff9860a5ce..982779333e7da 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -186,7 +186,7 @@ public Query rewrite(IndexReader reader) throws IOException { // which rewrites query with an empty reader. 
return new BooleanQuery.Builder().add(query.build(), BooleanClause.Occur.MUST) .add( - Queries.newMatchNoDocsQuery("No terms supplied for " + MultiPhrasePrefixQuery.class.getName()), + Queries.newMatchNoDocsQueryWithoutRewrite("No terms supplied for " + MultiPhrasePrefixQuery.class.getName()), BooleanClause.Occur.MUST ) .build(); diff --git a/server/src/main/java/org/opensearch/common/lucene/search/Queries.java b/server/src/main/java/org/opensearch/common/lucene/search/Queries.java index ef10d1eb0d221..2f500efba9e8e 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/Queries.java @@ -32,25 +32,34 @@ package org.opensearch.common.lucene.search; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.opensearch.OpenSearchException; import org.opensearch.common.Nullable; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.TypeFieldMapper; +import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.regex.Pattern; public class Queries { @@ -200,4 +209,57 @@ public 
static int calculateMinShouldMatch(int optionalClauseCount, String spec) return result < 0 ? 0 : result; } + + public static Query newMatchNoDocsQueryWithoutRewrite(String reason) { + return new MatchNoDocsWithoutRewriteQuery(reason); + } + + static class MatchNoDocsWithoutRewriteQuery extends Query { + private final String reason; + + public MatchNoDocsWithoutRewriteQuery(String reason) { + this.reason = reason; + } + + @Override + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + return new Weight(this) { + @Override + public Explanation explain(LeafReaderContext context, int doc) { + return Explanation.noMatch(reason); + } + + @Override + public Scorer scorer(LeafReaderContext context) { + return null; + } + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return true; + } + }; + } + + @Override + public String toString(String field) { + return "MatchNoDocsWithoutRewriteQuery(" + reason + ")"; + } + + @Override + public void visit(QueryVisitor visitor) { + // noop + } + + @Override + public boolean equals(Object o) { + return o instanceof MatchNoDocsWithoutRewriteQuery && Objects.equals(this.reason, ((MatchNoDocsWithoutRewriteQuery) o).reason); + } + + @Override + public int hashCode() { + return Objects.hashCode(reason); + } + } + } diff --git a/server/src/main/java/org/opensearch/index/codec/CodecService.java b/server/src/main/java/org/opensearch/index/codec/CodecService.java index d22c7239922bc..e445ad88f497e 100644 --- a/server/src/main/java/org/opensearch/index/codec/CodecService.java +++ b/server/src/main/java/org/opensearch/index/codec/CodecService.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene90.Lucene90Codec; -import org.apache.lucene.codecs.lucene90.Lucene90Codec.Mode; +import org.apache.lucene.codecs.lucene91.Lucene91Codec; +import 
org.apache.lucene.codecs.lucene91.Lucene91Codec.Mode; import org.opensearch.common.Nullable; import org.opensearch.common.collect.MapBuilder; import org.opensearch.index.mapper.MapperService; @@ -60,8 +60,8 @@ public class CodecService { public CodecService(@Nullable MapperService mapperService, Logger logger) { final MapBuilder codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene90Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene90Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene91Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene91Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); codecs.put(BEST_COMPRESSION_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_COMPRESSION, mapperService, logger)); diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index 20a8ff7ca9170..fa01390d13419 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -36,7 +36,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene90.Lucene90Codec; +import org.apache.lucene.codecs.lucene91.Lucene91Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.opensearch.common.lucene.Lucene; import org.opensearch.index.mapper.CompletionFieldMapper; @@ -51,7 +51,7 @@ * per index in real time via the mapping API. If no specific postings format is * configured for a specific field the default postings format is used. 
*/ -public class PerFieldMappingPostingFormatCodec extends Lucene90Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene91Codec { private final Logger logger; private final MapperService mapperService; private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java index 07fe3f9230de4..09a670237a8a5 100644 --- a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java +++ b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java @@ -242,7 +242,7 @@ public VectorValues getVectorValues(String field) throws IOException { } @Override - public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException { + public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException { throw new UnsupportedOperationException(); } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 0392018112619..d25443914ebaa 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -91,7 +91,7 @@ protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOEx protected void setScorer(Scorable scorer) {} @Override - public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final boolean 
sortMissingLast = sortMissingLast(missingValue) ^ reversed; diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index f80d6961fe9be..135101eae362d 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -96,13 +96,13 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, doubl protected void setScorer(Scorable scorer) {} @Override - public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final double dMissingValue = (Double) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new DoubleComparator(numHits, null, null, reversed, sortPos) { + return new DoubleComparator(numHits, null, null, reversed, false) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 3f8f573eff27b..f8371e8235070 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ 
b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -89,13 +89,13 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, float } @Override - public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final float fMissingValue = (Float) missingObject(missingValue, reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new FloatComparator(numHits, null, null, reversed, sortPos) { + return new FloatComparator(numHits, null, null, reversed, false) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new FloatLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index b036ec14d9bd0..3871425f1255f 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -112,13 +112,13 @@ private NumericDocValues getNumericDocValues(LeafReaderContext context, long mis } @Override - public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final long lMissingValue = (Long) missingObject(missingValue, 
reversed); // NOTE: it's important to pass null as a missing value in the constructor so that // the comparator doesn't check docsWithField since we replace missing values in select() - return new LongComparator(numHits, null, null, reversed, sortPos) { + return new LongComparator(numHits, null, null, reversed, false) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java index fd4d84fabe9c7..70cc73d2108cc 100644 --- a/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -76,7 +76,7 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { this.reverseMuls = new int[numFields]; for (int i = 0; i < numFields; i++) { SortField sortField = sort.getSort()[i]; - FieldComparator fieldComparator = sortField.getComparator(1, i); + FieldComparator fieldComparator = sortField.getComparator(1, false); @SuppressWarnings("unchecked") FieldComparator comparator = (FieldComparator) fieldComparator; comparator.setTopValue(after.fields[i]); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java index 73dc838a36198..6af887201d994 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -349,8 +349,8 @@ public int hashCode() { } @Override - public FieldComparator getComparator(int numHits, int sortPos) { - return new LongComparator(1, delegate.getField(), (Long) missingValue, 
delegate.getReverse(), sortPos) { + public FieldComparator getComparator(int numHits, boolean enableSkipping) { + return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), false) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/opensearch/search/internal/ExitableDirectoryReader.java index 2dc926176b1b4..9664fe3cb9085 100644 --- a/server/src/main/java/org/opensearch/search/internal/ExitableDirectoryReader.java +++ b/server/src/main/java/org/opensearch/search/internal/ExitableDirectoryReader.java @@ -196,6 +196,84 @@ public BytesRef next() throws IOException { } } + // delegates to PointValues but adds query cancellation checks + private static class ExitablePointTree implements PointValues.PointTree { + private final PointValues values; + private final PointValues.PointTree pointTree; + private final ExitableIntersectVisitor exitableIntersectVisitor; + private final QueryCancellation queryCancellation; + private int calls; + + private ExitablePointTree(PointValues values, PointValues.PointTree pointTree, QueryCancellation queryCancellation) { + this.values = values; + this.pointTree = pointTree; + this.exitableIntersectVisitor = new ExitableIntersectVisitor(queryCancellation); + this.queryCancellation = queryCancellation; + } + + @Override + public PointValues.PointTree clone() { + queryCancellation.checkCancelled(); + return new ExitablePointTree(values, pointTree.clone(), queryCancellation); + } + + @Override + public boolean moveToChild() throws IOException { + checkAndThrowWithSampling(); + return pointTree.moveToChild(); + } + + @Override + public boolean moveToSibling() throws IOException { + checkAndThrowWithSampling(); + return pointTree.moveToSibling(); + } + + @Override + public boolean moveToParent() 
throws IOException { + checkAndThrowWithSampling(); + return pointTree.moveToParent(); + } + + @Override + public byte[] getMinPackedValue() { + checkAndThrowWithSampling(); + return pointTree.getMinPackedValue(); + } + + @Override + public byte[] getMaxPackedValue() { + checkAndThrowWithSampling(); + return pointTree.getMaxPackedValue(); + } + + @Override + public long size() { + queryCancellation.checkCancelled(); + return pointTree.size(); + } + + @Override + public void visitDocIDs(PointValues.IntersectVisitor visitor) throws IOException { + queryCancellation.checkCancelled(); + pointTree.visitDocIDs(visitor); + } + + @Override + public void visitDocValues(PointValues.IntersectVisitor visitor) throws IOException { + queryCancellation.checkCancelled(); + exitableIntersectVisitor.setVisitor(visitor); + pointTree.visitDocValues(exitableIntersectVisitor); + } + + // reuse ExitableIntersectVisitor#checkAndThrowWithSampling + private void checkAndThrowWithSampling() { + if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { + queryCancellation.checkCancelled(); + } + } + } + /** * Wrapper class for {@link PointValues} that checks for query cancellation or timeout. 
*/ @@ -211,15 +289,8 @@ private ExitablePointValues(PointValues in, QueryCancellation queryCancellation) } @Override - public void intersect(IntersectVisitor visitor) throws IOException { - queryCancellation.checkCancelled(); - in.intersect(new ExitableIntersectVisitor(visitor, queryCancellation)); - } - - @Override - public long estimatePointCount(IntersectVisitor visitor) { - queryCancellation.checkCancelled(); - return in.estimatePointCount(visitor); + public PointTree getPointTree() throws IOException { + return new ExitablePointTree(in, in.getPointTree(), queryCancellation); } @Override @@ -269,12 +340,11 @@ private static class ExitableIntersectVisitor implements PointValues.IntersectVi private static final int MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK = (1 << 13) - 1; // 8191 - private final PointValues.IntersectVisitor in; + private PointValues.IntersectVisitor in; private final QueryCancellation queryCancellation; private int calls; - private ExitableIntersectVisitor(PointValues.IntersectVisitor in, QueryCancellation queryCancellation) { - this.in = in; + private ExitableIntersectVisitor(QueryCancellation queryCancellation) { this.queryCancellation = queryCancellation; } @@ -284,6 +354,10 @@ private void checkAndThrowWithSampling() { } } + private void setVisitor(PointValues.IntersectVisitor in) { + this.in = in; + } + @Override public void visit(int docID) throws IOException { checkAndThrowWithSampling(); diff --git a/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java index 2dd3481cc804f..2ab9703b45980 100644 --- a/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java @@ -731,8 +731,8 @@ private NumericDoubleValues getNumericDoubleValues(LeafReaderContext context) th } @Override - public FieldComparator newComparator(String fieldname, int numHits, int 
sortPos, boolean reversed) { - return new DoubleComparator(numHits, null, null, reversed, sortPos) { + public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + return new DoubleComparator(numHits, null, null, reversed, enableSkipping) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git a/server/src/test/java/org/opensearch/action/admin/indices/TransportAnalyzeActionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/TransportAnalyzeActionTests.java index 726a37a30c893..17cc539a5d561 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -31,8 +31,8 @@ package org.opensearch.action.admin.indices; -import org.apache.lucene.analysis.MockTokenFilter; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenFilter; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.CharacterRunAutomaton; diff --git a/server/src/test/java/org/opensearch/action/bulk/BulkShardRequestTests.java b/server/src/test/java/org/opensearch/action/bulk/BulkShardRequestTests.java index fb41fdf210f64..5aa4553133190 100644 --- a/server/src/test/java/org/opensearch/action/bulk/BulkShardRequestTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/BulkShardRequestTests.java @@ -36,7 +36,7 @@ import org.opensearch.index.shard.ShardId; import org.opensearch.test.OpenSearchTestCase; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; public class BulkShardRequestTests extends OpenSearchTestCase { public 
void testToString() { diff --git a/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java b/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java index 060f5d2a44b63..67a151646c635 100644 --- a/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java +++ b/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java @@ -257,7 +257,7 @@ private Object[] newDateNanoArray(String... values) { private TopFieldDocs createTopDocs(SortField sortField, int totalHits, Object[] values) { FieldDoc[] fieldDocs = new FieldDoc[values.length]; - FieldComparator cmp = sortField.getComparator(1, 0); + FieldComparator cmp = sortField.getComparator(1, false); for (int i = 0; i < values.length; i++) { fieldDocs[i] = new FieldDoc(i, Float.NaN, new Object[] { values[i] }); } diff --git a/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java index e8c5fddb4bf75..d746e5639fcb1 100644 --- a/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; -import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.opensearch.action.OriginalIndices; import org.opensearch.common.breaker.CircuitBreaker; import org.opensearch.common.breaker.NoopCircuitBreaker; diff --git a/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java index ac68dc78e41df..6c77e5f6efd5d 100644 --- a/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java +++ 
b/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java @@ -34,7 +34,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHits; -import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.opensearch.action.OriginalIndices; import org.opensearch.common.UUIDs; import org.opensearch.common.breaker.CircuitBreaker; diff --git a/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java b/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java index b7cdb3301384a..5486d110c9329 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java +++ b/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java @@ -31,7 +31,7 @@ package org.opensearch.action.termvectors; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.payloads.FloatEncoder; diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/BalanceUnbalancedClusterTests.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/BalanceUnbalancedClusterTests.java index e08431e5fc4ab..75961491f5cf0 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/BalanceUnbalancedClusterTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/BalanceUnbalancedClusterTests.java @@ -31,7 +31,7 @@ package org.opensearch.cluster.routing.allocation; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.Version; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.OpenSearchAllocationTestCase; diff --git 
a/server/src/test/java/org/opensearch/common/UUIDTests.java b/server/src/test/java/org/opensearch/common/UUIDTests.java index 4126c3aa30083..a71b68bfe859e 100644 --- a/server/src/test/java/org/opensearch/common/UUIDTests.java +++ b/server/src/test/java/org/opensearch/common/UUIDTests.java @@ -43,7 +43,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SerialMergeScheduler; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.lucene.Lucene; import org.opensearch.common.unit.ByteSizeValue; import org.opensearch.common.unit.TimeValue; diff --git a/server/src/test/java/org/opensearch/common/blobstore/fs/FsBlobContainerTests.java b/server/src/test/java/org/opensearch/common/blobstore/fs/FsBlobContainerTests.java index 85ac19a6e518f..83f5c0445e21d 100644 --- a/server/src/test/java/org/opensearch/common/blobstore/fs/FsBlobContainerTests.java +++ b/server/src/test/java/org/opensearch/common/blobstore/fs/FsBlobContainerTests.java @@ -31,9 +31,9 @@ package org.opensearch.common.blobstore.fs; -import org.apache.lucene.mockfile.FilterFileSystemProvider; -import org.apache.lucene.mockfile.FilterSeekableByteChannel; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.mockfile.FilterFileSystemProvider; +import org.apache.lucene.tests.mockfile.FilterSeekableByteChannel; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.common.blobstore.BlobPath; import org.opensearch.common.io.PathUtils; import org.opensearch.common.io.PathUtilsForTesting; diff --git a/server/src/test/java/org/opensearch/common/compress/DeflateCompressTests.java b/server/src/test/java/org/opensearch/common/compress/DeflateCompressTests.java index a2b2227effb7b..f215817914ee0 100644 --- a/server/src/test/java/org/opensearch/common/compress/DeflateCompressTests.java +++ 
b/server/src/test/java/org/opensearch/common/compress/DeflateCompressTests.java @@ -32,8 +32,8 @@ package org.opensearch.common.compress; -import org.apache.lucene.util.LineFileDocs; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.LineFileDocs; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.test.OpenSearchTestCase; import java.io.ByteArrayInputStream; diff --git a/server/src/test/java/org/opensearch/common/compress/DeflateCompressedXContentTests.java b/server/src/test/java/org/opensearch/common/compress/DeflateCompressedXContentTests.java index 24ea9e30ecdfb..a14fabc30050d 100644 --- a/server/src/test/java/org/opensearch/common/compress/DeflateCompressedXContentTests.java +++ b/server/src/test/java/org/opensearch/common/compress/DeflateCompressedXContentTests.java @@ -32,7 +32,7 @@ package org.opensearch.common.compress; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/common/geo/GeoWKTShapeParserTests.java b/server/src/test/java/org/opensearch/common/geo/GeoWKTShapeParserTests.java index d8a1bcb97d678..cd05331442be2 100644 --- a/server/src/test/java/org/opensearch/common/geo/GeoWKTShapeParserTests.java +++ b/server/src/test/java/org/opensearch/common/geo/GeoWKTShapeParserTests.java @@ -31,7 +31,7 @@ package org.opensearch.common.geo; -import org.apache.lucene.geo.GeoTestUtil; +import org.apache.lucene.tests.geo.GeoTestUtil; import org.opensearch.OpenSearchException; import org.opensearch.OpenSearchParseException; import org.opensearch.Version; diff --git a/server/src/test/java/org/opensearch/common/io/FileSystemUtilsTests.java b/server/src/test/java/org/opensearch/common/io/FileSystemUtilsTests.java index 406ea1ea56c5b..bb0a1c486a30c 100644 --- 
a/server/src/test/java/org/opensearch/common/io/FileSystemUtilsTests.java +++ b/server/src/test/java/org/opensearch/common/io/FileSystemUtilsTests.java @@ -33,7 +33,7 @@ package org.opensearch.common.io; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems; +import org.apache.lucene.tests.util.LuceneTestCase.SuppressFileSystems; import org.opensearch.test.OpenSearchTestCase; import org.junit.Before; diff --git a/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java b/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java index 69c431994ba7e..4c179309f16ba 100644 --- a/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/LuceneTests.java @@ -31,7 +31,7 @@ package org.opensearch.common.lucene; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -47,7 +47,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoDeletionPolicy; import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; import org.apache.lucene.index.Term; @@ -70,7 +70,7 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.store.MMapDirectory; -import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.opensearch.common.collect.Tuple; diff --git a/server/src/test/java/org/opensearch/common/lucene/ShardCoreKeyMapTests.java 
b/server/src/test/java/org/opensearch/common/lucene/ShardCoreKeyMapTests.java index e133e341b475d..8abfb14fe3087 100644 --- a/server/src/test/java/org/opensearch/common/lucene/ShardCoreKeyMapTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/ShardCoreKeyMapTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; diff --git a/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java index 26674189f3cd8..8de2a54a3df37 100644 --- a/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/search/function/MinScoreScorerTests.java @@ -39,8 +39,8 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TestUtil; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/test/java/org/opensearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java b/server/src/test/java/org/opensearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java index ad60895505214..e6a621037ef6d 100644 --- a/server/src/test/java/org/opensearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java @@ -32,12 +32,12 @@ 
package org.opensearch.common.lucene.search.morelikethis; -import org.apache.lucene.analysis.MockAnalyzer; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.queries.mlt.MoreLikeThis; import org.apache.lucene.search.BooleanClause; diff --git a/server/src/test/java/org/opensearch/common/util/BytesRefHashTests.java b/server/src/test/java/org/opensearch/common/util/BytesRefHashTests.java index 4107d1a24e6fc..1859a7d4a8f9d 100644 --- a/server/src/test/java/org/opensearch/common/util/BytesRefHashTests.java +++ b/server/src/test/java/org/opensearch/common/util/BytesRefHashTests.java @@ -37,7 +37,7 @@ import com.carrotsearch.hppc.cursors.ObjectLongCursor; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.settings.Settings; import org.opensearch.indices.breaker.NoneCircuitBreakerService; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java index 011b5c8ea0e4e..e6f0f2824b7d3 100644 --- a/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java @@ -32,7 +32,7 @@ package org.opensearch.env; import org.apache.lucene.index.SegmentInfos; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.cluster.node.DiscoveryNodeRole; import 
org.opensearch.common.SuppressForbidden; import org.opensearch.common.io.PathUtils; diff --git a/server/src/test/java/org/opensearch/gateway/GatewayMetaStatePersistedStateTests.java b/server/src/test/java/org/opensearch/gateway/GatewayMetaStatePersistedStateTests.java index 23977877773f0..4ab5d7c44f356 100644 --- a/server/src/test/java/org/opensearch/gateway/GatewayMetaStatePersistedStateTests.java +++ b/server/src/test/java/org/opensearch/gateway/GatewayMetaStatePersistedStateTests.java @@ -33,7 +33,7 @@ package org.opensearch.gateway; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.opensearch.ExceptionsHelper; import org.opensearch.Version; import org.opensearch.cluster.ClusterName; diff --git a/server/src/test/java/org/opensearch/gateway/IncrementalClusterStateWriterTests.java b/server/src/test/java/org/opensearch/gateway/IncrementalClusterStateWriterTests.java index 31ec7c7f2ffe3..21d5c897c4a7d 100644 --- a/server/src/test/java/org/opensearch/gateway/IncrementalClusterStateWriterTests.java +++ b/server/src/test/java/org/opensearch/gateway/IncrementalClusterStateWriterTests.java @@ -35,7 +35,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.opensearch.Version; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.ClusterState; diff --git a/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java b/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java index 70e1e8d73ef3a..fe7b1255cf716 100644 --- a/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java +++ b/server/src/test/java/org/opensearch/gateway/MetadataStateFormatTests.java @@ -37,9 +37,9 @@ import 
org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.apache.lucene.store.NIOFSDirectory; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.cluster.ClusterModule; import org.opensearch.cluster.metadata.Metadata; import org.opensearch.common.xcontent.NamedXContentRegistry; diff --git a/server/src/test/java/org/opensearch/index/IndexModuleTests.java b/server/src/test/java/org/opensearch/index/IndexModuleTests.java index 3be0304381810..daa9186dfd8c0 100644 --- a/server/src/test/java/org/opensearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/opensearch/index/IndexModuleTests.java @@ -33,7 +33,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardTokenizer; -import org.apache.lucene.index.AssertingDirectoryReader; +import org.apache.lucene.tests.index.AssertingDirectoryReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.Term; diff --git a/server/src/test/java/org/opensearch/index/IndexTests.java b/server/src/test/java/org/opensearch/index/IndexTests.java index f611537bb06fe..ed845e27bfb2f 100644 --- a/server/src/test/java/org/opensearch/index/IndexTests.java +++ b/server/src/test/java/org/opensearch/index/IndexTests.java @@ -43,7 +43,7 @@ import java.io.IOException; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; diff --git a/server/src/test/java/org/opensearch/index/analysis/AnalysisRegistryTests.java 
b/server/src/test/java/org/opensearch/index/analysis/AnalysisRegistryTests.java index b3be1995352a0..cbc189be491cd 100644 --- a/server/src/test/java/org/opensearch/index/analysis/AnalysisRegistryTests.java +++ b/server/src/test/java/org/opensearch/index/analysis/AnalysisRegistryTests.java @@ -34,7 +34,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockTokenFilter; +import org.apache.lucene.tests.analysis.MockTokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.en.EnglishAnalyzer; diff --git a/server/src/test/java/org/opensearch/index/analysis/CustomNormalizerTests.java b/server/src/test/java/org/opensearch/index/analysis/CustomNormalizerTests.java index f9da31fab962b..d610dfab12f46 100644 --- a/server/src/test/java/org/opensearch/index/analysis/CustomNormalizerTests.java +++ b/server/src/test/java/org/opensearch/index/analysis/CustomNormalizerTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.analysis; -import org.apache.lucene.analysis.MockLowerCaseFilter; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockLowerCaseFilter; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.util.BytesRef; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java index 66de4d03ebbbf..94b78da402b44 100644 --- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java @@ -34,7 +34,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene90.Lucene90Codec; +import 
org.apache.lucene.codecs.lucene91.Lucene91Codec; import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; @@ -42,7 +42,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; +import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; @@ -65,21 +65,21 @@ public class CodecTests extends OpenSearchTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene90Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene91Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService().codec("default"); - assertStoredFieldsCompressionEquals(Lucene90Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene91Codec.Mode.BEST_SPEED, codec); } public void testBestCompression() throws Exception { Codec codec = createCodecService().codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene90Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene91Codec.Mode.BEST_COMPRESSION, codec); } // write some docs with it, inspect .si to see this was the used compression - private void assertStoredFieldsCompressionEquals(Lucene90Codec.Mode expected, Codec actual) throws Exception { + private void assertStoredFieldsCompressionEquals(Lucene91Codec.Mode expected, Codec actual) throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = newIndexWriterConfig(null); 
iwc.setCodec(actual); @@ -91,7 +91,7 @@ private void assertStoredFieldsCompressionEquals(Lucene90Codec.Mode expected, Co SegmentReader sr = (SegmentReader) ir.leaves().get(0).reader(); String v = sr.getSegmentInfo().info.getAttribute(Lucene90StoredFieldsFormat.MODE_KEY); assertNotNull(v); - assertEquals(expected, Lucene90Codec.Mode.valueOf(v)); + assertEquals(expected, Lucene91Codec.Mode.valueOf(v)); ir.close(); dir.close(); } diff --git a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java index 30285b1a3a014..66b066b907100 100644 --- a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene90.Lucene90Codec; +import org.apache.lucene.codecs.lucene91.Lucene91Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -70,7 +70,7 @@ public void testExceptionsAreNotCached() { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); final PostingsFormat postingsFormat = new Completion90PostingsFormat(); - indexWriterConfig.setCodec(new Lucene90Codec() { + indexWriterConfig.setCodec(new Lucene91Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 5202e04990f95..361013149578e 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ 
b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -50,7 +50,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.IndexWriterMaxDocsChanger; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; @@ -77,7 +76,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.Lock; -import org.apache.lucene.store.MockDirectoryWrapper; +import org.apache.lucene.tests.store.MockDirectoryWrapper; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; @@ -7121,7 +7120,7 @@ public void testProducesStoredFieldsReader() throws Exception { public void testMaxDocsOnPrimary() throws Exception { engine.close(); int maxDocs = randomIntBetween(1, 100); - IndexWriterMaxDocsChanger.setMaxDocs(maxDocs); + setIndexWriterMaxDocs(maxDocs); try { engine = new InternalTestEngine(engine.config(), maxDocs, LocalCheckpointTracker::new); int numDocs = between(maxDocs + 1, maxDocs * 2); @@ -7156,14 +7155,14 @@ public void testMaxDocsOnPrimary() throws Exception { assertFalse(engine.isClosed.get()); } } finally { - IndexWriterMaxDocsChanger.restoreMaxDocs(); + restoreIndexWriterMaxDocs(); } } public void testMaxDocsOnReplica() throws Exception { engine.close(); int maxDocs = randomIntBetween(1, 100); - IndexWriterMaxDocsChanger.setMaxDocs(maxDocs); + setIndexWriterMaxDocs(maxDocs); try { engine = new InternalTestEngine(engine.config(), maxDocs, LocalCheckpointTracker::new); int numDocs = between(maxDocs + 1, maxDocs * 2); @@ -7176,7 +7175,7 @@ public void testMaxDocsOnReplica() throws Exception { assertThat(error.getMessage(), containsString("number of documents in the index cannot exceed " + maxDocs)); 
assertTrue(engine.isClosed.get()); } finally { - IndexWriterMaxDocsChanger.restoreMaxDocs(); + restoreIndexWriterMaxDocs(); } } } diff --git a/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java index f9c6c065904d7..b1e033232420b 100644 --- a/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/opensearch/index/engine/LiveVersionMapTests.java @@ -35,8 +35,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.RamUsageTester; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.RamUsageTester; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.lease.Releasable; import org.opensearch.index.translog.Translog; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java b/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java index 95a2db9d74c38..2106c5e1067fb 100644 --- a/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/ReadOnlyEngineTests.java @@ -33,7 +33,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.Version; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; diff --git a/server/src/test/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicyTests.java index 4587b022f0483..a8279ed908779 100644 --- 
a/server/src/test/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -56,7 +56,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.InfoStream; -import org.apache.lucene.util.NullInfoStream; +import org.apache.lucene.tests.util.NullInfoStream; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java b/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java index dc6511b9e2632..81e0ff19c0070 100644 --- a/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java +++ b/server/src/test/java/org/opensearch/index/engine/VersionValueTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.engine; -import org.apache.lucene.util.RamUsageTester; +import org.apache.lucene.tests.util.RamUsageTester; import org.opensearch.index.translog.Translog; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/opensearch/index/fielddata/AbstractStringFieldDataTestCase.java index 33813580468aa..763ee59a385a2 100644 --- a/server/src/test/java/org/opensearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/opensearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -58,7 +58,7 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.UnicodeUtil; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.lucene.search.Queries; diff --git 
a/server/src/test/java/org/opensearch/index/fielddata/plain/HalfFloatFielddataTests.java b/server/src/test/java/org/opensearch/index/fielddata/plain/HalfFloatFielddataTests.java index 60bd5a32f0fbf..be631cc311b2a 100644 --- a/server/src/test/java/org/opensearch/index/fielddata/plain/HalfFloatFielddataTests.java +++ b/server/src/test/java/org/opensearch/index/fielddata/plain/HalfFloatFielddataTests.java @@ -40,7 +40,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.store.Directory; import org.opensearch.core.internal.io.IOUtils; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.index.fielddata.FieldData; import org.opensearch.index.fielddata.SortedNumericDoubleValues; import org.opensearch.index.mapper.NumberFieldMapper; diff --git a/server/src/test/java/org/opensearch/index/mapper/DocumentFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/DocumentFieldMapperTests.java index 3d2b8c4457c17..c74b5745cd83f 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DocumentFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DocumentFieldMapperTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.index.analysis.AnalyzerScope; import org.opensearch.index.analysis.NamedAnalyzer; import org.opensearch.index.query.QueryShardContext; diff --git a/server/src/test/java/org/opensearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/KeywordFieldMapperTests.java index 9c2ca84859d66..7059e908c078f 100644 --- a/server/src/test/java/org/opensearch/index/mapper/KeywordFieldMapperTests.java +++ 
b/server/src/test/java/org/opensearch/index/mapper/KeywordFieldMapperTests.java @@ -33,8 +33,8 @@ package org.opensearch.index.mapper; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockLowerCaseFilter; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockLowerCaseFilter; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; @@ -69,7 +69,7 @@ import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; -import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; diff --git a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java index 57f3f3693257b..3b6d757c81b83 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java @@ -52,7 +52,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; diff --git a/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java index 2db5f2eea3596..8a5a0e8864d3e 100644 --- 
a/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java @@ -33,10 +33,10 @@ package org.opensearch.index.mapper; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.CannedTokenStream; -import org.apache.lucene.analysis.MockSynonymAnalyzer; +import org.apache.lucene.tests.analysis.CannedTokenStream; +import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; import org.apache.lucene.analysis.StopFilter; -import org.apache.lucene.analysis.Token; +import org.apache.lucene.tests.analysis.Token; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; diff --git a/server/src/test/java/org/opensearch/index/mapper/UidTests.java b/server/src/test/java/org/opensearch/index/mapper/UidTests.java index 34e7682474f56..a7253d9a6a7c4 100644 --- a/server/src/test/java/org/opensearch/index/mapper/UidTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/UidTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.mapper; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.test.OpenSearchTestCase; import java.util.Arrays; diff --git a/server/src/test/java/org/opensearch/index/query/IntervalBuilderTests.java b/server/src/test/java/org/opensearch/index/query/IntervalBuilderTests.java index 9cb8108818705..b31bcc10a28cc 100644 --- a/server/src/test/java/org/opensearch/index/query/IntervalBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/IntervalBuilderTests.java @@ -33,8 +33,8 @@ package org.opensearch.index.query; import org.apache.lucene.analysis.CachingTokenFilter; -import org.apache.lucene.analysis.CannedTokenStream; -import org.apache.lucene.analysis.Token; +import org.apache.lucene.tests.analysis.CannedTokenStream; 
+import org.apache.lucene.tests.analysis.Token; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; diff --git a/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java index 9ae95fd941a59..8a6b09b1ea925 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchBoolPrefixQueryBuilderTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.query; -import org.apache.lucene.analysis.MockSynonymAnalyzer; +import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; diff --git a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java index 354e932f6b9f9..dac32849e3cef 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java @@ -33,8 +33,8 @@ package org.opensearch.index.query; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.CannedBinaryTokenStream; -import org.apache.lucene.analysis.MockSynonymAnalyzer; +import org.apache.lucene.tests.analysis.CannedBinaryTokenStream; +import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.queries.spans.SpanNearQuery; diff --git a/server/src/test/java/org/opensearch/index/query/QueryShardContextTests.java b/server/src/test/java/org/opensearch/index/query/QueryShardContextTests.java index 
b803e7b5686dc..2f8ab12a2f3e3 100644 --- a/server/src/test/java/org/opensearch/index/query/QueryShardContextTests.java +++ b/server/src/test/java/org/opensearch/index/query/QueryShardContextTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; diff --git a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java index d08f2ef170bf2..b091fc2103344 100644 --- a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.query; -import org.apache.lucene.analysis.MockSynonymAnalyzer; +import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.Term; diff --git a/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java index b11e0cab76340..1101da8de70fe 100644 --- a/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SimpleQueryStringBuilderTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.query; -import org.apache.lucene.analysis.MockSynonymAnalyzer; +import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.index.Term; import 
org.apache.lucene.queries.spans.SpanNearQuery; @@ -50,7 +50,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.index.search.SimpleQueryStringQueryParser; diff --git a/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java index 011d05aef1214..50f51f9ff68d1 100644 --- a/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.TextField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.queries.SpanMatchNoDocsQuery; import org.apache.lucene.queries.spans.FieldMaskingSpanQuery; diff --git a/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreEquivalenceTests.java b/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreEquivalenceTests.java index 3db870941992a..a78cccbbc8083 100644 --- a/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreEquivalenceTests.java +++ b/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreEquivalenceTests.java @@ -34,8 +34,8 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.RandomApproximationQuery; -import org.apache.lucene.search.SearchEquivalenceTestBase; +import 
org.apache.lucene.tests.search.RandomApproximationQuery; +import org.apache.lucene.tests.search.SearchEquivalenceTestBase; import org.apache.lucene.search.TermQuery; import org.opensearch.bootstrap.BootstrapForTesting; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; diff --git a/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java b/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java index 2bfcec1bf786c..3bcda9f5e762f 100644 --- a/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java +++ b/server/src/test/java/org/opensearch/index/query/functionscore/FunctionScoreTests.java @@ -45,7 +45,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.RandomApproximationQuery; +import org.apache.lucene.tests.search.RandomApproximationQuery; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermQuery; diff --git a/server/src/test/java/org/opensearch/index/reindex/BulkByScrollResponseTests.java b/server/src/test/java/org/opensearch/index/reindex/BulkByScrollResponseTests.java index 6ee3f3c0bced4..f9101ee8a50c1 100644 --- a/server/src/test/java/org/opensearch/index/reindex/BulkByScrollResponseTests.java +++ b/server/src/test/java/org/opensearch/index/reindex/BulkByScrollResponseTests.java @@ -51,7 +51,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; import static org.opensearch.common.unit.TimeValue.timeValueMillis; public class BulkByScrollResponseTests extends AbstractXContentTestCase { diff --git a/server/src/test/java/org/opensearch/index/reindex/BulkByScrollTaskStatusTests.java 
b/server/src/test/java/org/opensearch/index/reindex/BulkByScrollTaskStatusTests.java index 3921417d42cf0..bd8acb8bc18b7 100644 --- a/server/src/test/java/org/opensearch/index/reindex/BulkByScrollTaskStatusTests.java +++ b/server/src/test/java/org/opensearch/index/reindex/BulkByScrollTaskStatusTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.reindex; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.common.Randomness; @@ -55,7 +55,7 @@ import static java.lang.Math.abs; import static java.util.stream.Collectors.toList; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; import static org.hamcrest.Matchers.equalTo; public class BulkByScrollTaskStatusTests extends AbstractXContentTestCase { diff --git a/server/src/test/java/org/opensearch/index/reindex/DeleteByQueryRequestTests.java b/server/src/test/java/org/opensearch/index/reindex/DeleteByQueryRequestTests.java index 3ff4b3ec26231..747ce00134adc 100644 --- a/server/src/test/java/org/opensearch/index/reindex/DeleteByQueryRequestTests.java +++ b/server/src/test/java/org/opensearch/index/reindex/DeleteByQueryRequestTests.java @@ -40,7 +40,7 @@ import java.io.IOException; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; diff --git a/server/src/test/java/org/opensearch/index/reindex/UpdateByQueryRequestTests.java b/server/src/test/java/org/opensearch/index/reindex/UpdateByQueryRequestTests.java index 57a1135eaf9be..4cdb94897d4dc 100644 --- a/server/src/test/java/org/opensearch/index/reindex/UpdateByQueryRequestTests.java +++ 
b/server/src/test/java/org/opensearch/index/reindex/UpdateByQueryRequestTests.java @@ -37,7 +37,7 @@ import java.io.IOException; -import static org.apache.lucene.util.TestUtil.randomSimpleString; +import static org.apache.lucene.tests.util.TestUtil.randomSimpleString; public class UpdateByQueryRequestTests extends AbstractBulkByScrollRequestTestCase { public void testUpdateByQueryRequestImplementsIndicesRequestReplaceable() { diff --git a/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java b/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java index a7f765fee23da..f0a70b55e0fdd 100644 --- a/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java +++ b/server/src/test/java/org/opensearch/index/search/MultiMatchQueryTests.java @@ -32,7 +32,7 @@ package org.opensearch.index.search; -import org.apache.lucene.analysis.MockSynonymAnalyzer; +import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.search.BooleanClause; diff --git a/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java index ee0b99bdc102c..a8cd6c5411875 100644 --- a/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java +++ b/server/src/test/java/org/opensearch/index/search/nested/NestedSortingTests.java @@ -56,7 +56,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToParentBlockJoinQuery; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.common.lucene.search.Queries; import org.opensearch.common.settings.Settings; diff --git 
a/server/src/test/java/org/opensearch/index/shard/IndexReaderWrapperTests.java b/server/src/test/java/org/opensearch/index/shard/IndexReaderWrapperTests.java index f2d8e96d63605..6b58633d5cd2e 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexReaderWrapperTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexReaderWrapperTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.FieldFilterLeafReader; +import org.apache.lucene.tests.index.FieldFilterLeafReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; diff --git a/server/src/test/java/org/opensearch/index/shard/NewPathForShardTests.java b/server/src/test/java/org/opensearch/index/shard/NewPathForShardTests.java index b3c69d27bc9a5..0d2a27aa0714e 100644 --- a/server/src/test/java/org/opensearch/index/shard/NewPathForShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/NewPathForShardTests.java @@ -31,7 +31,7 @@ package org.opensearch.index.shard; -import org.apache.lucene.mockfile.FilterFileSystemProvider; +import org.apache.lucene.tests.mockfile.FilterFileSystemProvider; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.io.PathUtils; import org.opensearch.common.io.PathUtilsForTesting; diff --git a/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java b/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java index 1b8809ba04278..9a2a0dd7e070c 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoveCorruptedShardDataCommandTests.java @@ -33,7 +33,7 @@ import joptsimple.OptionParser; import joptsimple.OptionSet; 
-import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.tests.store.BaseDirectoryWrapper; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.cli.MockTerminal; diff --git a/server/src/test/java/org/opensearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/opensearch/index/shard/ShardSplittingQueryTests.java index 4a5fa82a2f36b..b39ff0c9b97b3 100644 --- a/server/src/test/java/org/opensearch/index/shard/ShardSplittingQueryTests.java +++ b/server/src/test/java/org/opensearch/index/shard/ShardSplittingQueryTests.java @@ -38,7 +38,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; diff --git a/server/src/test/java/org/opensearch/index/shard/ShardUtilsTests.java b/server/src/test/java/org/opensearch/index/shard/ShardUtilsTests.java index 7cb42464c1f72..25a385f157b5c 100644 --- a/server/src/test/java/org/opensearch/index/shard/ShardUtilsTests.java +++ b/server/src/test/java/org/opensearch/index/shard/ShardUtilsTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.tests.store.BaseDirectoryWrapper; import org.opensearch.common.lucene.index.OpenSearchDirectoryReader; import org.opensearch.core.internal.io.IOUtils; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java index 
7f1f4ade53c50..ef935ba64f1ca 100644 --- a/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/opensearch/index/similarity/ScriptedSimilarityTests.java @@ -52,7 +52,7 @@ import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.Version; import org.opensearch.script.SimilarityScript; import org.opensearch.script.SimilarityWeightScript; diff --git a/server/src/test/java/org/opensearch/index/store/ByteSizeCachingDirectoryTests.java b/server/src/test/java/org/opensearch/index/store/ByteSizeCachingDirectoryTests.java index 2bfaa0ee51753..1de7587626712 100644 --- a/server/src/test/java/org/opensearch/index/store/ByteSizeCachingDirectoryTests.java +++ b/server/src/test/java/org/opensearch/index/store/ByteSizeCachingDirectoryTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/index/store/StoreTests.java b/server/src/test/java/org/opensearch/index/store/StoreTests.java index 53ba689fbe011..fdec86e7912fd 100644 --- a/server/src/test/java/org/opensearch/index/store/StoreTests.java +++ b/server/src/test/java/org/opensearch/index/store/StoreTests.java @@ -31,7 +31,7 @@ package org.opensearch.index.store; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -51,7 +51,7 @@ import 
org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.Term; -import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.tests.store.BaseDirectoryWrapper; import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; @@ -61,7 +61,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.NIOFSDirectory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.Version; import org.opensearch.ExceptionsHelper; import org.opensearch.cluster.metadata.IndexMetadata; diff --git a/server/src/test/java/org/opensearch/index/translog/TestTranslog.java b/server/src/test/java/org/opensearch/index/translog/TestTranslog.java index 1bf245dc72a20..4e9d56aeb9573 100644 --- a/server/src/test/java/org/opensearch/index/translog/TestTranslog.java +++ b/server/src/test/java/org/opensearch/index/translog/TestTranslog.java @@ -35,7 +35,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.common.io.stream.InputStreamStreamInput; import org.opensearch.core.internal.io.IOUtils; diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java index f1a6ba84e8543..153677e00c22b 100644 --- a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java @@ -41,14 +41,14 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.IndexFormatTooOldException; import 
org.apache.lucene.index.Term; -import org.apache.lucene.mockfile.FilterFileChannel; -import org.apache.lucene.mockfile.FilterFileSystemProvider; +import org.apache.lucene.tests.mockfile.FilterFileChannel; +import org.apache.lucene.tests.mockfile.FilterFileSystemProvider; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.DataOutput; -import org.apache.lucene.store.MockDirectoryWrapper; -import org.apache.lucene.util.LineFileDocs; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.store.MockDirectoryWrapper; +import org.apache.lucene.tests.util.LineFileDocs; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.Assertions; import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; diff --git a/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java index bc2ecc2e62fae..efec81e803f1c 100644 --- a/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/opensearch/indices/analysis/AnalysisModuleTests.java @@ -34,7 +34,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharFilter; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; @@ -87,7 +87,7 @@ import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; -import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertTokenStreamContents; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.instanceOf; diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java index dbafab49d8655..80326fbf2de6b 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoverySourceHandlerTests.java @@ -40,9 +40,9 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; -import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.tests.store.BaseDirectoryWrapper; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.SetOnce; diff --git a/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java index 38f1c23bfa1f3..1bcb17a81efee 100644 --- a/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java +++ b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/DeDuplicatingTokenFilterTests.java @@ -33,7 +33,7 @@ package org.opensearch.lucene.analysis.miscellaneous; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.miscellaneous.DeDuplicatingTokenFilter; diff --git a/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java 
b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java index c4601a9053f54..bce9073f85bbe 100644 --- a/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java +++ b/server/src/test/java/org/opensearch/lucene/analysis/miscellaneous/TruncateTokenFilterTests.java @@ -33,7 +33,7 @@ package org.opensearch.lucene.analysis.miscellaneous; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.miscellaneous.TruncateTokenFilter; diff --git a/server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java b/server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java index 514ff904e6ff3..9a2a44abbb4ef 100644 --- a/server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java +++ b/server/src/test/java/org/opensearch/lucene/grouping/CollapsingTopDocsCollectorTests.java @@ -40,8 +40,8 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.search.CheckHits; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.search.CheckHits; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; diff --git a/server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java b/server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java index fcce7819d6143..e583c12473a30 100644 --- a/server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java +++ 
b/server/src/test/java/org/opensearch/lucene/index/ShuffleForcedMergePolicyTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.BaseMergePolicyTestCase; +import org.apache.lucene.tests.index.BaseMergePolicyTestCase; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; diff --git a/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java b/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java index 7f89176c302df..209b0d959c8b0 100644 --- a/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java +++ b/server/src/test/java/org/opensearch/lucene/misc/search/similarity/LegacyBM25SimilarityTests.java @@ -29,7 +29,7 @@ import org.apache.lucene.misc.search.similarity.LegacyBM25Similarity; import org.apache.lucene.search.similarities.BM25Similarity; -import org.apache.lucene.search.similarities.BaseSimilarityTestCase; +import org.apache.lucene.tests.search.similarities.BaseSimilarityTestCase; import org.apache.lucene.search.similarities.Similarity; @Deprecated diff --git a/server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java b/server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java index 29a826037770f..4e12c946cf353 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BaseRandomBinaryDocValuesRangeQueryTestCase.java @@ -33,7 +33,7 @@ import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; -import org.apache.lucene.search.BaseRangeFieldQueryTestCase; +import 
org.apache.lucene.tests.search.BaseRangeFieldQueryTestCase; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.opensearch.index.mapper.RangeFieldMapper; diff --git a/server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java index 70e3c7ca53995..faf84b53e0b16 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BinaryDocValuesRangeQueryTests.java @@ -34,7 +34,7 @@ import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java index f46a8bbca0d2a..bf9aac344e3df 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/BlendedTermQueryTests.java @@ -31,7 +31,7 @@ package org.opensearch.lucene.queries; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; @@ -47,7 +47,7 @@ import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.tests.search.QueryUtils; import org.apache.lucene.search.QueryVisitor; import 
org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; diff --git a/server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java index e4d3615277455..4428fe979b5f4 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/IntegerRandomBinaryDocValuesRangeQueryTests.java @@ -31,7 +31,7 @@ package org.opensearch.lucene.queries; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.index.mapper.RangeType; public class IntegerRandomBinaryDocValuesRangeQueryTests extends BaseRandomBinaryDocValuesRangeQueryTestCase { diff --git a/server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java index cd8457b828342..6d01e96b48132 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/LongRandomBinaryDocValuesRangeQueryTests.java @@ -31,7 +31,7 @@ package org.opensearch.lucene.queries; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.index.mapper.RangeType; public class LongRandomBinaryDocValuesRangeQueryTests extends BaseRandomBinaryDocValuesRangeQueryTestCase { diff --git a/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java index d5c789ae0aa89..a75a1d5b2fdb7 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/MinDocQueryTests.java @@ -35,10 +35,10 @@ import 
org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.tests.search.QueryUtils; import org.apache.lucene.store.Directory; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java index b4e035443cd82..4920e252e7a6b 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/SearchAfterSortedDocQueryTests.java @@ -38,12 +38,12 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.tests.search.QueryUtils; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; diff --git a/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java b/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java index 110a64e102ed4..bf266af38fc59 100644 --- a/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java +++ b/server/src/test/java/org/opensearch/lucene/queries/SpanMatchNoDocsQueryTests.java @@ 
-33,7 +33,7 @@ package org.opensearch.lucene.queries; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.DirectoryReader; @@ -47,7 +47,7 @@ import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.tests.search.QueryUtils; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.store.Directory; import org.opensearch.test.OpenSearchTestCase; diff --git a/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index 70a260837271c..5383a153034e9 100644 --- a/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/opensearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -43,7 +43,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.queries.CommonTermsQuery; import org.apache.lucene.search.BooleanClause; diff --git a/server/src/test/java/org/opensearch/monitor/fs/FsHealthServiceTests.java b/server/src/test/java/org/opensearch/monitor/fs/FsHealthServiceTests.java index 7517e24d555db..0246c8a85e97a 100644 --- a/server/src/test/java/org/opensearch/monitor/fs/FsHealthServiceTests.java +++ b/server/src/test/java/org/opensearch/monitor/fs/FsHealthServiceTests.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.Level; 
import org.apache.logging.log4j.LogManager; -import org.apache.lucene.mockfile.FilterFileChannel; -import org.apache.lucene.mockfile.FilterFileSystemProvider; +import org.apache.lucene.tests.mockfile.FilterFileChannel; +import org.apache.lucene.tests.mockfile.FilterFileSystemProvider; import org.opensearch.cluster.coordination.DeterministicTaskQueue; import org.opensearch.common.io.PathUtils; import org.opensearch.common.io.PathUtilsForTesting; diff --git a/server/src/test/java/org/opensearch/node/NodeTests.java b/server/src/test/java/org/opensearch/node/NodeTests.java index a253c618ac659..783fe1abdca17 100644 --- a/server/src/test/java/org/opensearch/node/NodeTests.java +++ b/server/src/test/java/org/opensearch/node/NodeTests.java @@ -31,7 +31,7 @@ package org.opensearch.node; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.SetOnce; import org.opensearch.bootstrap.BootstrapCheck; import org.opensearch.bootstrap.BootstrapContext; diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index bddc61211592e..e022e78e7424b 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -35,7 +35,7 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.Constants; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.bootstrap.JarHell; diff --git a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index a5224489b5bba..7cbe3d6dbd30a 100644 --- 
a/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/opensearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -33,7 +33,7 @@ package org.opensearch.repositories.blobstore; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.Version; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.metadata.Metadata; diff --git a/server/src/test/java/org/opensearch/repositories/fs/FsRepositoryTests.java b/server/src/test/java/org/opensearch/repositories/fs/FsRepositoryTests.java index f2c6a13b92597..2bfcec6e75ffc 100644 --- a/server/src/test/java/org/opensearch/repositories/fs/FsRepositoryTests.java +++ b/server/src/test/java/org/opensearch/repositories/fs/FsRepositoryTests.java @@ -31,7 +31,7 @@ package org.opensearch.repositories.fs; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedDocValuesField; @@ -46,7 +46,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.IOSupplier; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.Version; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.cluster.metadata.IndexMetadata; diff --git a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java index 48c4717f664cb..3429e1635b58b 100644 --- a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java @@ -33,7 +33,7 @@ package org.opensearch.search; import 
org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/SearchCancellationTests.java b/server/src/test/java/org/opensearch/search/SearchCancellationTests.java index 3942bdc0247f4..1927558f94094 100644 --- a/server/src/test/java/org/opensearch/search/SearchCancellationTests.java +++ b/server/src/test/java/org/opensearch/search/SearchCancellationTests.java @@ -38,14 +38,14 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.PointValues; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.automaton.CompiledAutomaton; import org.apache.lucene.util.automaton.RegExp; import org.opensearch.core.internal.io.IOUtils; diff --git a/server/src/test/java/org/opensearch/search/SearchHitsTests.java b/server/src/test/java/org/opensearch/search/SearchHitsTests.java index fc5bfc90a1e34..11df58c4ee7c7 100644 --- a/server/src/test/java/org/opensearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/opensearch/search/SearchHitsTests.java @@ -34,7 +34,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TotalHits; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.action.OriginalIndices; import 
org.opensearch.common.Strings; import org.opensearch.common.bytes.BytesReference; diff --git a/server/src/test/java/org/opensearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/opensearch/search/aggregations/MultiBucketCollectorTests.java index d20c71704c76f..ae1d5c60c231a 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/MultiBucketCollectorTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java index e0d90d96923b8..cc9628a13c060 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketsAggregatorTests.java 
b/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketsAggregatorTests.java index 0f401a95713b9..55f8d11e2d934 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/BucketsAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.store.Directory; import org.opensearch.common.breaker.CircuitBreaker; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/GlobalAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/GlobalAggregatorTests.java index 3f0dd797f1c77..abcaf06645a20 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/GlobalAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/GlobalAggregatorTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 07c6e927c2030..88b2323b8adfc 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ 
b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -32,7 +32,7 @@ package org.opensearch.search.aggregations.bucket.composite; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; @@ -46,7 +46,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; @@ -60,7 +60,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.OpenSearchParseException; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 0ad6d30df337f..3dccdf8dab95e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -43,7 +43,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import 
org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorTests.java index 8a32e3655d6e1..4ed95bf7391a6 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java index 8412665a1e8f6..8ff2e57d5dbb6 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java 
b/server/src/test/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java index 8292377349b79..17fddb8978499 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java index e69feb3744aa2..0f49e02febabe 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -39,7 +39,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java index 3524bef8b842c..ff9122aa42326 100644 --- 
a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTestCase.java @@ -34,7 +34,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.util.BytesRef; import org.opensearch.common.CheckedBiConsumer; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index d954c2641c048..597175d89bcfe 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java index 413ba02b21272..87f3ed166e5d0 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java +++ 
b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.queries.BinaryDocValuesRangeQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index 7211b0366e36a..288b22ccfcc92 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -32,7 +32,7 @@ package org.opensearch.search.aggregations.bucket.histogram; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.InternalAggregations; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java index 5788dbac67e39..72b11c1657fb5 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogramTests.java @@ -32,7 +32,7 @@ package org.opensearch.search.aggregations.bucket.histogram; -import org.apache.lucene.util.TestUtil; +import 
org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.breaker.CircuitBreaker; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.MockBigArrays; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java index 4313b9667c411..e7b22a9a57476 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java index 482f3eb02b7dc..41bd0d77bff00 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import 
org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java index af1a112a40924..b1d62f3402bc3 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/missing/MissingAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/missing/MissingAggregatorTests.java index 02667bd6ec5ae..e888972b8e447 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/missing/MissingAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/missing/MissingAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java 
b/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index b394063033637..8ab0cc0023346 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -41,7 +41,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 60486aedd3d0d..cf0e31bc63467 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.opensearch.index.mapper.IdFieldMapper; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java index e9270c4ad1556..9c2578a2378cc 100644 --- 
a/server/src/test/java/org/opensearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java @@ -37,7 +37,7 @@ import java.util.Set; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.index.fielddata.AbstractSortedSetDocValues; import org.opensearch.index.fielddata.SortedBinaryDocValues; import org.opensearch.search.aggregations.LeafBucketCollector; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java index f9e3a6068ebfd..e6d7230aa6cb0 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/range/DateRangeAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java index deeea6bc1daef..b74f21ef09037 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.InetAddressPoint; import 
org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/range/RangeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/range/RangeAggregatorTests.java index 3825efe3e29c2..f1be4d1ede930 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/range/RangeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/range/RangeAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java index 46a7953d5315a..427d0b89aa688 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import 
org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreDoc; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java index b66694dda229a..bb07b9c4af37e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java @@ -40,7 +40,7 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java index ff348c6fca057..7703afa88d93c 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java @@ -34,7 +34,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java 
b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java index cf0a1bf090411..05197c7e85844 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java index 4d900c9a18f79..13e41d5a2e543 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index e31472ffebcdd..9a9a03e715644 100644 --- 
a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -41,7 +41,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java index 2c86adfa20358..883196d290154 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java @@ -43,7 +43,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 85816ef51d521..a9e819e7cbaf2 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -44,7 +44,7 @@ 
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/AvgAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/AvgAggregatorTests.java index 2dab40eb0e17d..99c6ed121011e 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/AvgAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/AvgAggregatorTests.java @@ -39,7 +39,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityAggregatorTests.java index 252fed3a43f0c..e33851f286613 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import 
org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java index c46f3a4d6b5df..6fab4436182c2 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java index 78b2d047cfa06..6440c62e58e18 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java 
b/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java index ee95b373c779a..6883a7ff15953 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java @@ -34,7 +34,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java index 3e1e3bb0da6f3..e459b63aa058b 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java index cff342096704f..bbf7f6cfd9cc2 100644 --- 
a/server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java @@ -39,7 +39,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java index 07037be28a19f..ec4a0e1e4a675 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -356,7 +356,7 @@ private Comparator sortFieldsComparator(SortField[] sortFields) { FieldComparator[] comparators = new FieldComparator[sortFields.length]; for (int i = 0; i < sortFields.length; i++) { // Values passed to getComparator shouldn't matter - comparators[i] = sortFields[i].getComparator(0, 0); + comparators[i] = sortFields[i].getComparator(0, false); } return (lhs, rhs) -> { FieldDoc l = (FieldDoc) lhs; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java index acff8305938e8..3d5ad3f5c163c 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/MaxAggregatorTests.java @@ -49,7 +49,7 @@ import org.apache.lucene.index.MultiReader; import 
org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.PointValues; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java index 661438832afec..0cf898090e752 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/MinAggregatorTests.java index 0fa168b0e7371..05b3c5c7e57db 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/MinAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/MinAggregatorTests.java @@ -50,7 +50,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import 
org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java index df5dbede848b6..f2a61521eff1f 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/StatsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/StatsAggregatorTests.java index 66a6642f499ae..d8d736595164a 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/StatsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/StatsAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/SumAggregatorTests.java 
b/server/src/test/java/org/opensearch/search/aggregations/metrics/SumAggregatorTests.java index 08a257935eb9e..8c0087ca0b87d 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/SumAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/SumAggregatorTests.java @@ -40,7 +40,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java index e877b9330557f..2bdc8d88bc366 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java index 3122b2fee372c..50415dc10df7e 100644 --- 
a/server/src/test/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorTests.java index a8f9383c07125..7dc80ab6f4a7b 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -40,7 +40,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.queryparser.classic.QueryParser; import org.apache.lucene.search.BooleanClause.Occur; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/ValueCountAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/ValueCountAggregatorTests.java index f5b6e289e3584..590c927a44fe0 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/ValueCountAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/ValueCountAggregatorTests.java @@ -41,7 +41,7 @@ import 
org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java index 2024143416991..844f05f1d2208 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; diff --git a/server/src/test/java/org/opensearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java index b9e7cf5e0efbd..a5779d4a289f8 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/pipeline/AvgBucketAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import 
org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java index 82b3a368069ab..a1ff2a40d0404 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/pipeline/BucketScriptAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index 6c88ed3165ae0..fdd898b8fbc4c 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -37,7 +37,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; diff --git 
a/server/src/test/java/org/opensearch/search/aggregations/pipeline/DerivativeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/pipeline/DerivativeAggregatorTests.java index e7418237a735f..88628cd44c721 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/pipeline/DerivativeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/pipeline/DerivativeAggregatorTests.java @@ -36,7 +36,7 @@ import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/pipeline/MovFnAggrgatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/pipeline/MovFnAggrgatorTests.java index 276285d2e494f..d841625272828 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/pipeline/MovFnAggrgatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/pipeline/MovFnAggrgatorTests.java @@ -38,7 +38,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/MissingValuesTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/MissingValuesTests.java index 40a60ea4a3ca3..598c1323fc13f 100644 --- 
a/server/src/test/java/org/opensearch/search/aggregations/support/MissingValuesTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/MissingValuesTests.java @@ -38,7 +38,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.common.geo.GeoPoint; import org.opensearch.index.fielddata.AbstractSortedNumericDocValues; import org.opensearch.index.fielddata.AbstractSortedSetDocValues; diff --git a/server/src/test/java/org/opensearch/search/collapse/CollapseBuilderTests.java b/server/src/test/java/org/opensearch/search/collapse/CollapseBuilderTests.java index 8a05f4041d0e1..88060b645fb90 100644 --- a/server/src/test/java/org/opensearch/search/collapse/CollapseBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/collapse/CollapseBuilderTests.java @@ -31,7 +31,7 @@ package org.opensearch.search.collapse; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; diff --git a/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/PlainHighlighterTests.java b/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/PlainHighlighterTests.java index 95dca93e9d657..18288aeac13e1 100644 --- a/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/PlainHighlighterTests.java +++ b/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/PlainHighlighterTests.java @@ -32,12 +32,12 @@ package org.opensearch.search.fetch.subphase.highlight; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.index.Term; import 
org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.highlight.QueryScorer; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; public class PlainHighlighterTests extends LuceneTestCase { diff --git a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java index 1722cb564e231..61458512b84e4 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java @@ -33,7 +33,7 @@ package org.opensearch.search.geo; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; -import org.apache.lucene.geo.GeoTestUtil; +import org.apache.lucene.tests.geo.GeoTestUtil; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.CheckedSupplier; diff --git a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java index 30bb29dbf2aa8..afaab15e1431e 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java @@ -41,7 +41,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -50,7 +50,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.QueryVisitor; -import 
org.apache.lucene.search.RandomApproximationQuery; +import org.apache.lucene.tests.search.RandomApproximationQuery; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; @@ -59,7 +59,7 @@ import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.core.internal.io.IOUtils; import org.opensearch.search.internal.ContextIndexSearcher; import org.opensearch.search.profile.ProfileResult; diff --git a/server/src/test/java/org/opensearch/search/profile/query/RandomQueryGenerator.java b/server/src/test/java/org/opensearch/search/profile/query/RandomQueryGenerator.java index cd5e6d5b920df..2409333c6bcef 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/RandomQueryGenerator.java +++ b/server/src/test/java/org/opensearch/search/profile/query/RandomQueryGenerator.java @@ -32,7 +32,7 @@ package org.opensearch.search.profile.query; -import org.apache.lucene.util.English; +import org.apache.lucene.tests.util.English; import org.opensearch.common.unit.Fuzziness; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.DisMaxQueryBuilder; diff --git a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java index 3f7761f3f18a0..b87c11dce5be2 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java @@ -48,7 +48,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import 
org.apache.lucene.index.Term; import org.opensearch.lucene.queries.MinDocQuery; import org.apache.lucene.queries.spans.SpanNearQuery; @@ -609,7 +609,10 @@ public void testIndexSortScrollOptimization() throws Exception { FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; for (int i = 0; i < searchSortAndFormat.sort.getSort().length; i++) { @SuppressWarnings("unchecked") - FieldComparator comparator = (FieldComparator) searchSortAndFormat.sort.getSort()[i].getComparator(1, i); + FieldComparator comparator = (FieldComparator) searchSortAndFormat.sort.getSort()[i].getComparator( + 1, + false + ); int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]); if (cmp == 0) { continue; diff --git a/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java index f3cc07e6d8aa6..6d2817269c838 100644 --- a/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java @@ -278,7 +278,7 @@ public SortField.Type reducedType() { } @Override - public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { + public FieldComparator newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { return null; } diff --git a/server/src/test/java/org/opensearch/search/slice/DocValuesSliceQueryTests.java b/server/src/test/java/org/opensearch/search/slice/DocValuesSliceQueryTests.java index 0a730a3fb8d4f..2bf1225aa9b11 100644 --- a/server/src/test/java/org/opensearch/search/slice/DocValuesSliceQueryTests.java +++ b/server/src/test/java/org/opensearch/search/slice/DocValuesSliceQueryTests.java @@ -39,11 +39,11 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import 
org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.tests.search.QueryUtils; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/opensearch/search/slice/SliceBuilderTests.java index 6deae9d8dae33..a7cf4ef6c0a2b 100644 --- a/server/src/test/java/org/opensearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/slice/SliceBuilderTests.java @@ -32,7 +32,7 @@ package org.opensearch.search.slice; -import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexReader; diff --git a/server/src/test/java/org/opensearch/search/slice/TermsSliceQueryTests.java b/server/src/test/java/org/opensearch/search/slice/TermsSliceQueryTests.java index c9277382a45dc..84a65b3c8a7cb 100644 --- a/server/src/test/java/org/opensearch/search/slice/TermsSliceQueryTests.java +++ b/server/src/test/java/org/opensearch/search/slice/TermsSliceQueryTests.java @@ -38,11 +38,11 @@ import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.tests.search.QueryUtils; import 
org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.store.Directory; diff --git a/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java index 070855481966f..44d48e9073e23 100644 --- a/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/opensearch/search/sort/FieldSortBuilderTests.java @@ -42,10 +42,10 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.sandbox.document.HalfFloatPoint; -import org.apache.lucene.search.AssertingIndexSearcher; +import org.apache.lucene.tests.search.AssertingIndexSearcher; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSelector; diff --git a/server/src/test/java/org/opensearch/watcher/FileWatcherTests.java b/server/src/test/java/org/opensearch/watcher/FileWatcherTests.java index 6a8525de034c8..74c9cd91b72f0 100644 --- a/server/src/test/java/org/opensearch/watcher/FileWatcherTests.java +++ b/server/src/test/java/org/opensearch/watcher/FileWatcherTests.java @@ -32,7 +32,7 @@ package org.opensearch.watcher; import org.opensearch.core.internal.io.IOUtils; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.test.OpenSearchTestCase; import java.io.BufferedWriter; diff --git a/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java index 1b34dcacef7e7..4f135f2d14a75 100644 --- 
a/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java @@ -35,7 +35,7 @@ import com.carrotsearch.randomizedtesting.RandomizedRunner; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.common.Booleans; import org.opensearch.common.Strings; import org.opensearch.common.SuppressForbidden; diff --git a/test/framework/src/main/java/org/opensearch/cluster/DataStreamTestHelper.java b/test/framework/src/main/java/org/opensearch/cluster/DataStreamTestHelper.java index d3d17f28de344..af78fd36dceb4 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/opensearch/cluster/DataStreamTestHelper.java @@ -32,7 +32,7 @@ package org.opensearch.cluster; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.Version; import org.opensearch.cluster.metadata.DataStream; import org.opensearch.cluster.metadata.IndexMetadata; diff --git a/test/framework/src/main/java/org/opensearch/cluster/coordination/CoordinationStateTestCluster.java b/test/framework/src/main/java/org/opensearch/cluster/coordination/CoordinationStateTestCluster.java index 32ef47c4366a4..0e308e64617fe 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/coordination/CoordinationStateTestCluster.java +++ b/test/framework/src/main/java/org/opensearch/cluster/coordination/CoordinationStateTestCluster.java @@ -50,7 +50,7 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.rarely; import static java.util.stream.Collectors.toSet; -import static org.apache.lucene.util.LuceneTestCase.random; +import static org.apache.lucene.tests.util.LuceneTestCase.random; import static 
org.opensearch.test.OpenSearchTestCase.randomBoolean; import static org.opensearch.test.OpenSearchTestCase.randomFrom; import static org.opensearch.test.OpenSearchTestCase.randomIntBetween; diff --git a/test/framework/src/main/java/org/opensearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/opensearch/cluster/routing/TestShardRouting.java index c164733370359..29207cbc4ada1 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/opensearch/cluster/routing/TestShardRouting.java @@ -41,7 +41,7 @@ import org.opensearch.snapshots.SnapshotId; import org.opensearch.test.OpenSearchTestCase; -import static org.apache.lucene.util.LuceneTestCase.random; +import static org.apache.lucene.tests.util.LuceneTestCase.random; import static org.opensearch.test.OpenSearchTestCase.randomAlphaOfLength; /** diff --git a/test/framework/src/main/java/org/opensearch/cluster/service/FakeThreadPoolMasterService.java b/test/framework/src/main/java/org/opensearch/cluster/service/FakeThreadPoolMasterService.java index c3c591f5e1ea8..14d9f9554004f 100644 --- a/test/framework/src/main/java/org/opensearch/cluster/service/FakeThreadPoolMasterService.java +++ b/test/framework/src/main/java/org/opensearch/cluster/service/FakeThreadPoolMasterService.java @@ -52,7 +52,7 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; -import static org.apache.lucene.util.LuceneTestCase.random; +import static org.apache.lucene.tests.util.LuceneTestCase.random; import static org.opensearch.test.OpenSearchTestCase.randomInt; public class FakeThreadPoolMasterService extends MasterService { diff --git a/test/framework/src/main/java/org/opensearch/common/io/PathUtilsForTesting.java b/test/framework/src/main/java/org/opensearch/common/io/PathUtilsForTesting.java index 2811c45f03e08..e6f2ac5d4c4de 100644 --- a/test/framework/src/main/java/org/opensearch/common/io/PathUtilsForTesting.java +++ 
b/test/framework/src/main/java/org/opensearch/common/io/PathUtilsForTesting.java @@ -32,7 +32,7 @@ package org.opensearch.common.io; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import java.nio.file.FileSystem; diff --git a/test/framework/src/main/java/org/opensearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/opensearch/common/util/MockBigArrays.java index b46a7bdbcfce7..fc628ca5228e6 100644 --- a/test/framework/src/main/java/org/opensearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/opensearch/common/util/MockBigArrays.java @@ -37,7 +37,7 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.common.breaker.CircuitBreaker; import org.opensearch.common.util.set.Sets; import org.opensearch.indices.breaker.CircuitBreakerService; diff --git a/test/framework/src/main/java/org/opensearch/common/util/MockPageCacheRecycler.java b/test/framework/src/main/java/org/opensearch/common/util/MockPageCacheRecycler.java index 3ae9ac6996524..471cf01a3f7d2 100644 --- a/test/framework/src/main/java/org/opensearch/common/util/MockPageCacheRecycler.java +++ b/test/framework/src/main/java/org/opensearch/common/util/MockPageCacheRecycler.java @@ -32,7 +32,7 @@ package org.opensearch.common.util; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.common.recycler.Recycler.V; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.set.Sets; diff --git a/test/framework/src/main/java/org/opensearch/geo/GeometryTestUtils.java b/test/framework/src/main/java/org/opensearch/geo/GeometryTestUtils.java index e3fa5281b54de..b588243803d30 100644 --- 
a/test/framework/src/main/java/org/opensearch/geo/GeometryTestUtils.java +++ b/test/framework/src/main/java/org/opensearch/geo/GeometryTestUtils.java @@ -32,7 +32,7 @@ package org.opensearch.geo; -import org.apache.lucene.geo.GeoTestUtil; +import org.apache.lucene.tests.geo.GeoTestUtil; import org.opensearch.geometry.Circle; import org.opensearch.geometry.Geometry; import org.opensearch.geometry.GeometryCollection; diff --git a/test/framework/src/main/java/org/opensearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/opensearch/index/MapperTestUtils.java index c64a36c60eb73..b01b4ee20c421 100644 --- a/test/framework/src/main/java/org/opensearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/opensearch/index/MapperTestUtils.java @@ -52,7 +52,7 @@ import java.nio.file.Path; import java.util.Collections; -import static org.apache.lucene.util.LuceneTestCase.expectThrows; +import static org.apache.lucene.tests.util.LuceneTestCase.expectThrows; import static org.opensearch.test.OpenSearchTestCase.createTestAnalysis; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; diff --git a/test/framework/src/main/java/org/opensearch/index/MockEngineFactoryPlugin.java b/test/framework/src/main/java/org/opensearch/index/MockEngineFactoryPlugin.java index 812546b9c65e7..7d3282ca4443a 100644 --- a/test/framework/src/main/java/org/opensearch/index/MockEngineFactoryPlugin.java +++ b/test/framework/src/main/java/org/opensearch/index/MockEngineFactoryPlugin.java @@ -31,7 +31,7 @@ package org.opensearch.index; -import org.apache.lucene.index.AssertingDirectoryReader; +import org.apache.lucene.tests.index.AssertingDirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.opensearch.common.settings.Setting; import org.opensearch.index.engine.EngineFactory; diff --git a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java 
b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java index 03ac664da1734..db6c53bd0aa77 100644 --- a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java @@ -35,7 +35,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.store.Directory; import org.opensearch.Version; import org.opensearch.cluster.metadata.IndexMetadata; diff --git a/test/framework/src/main/java/org/opensearch/index/store/OpenSearchBaseDirectoryTestCase.java b/test/framework/src/main/java/org/opensearch/index/store/OpenSearchBaseDirectoryTestCase.java index 945c3d8a3bb66..428b1a83f97d5 100644 --- a/test/framework/src/main/java/org/opensearch/index/store/OpenSearchBaseDirectoryTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/store/OpenSearchBaseDirectoryTestCase.java @@ -37,9 +37,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.store.BaseDirectoryTestCase; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TimeUnits; +import org.apache.lucene.tests.store.BaseDirectoryTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TimeUnits; import org.opensearch.bootstrap.BootstrapForTesting; import org.opensearch.test.junit.listeners.ReproduceInfoPrinter; diff --git a/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java index 80c573f3cc9ae..fd762289caddb 100644 --- 
a/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/opensearch/indices/analysis/AnalysisFactoryTestCase.java @@ -61,7 +61,7 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { private static final Map> KNOWN_TOKENIZERS = new MapBuilder>() - // exposed in ES + // exposed in OpenSearch .put("classic", MovedToAnalysisCommon.class) .put("edgengram", MovedToAnalysisCommon.class) .put("keyword", MovedToAnalysisCommon.class) @@ -81,7 +81,7 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { .immutableMap(); static final Map> KNOWN_TOKENFILTERS = new MapBuilder>() - // exposed in ES + // exposed in OpenSearch .put("apostrophe", MovedToAnalysisCommon.class) .put("arabicnormalization", MovedToAnalysisCommon.class) .put("arabicstem", MovedToAnalysisCommon.class) @@ -217,6 +217,10 @@ public abstract class AnalysisFactoryTestCase extends OpenSearchTestCase { .put("japanesecompletion", Void.class) // LUCENE-9575: recognize arbitrary patterns that include punctuation .put("patterntyping", Void.class) + // LUCENE-10248 + .put("spanishpluralstem", Void.class) + // LUCENE-10352 + .put("daitchmokotoffsoundex", Void.class) .immutableMap(); static final Map> KNOWN_CHARFILTERS = new MapBuilder>() diff --git a/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchMockAPIBasedRepositoryIntegTestCase.java index 3e075dd1bbc3b..f082c7a45a207 100644 --- a/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/repositories/blobstore/OpenSearchMockAPIBasedRepositoryIntegTestCase.java @@ -39,7 +39,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import 
org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.Strings; diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java index 241ae1170817a..38a0253305833 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java @@ -39,15 +39,15 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StoredField; -import org.apache.lucene.index.AssertingDirectoryReader; +import org.apache.lucene.tests.index.AssertingDirectoryReader; import org.apache.lucene.index.CompositeReaderContext; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.sandbox.document.HalfFloatPoint; -import org.apache.lucene.search.AssertingIndexSearcher; +import org.apache.lucene.tests.search.AssertingIndexSearcher; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; @@ -652,7 +652,7 @@ protected static DirectoryReader wrapInMockESDirectoryReader(DirectoryReader dir /** * Added to randomly run with more assertions on the index searcher level, - * like {@link org.apache.lucene.util.LuceneTestCase#newSearcher(IndexReader)}, which can't 
be used because it also + * like {@link org.apache.lucene.tests.util.LuceneTestCase#newSearcher(IndexReader)}, which can't be used because it also * wraps in the IndexSearcher's IndexReader with other implementations that we can't handle. (e.g. ParallelCompositeReader) */ protected static IndexSearcher newIndexSearcher(IndexReader indexReader) { @@ -666,7 +666,7 @@ protected static IndexSearcher newIndexSearcher(IndexReader indexReader) { /** * Added to randomly run with more assertions on the index reader level, - * like {@link org.apache.lucene.util.LuceneTestCase#wrapReader(IndexReader)}, which can't be used because it also + * like {@link org.apache.lucene.tests.util.LuceneTestCase#wrapReader(IndexReader)}, which can't be used because it also * wraps in the IndexReader with other implementations that we can't handle. (e.g. ParallelCompositeReader) */ protected static IndexReader maybeWrapReaderEs(DirectoryReader reader) throws IOException { diff --git a/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java index 1bcde48900364..abfb3edc26417 100644 --- a/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/opensearch/test/CorruptionUtils.java @@ -50,7 +50,7 @@ import java.nio.file.StandardOpenOption; import java.util.Random; -import static org.apache.lucene.util.LuceneTestCase.assumeTrue; +import static org.apache.lucene.tests.util.LuceneTestCase.assumeTrue; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; diff --git a/test/framework/src/main/java/org/opensearch/test/FieldMaskingReader.java b/test/framework/src/main/java/org/opensearch/test/FieldMaskingReader.java index e2ff8c09cd329..dba42b546c1a2 100644 --- a/test/framework/src/main/java/org/opensearch/test/FieldMaskingReader.java +++ 
b/test/framework/src/main/java/org/opensearch/test/FieldMaskingReader.java @@ -32,7 +32,7 @@ package org.opensearch.test; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.FieldFilterLeafReader; +import org.apache.lucene.tests.index.FieldFilterLeafReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.LeafReader; diff --git a/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java index 5ae441ed651b1..11b39f0ff8da1 100644 --- a/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java @@ -155,8 +155,8 @@ import java.util.stream.IntStream; import java.util.stream.Stream; -import static org.apache.lucene.util.LuceneTestCase.TEST_NIGHTLY; -import static org.apache.lucene.util.LuceneTestCase.rarely; +import static org.apache.lucene.tests.util.LuceneTestCase.TEST_NIGHTLY; +import static org.apache.lucene.tests.util.LuceneTestCase.rarely; import static org.opensearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static org.opensearch.common.unit.TimeValue.timeValueMillis; import static org.opensearch.common.unit.TimeValue.timeValueSeconds; diff --git a/test/framework/src/main/java/org/opensearch/test/MockKeywordPlugin.java b/test/framework/src/main/java/org/opensearch/test/MockKeywordPlugin.java index d837b98e22111..c27f3f169fbae 100644 --- a/test/framework/src/main/java/org/opensearch/test/MockKeywordPlugin.java +++ b/test/framework/src/main/java/org/opensearch/test/MockKeywordPlugin.java @@ -31,7 +31,7 @@ package org.opensearch.test; -import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.tests.analysis.MockTokenizer; import org.opensearch.index.analysis.TokenizerFactory; import 
org.opensearch.indices.analysis.AnalysisModule; import org.opensearch.plugins.AnalysisPlugin; diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index b9ee655dcc979..1c09fb2ff8c04 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -39,7 +39,7 @@ import org.apache.http.HttpHost; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TotalHits; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.OpenSearchException; import org.opensearch.ExceptionsHelper; import org.opensearch.action.ActionListener; diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java index 6f4ea1ecb0601..96698036fca55 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java @@ -55,11 +55,11 @@ import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.apache.lucene.util.TestRuleMarkFailure; -import org.apache.lucene.util.TestUtil; -import org.apache.lucene.util.TimeUnits; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; +import org.apache.lucene.tests.util.TestRuleMarkFailure; +import org.apache.lucene.tests.util.TestUtil; +import org.apache.lucene.tests.util.TimeUnits; import org.opensearch.Version; import org.opensearch.bootstrap.BootstrapForTesting; import 
org.opensearch.bootstrap.JavaVersion; diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTokenStreamTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTokenStreamTestCase.java index 48c0451da20d0..e853c1e6314e1 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTokenStreamTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTokenStreamTestCase.java @@ -35,9 +35,9 @@ import com.carrotsearch.randomizedtesting.annotations.Listeners; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; -import org.apache.lucene.analysis.BaseTokenStreamTestCase; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TimeUnits; +import org.apache.lucene.tests.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TimeUnits; import org.opensearch.Version; import org.opensearch.bootstrap.BootstrapForTesting; import org.opensearch.cluster.metadata.IndexMetadata; diff --git a/test/framework/src/main/java/org/opensearch/test/client/RandomizingClient.java b/test/framework/src/main/java/org/opensearch/test/client/RandomizingClient.java index 42aaca6c242df..e0e916c6da0f1 100644 --- a/test/framework/src/main/java/org/opensearch/test/client/RandomizingClient.java +++ b/test/framework/src/main/java/org/opensearch/test/client/RandomizingClient.java @@ -33,7 +33,7 @@ package org.opensearch.test.client; import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.lucene.util.TestUtil; +import org.apache.lucene.tests.util.TestUtil; import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchType; import org.opensearch.client.Client; diff --git a/test/framework/src/main/java/org/opensearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/opensearch/test/engine/MockEngineSupport.java index b49bd88310dec..f2427dfda2742 100644 
--- a/test/framework/src/main/java/org/opensearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/opensearch/test/engine/MockEngineSupport.java @@ -33,11 +33,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.index.AssertingDirectoryReader; +import org.apache.lucene.tests.index.AssertingDirectoryReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.OpenSearchException; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java index 421c022b38e9d..28afed1a50e59 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java @@ -90,8 +90,8 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; -import static org.apache.lucene.util.LuceneTestCase.expectThrows; -import static org.apache.lucene.util.LuceneTestCase.expectThrowsAnyOf; +import static org.apache.lucene.tests.util.LuceneTestCase.expectThrows; +import static org.apache.lucene.tests.util.LuceneTestCase.expectThrowsAnyOf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java index f30b1bfdd1e6b..5a404ccd4b9fc 100644 --- 
a/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/OpenSearchClientYamlSuiteTestCase.java @@ -35,7 +35,7 @@ import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.http.HttpHost; -import org.apache.lucene.util.TimeUnits; +import org.apache.lucene.tests.util.TimeUnits; import org.opensearch.Version; import org.opensearch.client.Node; import org.opensearch.client.Request; diff --git a/test/framework/src/main/java/org/opensearch/test/store/MockFSDirectoryFactory.java b/test/framework/src/main/java/org/opensearch/test/store/MockFSDirectoryFactory.java index 03af3977e229b..47952af1cd06c 100644 --- a/test/framework/src/main/java/org/opensearch/test/store/MockFSDirectoryFactory.java +++ b/test/framework/src/main/java/org/opensearch/test/store/MockFSDirectoryFactory.java @@ -35,12 +35,12 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CheckIndex; -import org.apache.lucene.store.BaseDirectoryWrapper; +import org.apache.lucene.tests.store.BaseDirectoryWrapper; import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockObtainFailedException; -import org.apache.lucene.store.MockDirectoryWrapper; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestRuleMarkFailure; +import org.apache.lucene.tests.store.MockDirectoryWrapper; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TestRuleMarkFailure; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.lucene.Lucene; diff --git a/test/framework/src/main/java/org/opensearch/test/transport/MockTransport.java 
b/test/framework/src/main/java/org/opensearch/test/transport/MockTransport.java index de4d5250fec84..e1e5bcc968047 100644 --- a/test/framework/src/main/java/org/opensearch/test/transport/MockTransport.java +++ b/test/framework/src/main/java/org/opensearch/test/transport/MockTransport.java @@ -63,7 +63,7 @@ import java.util.concurrent.ConcurrentMap; import java.util.function.Function; -import static org.apache.lucene.util.LuceneTestCase.rarely; +import static org.apache.lucene.tests.util.LuceneTestCase.rarely; /** * A basic transport implementation that allows to intercept requests that have been sent diff --git a/test/framework/src/test/java/org/opensearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/opensearch/test/test/InternalTestClusterTests.java index 7e38061392312..7ffd96fb68911 100644 --- a/test/framework/src/test/java/org/opensearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/opensearch/test/test/InternalTestClusterTests.java @@ -31,7 +31,7 @@ package org.opensearch.test.test; -import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.tests.util.LuceneTestCase; import org.opensearch.client.Client; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodeRole; From 3675400b926bfd607cb4e0f38821d02f8849c540 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Wed, 16 Mar 2022 17:35:08 -0500 Subject: [PATCH 40/46] [Remove] type from CIR.mapping and CIRB.mapping (#2478) First pass to remove types from CreateIndexRequest and CreateIndexRequestBuilder mapping method. This method is overloaded several times so the most widely used methods in the RequestBuilder are refactored from mapping to setMapping to avoid confusion, conflicts, and to be consistent with other method names (e.g., setSettings, setCause, setAlias). 
Signed-off-by: Nicholas Walter Knize --- .../common/HighlighterWithAnalyzersTests.java | 9 +- .../script/expression/MoreExpressionIT.java | 10 +- .../TokenCountFieldMapperIntegrationIT.java | 5 +- .../AbstractParentChildTestCase.java | 3 +- .../join/aggregations/ChildrenIT.java | 12 +- .../join/query/ChildQuerySearchIT.java | 118 +++-------- .../opensearch/join/query/InnerHitsIT.java | 34 +-- .../percolator/PercolatorQuerySearchIT.java | 12 +- .../PercolatorQuerySearchTests.java | 4 +- .../ICUCollationKeywordFieldMapperIT.java | 31 +-- .../index/mapper/size/SizeMappingIT.java | 6 +- .../admin/indices/create/CreateIndexIT.java | 16 +- .../action/termvectors/GetTermVectorsIT.java | 36 ++-- .../cluster/SimpleClusterStateIT.java | 6 +- .../gateway/GatewayIndexStateIT.java | 5 +- .../java/org/opensearch/get/GetActionIT.java | 10 +- .../org/opensearch/index/IndexSortIT.java | 8 +- .../index/fielddata/FieldDataLoadingIT.java | 5 +- .../mapper/CopyToMapperIntegrationIT.java | 4 +- .../ExternalValuesMapperIntegrationIT.java | 15 +- .../mapper/MultiFieldsIntegrationIT.java | 12 +- .../PreBuiltAnalyzerIntegrationIT.java | 4 +- .../mapping/SimpleGetFieldMappingsIT.java | 16 +- .../indices/mapping/SimpleGetMappingsIT.java | 10 +- .../org/opensearch/mget/SimpleMgetIT.java | 11 +- .../opensearch/routing/SimpleRoutingIT.java | 36 +--- .../search/aggregations/CombiIT.java | 5 +- .../search/aggregations/EquivalenceIT.java | 20 +- .../search/aggregations/bucket/NestedIT.java | 14 +- .../aggregations/bucket/ReverseNestedIT.java | 10 +- .../aggregations/metrics/CardinalityIT.java | 5 +- .../aggregations/metrics/TopHitsIT.java | 5 +- .../aggregations/pipeline/MaxBucketIT.java | 2 +- .../aggregations/pipeline/MovAvgIT.java | 5 +- .../search/fetch/FetchSubPhasePluginIT.java | 5 +- .../search/fetch/subphase/InnerHitsIT.java | 17 +- .../highlight/HighlighterSearchIT.java | 195 ++++++------------ .../search/fieldcaps/FieldCapabilitiesIT.java | 4 +- .../search/fields/SearchFieldsIT.java | 21 
+- .../functionscore/DecayFunctionScoreIT.java | 80 ++----- .../FunctionScoreFieldValueIT.java | 10 +- .../functionscore/FunctionScorePluginIT.java | 5 +- .../search/functionscore/QueryRescorerIT.java | 39 +--- .../search/geo/GeoBoundingBoxQueryIT.java | 15 +- .../opensearch/search/geo/GeoDistanceIT.java | 5 +- .../opensearch/search/geo/GeoFilterIT.java | 4 +- .../search/morelikethis/MoreLikeThisIT.java | 44 +--- .../search/nested/SimpleNestedIT.java | 33 +-- .../org/opensearch/search/query/ExistsIT.java | 12 +- .../search/query/MultiMatchQueryIT.java | 4 +- .../search/query/QueryStringIT.java | 22 +- .../search/query/SearchQueryIT.java | 21 +- .../search/query/SimpleQueryStringIT.java | 9 +- .../scriptfilter/ScriptQuerySearchIT.java | 8 +- .../search/scroll/DuelScrollIT.java | 5 +- .../opensearch/search/sort/FieldSortIT.java | 35 +--- .../opensearch/search/sort/GeoDistanceIT.java | 25 +-- .../opensearch/search/sort/SimpleSortIT.java | 10 +- .../suggest/CompletionSuggestSearchIT.java | 25 +-- .../ContextCompletionSuggestSearchIT.java | 5 +- .../search/suggest/SuggestSearchIT.java | 54 ++--- .../opensearch/similarity/SimilarityIT.java | 5 +- .../indices/create/CreateIndexRequest.java | 25 ++- .../create/CreateIndexRequestBuilder.java | 31 ++- .../indices/rollover/RolloverRequest.java | 2 +- .../create/CreateIndexRequestTests.java | 6 +- .../mapping/put/PutMappingRequestTests.java | 2 +- .../rollover/RolloverRequestTests.java | 3 +- .../indices/stats/IndicesStatsTests.java | 4 +- .../AbstractTermVectorsTestCase.java | 6 +- .../termvectors/GetTermVectorsTests.java | 2 - .../index/analysis/PreBuiltAnalyzerTests.java | 2 - .../mapper/FieldFilterMapperPluginTests.java | 6 +- .../index/search/NestedHelperTests.java | 2 - .../index/similarity/SimilarityTests.java | 16 -- .../termvectors/TermVectorsServiceTests.java | 6 - .../search/geo/GeoPointShapeQueryTests.java | 12 +- .../opensearch/search/geo/GeoQueryTests.java | 22 +- .../search/geo/GeoShapeQueryTests.java | 12 +- 
.../CategoryContextMappingTests.java | 20 -- .../completion/GeoContextMappingTests.java | 4 - .../index/RandomCreateIndexGenerator.java | 12 +- .../test/OpenSearchSingleNodeTestCase.java | 2 +- 83 files changed, 392 insertions(+), 1026 deletions(-) diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java index faaf636d4a8ff..a8dd2d2578541 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -70,10 +70,8 @@ protected Collection> nodePlugins() { public void testNgramHighlightingWithBrokenPositions() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "test", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("test") .startObject("properties") .startObject("name") .field("type", "text") @@ -88,7 +86,6 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException { .endObject() .endObject() .endObject() - .endObject() ) .setSettings( Settings.builder() @@ -260,7 +257,7 @@ public void testPhrasePrefix() throws IOException { .put("index.analysis.filter.synonym.type", "synonym") .putList("index.analysis.filter.synonym.synonyms", "quick => fast"); - assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).setMapping(type1TermVectorMapping())); ensureGreen(); @@ -421,7 +418,6 @@ public void testPhrasePrefix() throws IOException { public static XContentBuilder type1TermVectorMapping() throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field1") 
.field("type", "text") @@ -432,7 +428,6 @@ public static XContentBuilder type1TermVectorMapping() throws IOException { .field("term_vector", "with_positions_offsets") .endObject() .endObject() - .endObject() .endObject(); } } diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java index 259234d79ab42..450e70c3c8938 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java @@ -641,12 +641,11 @@ public void testPipelineAggregationScript() throws Exception { public void testGeo() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("location") .field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setMapping(xContentBuilder)); ensureGreen(); client().prepareIndex("test") .setId("1") @@ -687,12 +686,11 @@ public void testGeo() throws Exception { public void testBoolean() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("doc") .startObject("properties") .startObject("vip") .field("type", "boolean"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").addMapping("doc", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setMapping(xContentBuilder)); ensureGreen(); indexRandom( true, diff --git 
a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index cbba425a04889..e25344a4bb4e3 100644 --- a/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/javaRestTest/java/org/opensearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -128,10 +128,8 @@ private void init() throws IOException { settings.put("index.analysis.analyzer.mock_english.tokenizer", "standard"); settings.put("index.analysis.analyzer.mock_english.filter", "stop"); prepareCreate("test").setSettings(settings) - .addMapping( - "test", + .setMapping( jsonBuilder().startObject() - .startObject("test") .startObject("properties") .startObject("foo") .field("type", "text") @@ -162,7 +160,6 @@ private void init() throws IOException { .endObject() .endObject() .endObject() - .endObject() ) .get(); ensureGreen(); diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/AbstractParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/AbstractParentChildTestCase.java index c2536897f352a..5566c688aefbf 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/AbstractParentChildTestCase.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/AbstractParentChildTestCase.java @@ -55,8 +55,7 @@ public abstract class AbstractParentChildTestCase extends ParentChildTestCase { @Before public void setupCluster() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "doc", + prepareCreate("test").setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "article", "comment"), "commenter", diff --git 
a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java index 4e98d2aa1af08..08354cbaaf93b 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java @@ -174,8 +174,7 @@ public void testParentWithMultipleBuckets() throws Exception { public void testWithDeletes() throws Exception { String indexName = "xyz"; assertAcked( - prepareCreate(indexName).addMapping( - "doc", + prepareCreate(indexName).setMapping( addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), "name", "keyword") ) ); @@ -234,8 +233,7 @@ public void testPostCollection() throws Exception { prepareCreate(indexName).setSettings( Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ) - .addMapping( - "doc", + .setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, masterType, childType), "brand", @@ -309,8 +307,7 @@ public void testHierarchicalChildrenAggs() { String parentType = "country"; String childType = "city"; assertAcked( - prepareCreate(indexName).addMapping( - "doc", + prepareCreate(indexName).setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, grandParentType, parentType, parentType, childType), "name", @@ -352,8 +349,7 @@ public void testPostCollectAllLeafReaders() throws Exception { // Before we only evaluated segments that yielded matches in 'towns' and 'parent_names' aggs, which caused // us to miss to evaluate child docs in segments we didn't have parent matches for. 
assertAcked( - prepareCreate("index").addMapping( - "doc", + prepareCreate("index").setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parentType", "childType"), "name", diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java index ce83790c0d302..d684f0bfebcfb 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java @@ -102,8 +102,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { public void testMultiLevelChild() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "doc", + prepareCreate("test").setMapping( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child", "child", "grandchild") ) ); @@ -166,9 +165,7 @@ public void testMultiLevelChild() throws Exception { // see #2744 public void test2744() throws IOException { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "foo", "test")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "foo", "test"))); ensureGreen(); // index simple data @@ -185,9 +182,7 @@ public void test2744() throws IOException { } public void testSimpleChildQuery() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); // index simple data @@ -251,9 +246,7 @@ public void testSimpleChildQuery() throws Exception { // Issue #3290 public void 
testCachingBugWithFqueryFilter() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); List builders = new ArrayList<>(); // index simple data @@ -290,9 +283,7 @@ public void testCachingBugWithFqueryFilter() throws Exception { } public void testHasParentFilter() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); Map> parentToChildren = new HashMap<>(); // Childless parent @@ -340,9 +331,7 @@ public void testHasParentFilter() throws Exception { } public void testSimpleChildQueryWithFlush() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); // index simple data with flushes, so we have many segments @@ -408,8 +397,7 @@ public void testSimpleChildQueryWithFlush() throws Exception { public void testScopedFacet() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "doc", + prepareCreate("test").setMapping( addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), "c_field", "keyword") ) ); @@ -459,9 +447,7 @@ public void testScopedFacet() throws Exception { } public void testDeletedParent() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, 
"parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); // index simple data createIndexRequest("test", "parent", "p1", null, "p_field", "p_value1").get(); @@ -496,9 +482,7 @@ public void testDeletedParent() throws Exception { } public void testDfsSearchType() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); // index simple data @@ -526,9 +510,7 @@ public void testDfsSearchType() throws Exception { } public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrChildDocs() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); createIndexRequest("test", "parent", "1", null, "p_field", 1).get(); @@ -551,9 +533,7 @@ public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrCh } public void testCountApiUsage() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); String parentId = "p1"; @@ -584,9 +564,7 @@ public void testCountApiUsage() throws Exception { } public void testExplainUsage() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", 
"child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); String parentId = "p1"; @@ -650,10 +628,8 @@ List createDocBuilders() { public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "doc", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("doc") .startObject("properties") .startObject("join_field") .field("type", "join") @@ -663,7 +639,6 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -758,9 +733,7 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { // Issue #2536 public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); SearchResponse response = client().prepareSearch("test") @@ -792,9 +765,7 @@ public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Excep } public void testHasChildAndHasParentFilter_withFilter() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); createIndexRequest("test", "parent", "1", null, "p_field", 1).get(); @@ -820,9 +791,7 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception { } public void testHasChildInnerHitsHighlighting() throws Exception { - assertAcked( - 
prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); createIndexRequest("test", "parent", "1", null, "p_field", 1).get(); @@ -848,9 +817,7 @@ public void testHasChildInnerHitsHighlighting() throws Exception { } public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); // query filter in case for p/c shouldn't execute per segment, but rather @@ -884,8 +851,7 @@ public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception { public void testSimpleQueryRewrite() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "doc", + prepareCreate("test").setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), "c_field", @@ -945,9 +911,7 @@ public void testSimpleQueryRewrite() throws Exception { // Issue #3144 public void testReIndexingParentAndChildDocuments() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); // index simple data @@ -1004,9 +968,7 @@ public void testReIndexingParentAndChildDocuments() throws Exception { // Issue #3203 public void testHasChildQueryWithMinimumScore() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", 
buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); // index simple data @@ -1031,7 +993,7 @@ public void testHasChildQueryWithMinimumScore() throws Exception { public void testParentFieldQuery() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put("index.refresh_interval", -1)) - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + .setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) ); ensureGreen(); @@ -1063,7 +1025,7 @@ public void testParentFieldQuery() throws Exception { public void testParentIdQuery() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.refresh_interval", -1)) - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + .setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) ); ensureGreen(); @@ -1083,9 +1045,7 @@ public void testParentIdQuery() throws Exception { } public void testHasChildNotBeingCached() throws IOException { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); // index simple data @@ -1146,10 +1106,8 @@ private QueryBuilder randomHasParent(String type, String field, String value) { // Issue #3818 public void testHasChildQueryOnlyReturnsSingleChildType() throws Exception { assertAcked( - prepareCreate("grandissue").addMapping( - "doc", + prepareCreate("grandissue").setMapping( jsonBuilder().startObject() - 
.startObject("doc") .startObject("properties") .startObject("join_field") .field("type", "join") @@ -1160,7 +1118,6 @@ public void testHasChildQueryOnlyReturnsSingleChildType() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -1203,8 +1160,7 @@ public void testHasChildQueryOnlyReturnsSingleChildType() throws Exception { public void testHasChildQueryWithNestedInnerObjects() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "doc", + prepareCreate("test").setMapping( addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"), "objects", "nested") ) ); @@ -1282,9 +1238,7 @@ public void testHasChildQueryWithNestedInnerObjects() throws Exception { } public void testNamedFilters() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); String parentId = "p1"; @@ -1368,7 +1322,7 @@ public void testParentChildQueriesNoParentType() throws Exception { public void testParentChildCaching() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put("index.refresh_interval", -1)) - .addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + .setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) ); ensureGreen(); @@ -1413,9 +1367,7 @@ public void testParentChildCaching() throws Exception { } public void testParentChildQueriesViaScrollApi() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, 
"parent", "child"))); ensureGreen(); for (int i = 0; i < 10; i++) { createIndexRequest("test", "parent", "p" + i, null).get(); @@ -1496,9 +1448,7 @@ private SearchResponse minMaxQuery(ScoreMode scoreMode, int minChildren, Integer } public void testMinMaxChildren() throws Exception { - assertAcked( - prepareCreate("test").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); indexRandom(true, createMinMaxDocBuilders().toArray(new IndexRequestBuilder[0])); @@ -1811,10 +1761,7 @@ public void testMinMaxChildren() throws Exception { public void testHasParentInnerQueryType() { assertAcked( - prepareCreate("test").addMapping( - "doc", - buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent-type", "child-type") - ) + prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent-type", "child-type")) ); createIndexRequest("test", "child-type", "child-id", "parent-id").get(); createIndexRequest("test", "parent-type", "parent-id", null).get(); @@ -1834,8 +1781,7 @@ public void testHasParentInnerQueryType() { public void testHighlightersIgnoreParentChild() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "doc", + prepareCreate("test").setMapping( jsonBuilder().startObject() .startObject("properties") .startObject("join_field") @@ -1888,7 +1834,7 @@ public void testHighlightersIgnoreParentChild() throws IOException { public void testAliasesFilterWithHasChildQuery() throws Exception { assertAcked( - prepareCreate("my-index").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + prepareCreate("my-index").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) ); createIndexRequest("my-index", "parent", "1", 
null).get(); createIndexRequest("my-index", "child", "2", "1").get(); diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java index ed53b1643cc75..ffcc9cf38545f 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java @@ -103,10 +103,8 @@ protected Map, Object>> pluginScripts() { public void testSimpleParentChild() throws Exception { assertAcked( - prepareCreate("articles").addMapping( - "doc", + prepareCreate("articles").setMapping( jsonBuilder().startObject() - .startObject("doc") .startObject("properties") .startObject("join_field") .field("type", "join") @@ -123,7 +121,6 @@ public void testSimpleParentChild() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -223,10 +220,8 @@ public void testSimpleParentChild() throws Exception { public void testRandomParentChild() throws Exception { assertAcked( - prepareCreate("idx").addMapping( - "doc", + prepareCreate("idx").setMapping( jsonBuilder().startObject() - .startObject("doc") .startObject("properties") .startObject("id") .field("type", "keyword") @@ -239,7 +234,6 @@ public void testRandomParentChild() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); int numDocs = scaledRandomIntBetween(5, 50); @@ -323,8 +317,7 @@ public void testRandomParentChild() throws Exception { public void testInnerHitsOnHasParent() throws Exception { assertAcked( - prepareCreate("stack").addMapping( - "doc", + prepareCreate("stack").setMapping( addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "question", "answer"), "body", "text") ) ); @@ -379,8 +372,7 @@ public void testInnerHitsOnHasParent() throws Exception { public void testParentChildMultipleLayers() throws 
Exception { assertAcked( - prepareCreate("articles").addMapping( - "doc", + prepareCreate("articles").setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "article", "comment", "comment", "remark"), "title", @@ -449,8 +441,7 @@ public void testParentChildMultipleLayers() throws Exception { public void testRoyals() throws Exception { assertAcked( - prepareCreate("royals").addMapping( - "doc", + prepareCreate("royals").setMapping( buildParentJoinFieldMappingFromSimplifiedDef( "join_field", true, @@ -536,9 +527,7 @@ public void testRoyals() throws Exception { } public void testMatchesQueriesParentChildInnerHits() throws Exception { - assertAcked( - prepareCreate("index").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) - ); + assertAcked(prepareCreate("index").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); List requests = new ArrayList<>(); requests.add(createIndexRequest("index", "parent", "1", null)); requests.add(createIndexRequest("index", "child", "3", "1", "field", "value1")); @@ -577,7 +566,7 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { public void testUseMaxDocInsteadOfSize() throws Exception { assertAcked( - prepareCreate("index1").addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) + prepareCreate("index1").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")) ); client().admin() .indices() @@ -599,8 +588,7 @@ public void testUseMaxDocInsteadOfSize() throws Exception { public void testNestedInnerHitWrappedInParentChildInnerhit() { assertAcked( - prepareCreate("test").addMapping( - "doc", + prepareCreate("test").setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), "nested_type", @@ -632,8 +620,7 @@ public void 
testNestedInnerHitWrappedInParentChildInnerhit() { public void testInnerHitsWithIgnoreUnmapped() { assertAcked( - prepareCreate("index1").addMapping( - "doc", + prepareCreate("index1").setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), "nested_type", @@ -662,8 +649,7 @@ public void testInnerHitsWithIgnoreUnmapped() { public void testTooHighResultWindow() { assertAcked( - prepareCreate("index1").addMapping( - "doc", + prepareCreate("index1").setMapping( addFieldMappings( buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"), "nested_type", diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index 37c0eb051cd55..1cb5d81136de1 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -844,10 +844,8 @@ public void testManyPercolatorFields() throws Exception { client().admin() .indices() .prepareCreate("test3") - .addMapping( - "type", + .setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("field") .field("type", "keyword") @@ -862,7 +860,6 @@ public void testManyPercolatorFields() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); } @@ -876,10 +873,8 @@ public void testWithMultiplePercolatorFields() throws Exception { client().admin() .indices() .prepareCreate("test2") - .addMapping( - "type", + .setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("field") .field("type", "keyword") @@ -894,7 +889,6 @@ public void testWithMultiplePercolatorFields() throws Exception { .endObject() .endObject() 
.endObject() - .endObject() ) ); @@ -977,7 +971,7 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("employee", mapping)); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping)); client().prepareIndex("test") .setId("q1") .setSource( diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java index e212ad6630e9a..d3da99ffbc102 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java @@ -150,7 +150,7 @@ public void testPercolateQueryWithNestedDocuments_doNotLeakBitsetCacheEntries() .prepareCreate("test") // to avoid normal document from being cached by BitsetFilterCache .setSettings(Settings.builder().put(BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING.getKey(), false)) - .addMapping("employee", mapping) + .setMapping(mapping) ); client().prepareIndex("test") .setId("q1") @@ -238,7 +238,7 @@ public void testPercolateQueryWithNestedDocuments_doLeakFieldDataCacheEntries() mapping.endObject(); } mapping.endObject(); - createIndex("test", client().admin().indices().prepareCreate("test").addMapping("employee", mapping)); + createIndex("test", client().admin().indices().prepareCreate("test").setMapping(mapping)); Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, "use_fielddata_please", Collections.emptyMap()); client().prepareIndex("test") .setId("q1") diff --git a/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java b/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java 
index 46b9c45b9cf82..4735f39033081 100644 --- a/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java +++ b/plugins/analysis-icu/src/internalClusterTest/java/org/opensearch/index/mapper/ICUCollationKeywordFieldMapperIT.java @@ -71,8 +71,6 @@ protected Collection> nodePlugins() { */ public void testBasicUsage() throws Exception { String index = "foo"; - String type = "mytype"; - String[] equivalent = { "I WİLL USE TURKİSH CASING", "ı will use turkish casıng" }; XContentBuilder builder = jsonBuilder().startObject() @@ -88,7 +86,7 @@ public void testBasicUsage() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); // both values should collate to same value indexRandom( @@ -114,7 +112,6 @@ public void testBasicUsage() throws Exception { public void testMultipleValues() throws Exception { String index = "foo"; - String type = "mytype"; String[] equivalent = { "a", "C", "a", "B" }; @@ -130,7 +127,7 @@ public void testMultipleValues() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); // everything should be indexed fine, no exceptions indexRandom( @@ -177,7 +174,6 @@ public void testMultipleValues() throws Exception { */ public void testNormalization() throws Exception { String index = "foo"; - String type = "mytype"; String[] equivalent = { "I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng" }; @@ -195,7 +191,7 @@ public void testNormalization() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); 
indexRandom( true, @@ -223,7 +219,6 @@ public void testNormalization() throws Exception { */ public void testSecondaryStrength() throws Exception { String index = "foo"; - String type = "mytype"; String[] equivalent = { "TESTING", "testing" }; @@ -241,7 +236,7 @@ public void testSecondaryStrength() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); indexRandom( true, @@ -269,7 +264,6 @@ public void testSecondaryStrength() throws Exception { */ public void testIgnorePunctuation() throws Exception { String index = "foo"; - String type = "mytype"; String[] equivalent = { "foo-bar", "foo bar" }; @@ -287,7 +281,7 @@ public void testIgnorePunctuation() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); indexRandom( true, @@ -315,7 +309,6 @@ public void testIgnorePunctuation() throws Exception { */ public void testIgnoreWhitespace() throws Exception { String index = "foo"; - String type = "mytype"; XContentBuilder builder = jsonBuilder().startObject() .startObject("properties") @@ -333,7 +326,7 @@ public void testIgnoreWhitespace() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); indexRandom( true, @@ -363,7 +356,6 @@ public void testIgnoreWhitespace() throws Exception { */ public void testNumerics() throws Exception { String index = "foo"; - String type = "mytype"; XContentBuilder builder = jsonBuilder().startObject() .startObject("properties") @@ -376,7 +368,7 @@ public void testNumerics() throws Exception { .endObject() .endObject(); - 
assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); indexRandom( true, @@ -399,7 +391,6 @@ public void testNumerics() throws Exception { */ public void testIgnoreAccentsButNotCase() throws Exception { String index = "foo"; - String type = "mytype"; XContentBuilder builder = jsonBuilder().startObject() .startObject("properties") @@ -416,7 +407,7 @@ public void testIgnoreAccentsButNotCase() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); indexRandom( true, @@ -441,7 +432,6 @@ public void testIgnoreAccentsButNotCase() throws Exception { */ public void testUpperCaseFirst() throws Exception { String index = "foo"; - String type = "mytype"; XContentBuilder builder = jsonBuilder().startObject() .startObject("properties") @@ -455,7 +445,7 @@ public void testUpperCaseFirst() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); indexRandom( true, @@ -481,7 +471,6 @@ public void testUpperCaseFirst() throws Exception { */ public void testCustomRules() throws Exception { String index = "foo"; - String type = "mytype"; RuleBasedCollator baseCollator = (RuleBasedCollator) Collator.getInstance(new ULocale("de_DE")); String DIN5007_2_tailorings = "& ae , a\u0308 & AE , A\u0308" + "& oe , o\u0308 & OE , O\u0308" + "& ue , u\u0308 & UE , u\u0308"; @@ -504,7 +493,7 @@ public void testCustomRules() throws Exception { .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); indexRandom( true, 
diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java index 2cf05da26c193..375222cd2af44 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java @@ -63,10 +63,9 @@ protected Collection> nodePlugins() { // issue 5053 public void testThatUpdatingMappingShouldNotRemoveSizeMappingConfiguration() throws Exception { String index = "foo"; - String type = MapperService.SINGLE_MAPPING_NAME; XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); // check mapping again assertSizeMappingEnabled(index, true); @@ -88,10 +87,9 @@ public void testThatUpdatingMappingShouldNotRemoveSizeMappingConfiguration() thr public void testThatSizeCanBeSwitchedOnAndOff() throws Exception { String index = "foo"; - String type = MapperService.SINGLE_MAPPING_NAME; XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); - assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); + assertAcked(client().admin().indices().prepareCreate(index).setMapping(builder)); // check mapping again assertSizeMappingEnabled(index, true); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java index 476bd72ee3ca3..3ef2a63c7d0ac 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/CreateIndexIT.java @@ -110,8 +110,7 @@ public void testCreationDateGenerated() { public void testNonNestedMappings() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "_doc", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() .startObject("properties") @@ -131,7 +130,7 @@ public void testNonNestedMappings() throws Exception { } public void testEmptyNestedMappings() throws Exception { - assertAcked(prepareCreate("test").addMapping("_doc", XContentFactory.jsonBuilder().startObject().endObject())); + assertAcked(prepareCreate("test").setMapping(XContentFactory.jsonBuilder().startObject().endObject())); GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); @@ -144,10 +143,8 @@ public void testEmptyNestedMappings() throws Exception { public void testMappingParamAndNestedMismatch() throws Exception { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> prepareCreate("test").addMapping( - MapperService.SINGLE_MAPPING_NAME, - XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject() - ).get() + () -> prepareCreate("test").setMapping(XContentFactory.jsonBuilder().startObject().startObject("type2").endObject().endObject()) + .get() ); assertThat( e.getMessage(), @@ -159,10 +156,7 @@ public void testMappingParamAndNestedMismatch() throws Exception { public void testEmptyMappings() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "_doc", - XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject() - ) + prepareCreate("test").setMapping(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject()) ); GetMappingsResponse response = 
client().admin().indices().prepareGetMappings("test").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java index d28dcbb924f95..8ab6450ead2af 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java @@ -88,7 +88,7 @@ public void testNoSuchDoc() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); + assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setMapping(mapping)); client().prepareIndex("test").setId("667").setSource("field", "foo bar").execute().actionGet(); refresh(); @@ -105,16 +105,14 @@ public void testNoSuchDoc() throws Exception { public void testExistingFieldWithNoTermVectorsNoNPE() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("existingfield") .field("type", "text") .field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); + assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setMapping(mapping)); // when indexing a field that simply has a question mark, the term vectors will be null client().prepareIndex("test").setId("0").setSource("existingfield", "?").execute().actionGet(); @@ -133,16 +131,14 @@ public void testExistingFieldWithNoTermVectorsNoNPE() throws Exception { public void testExistingFieldButNotInDocNPE() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("existingfield") .field("type", "text") 
.field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); + assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setMapping(mapping)); // when indexing a field that simply has a question mark, the term vectors will be null client().prepareIndex("test").setId("0").setSource("anotherexistingfield", 1).execute().actionGet(); @@ -205,7 +201,6 @@ public void testNotIndexedField() throws Exception { public void testSimpleTermVectors() throws IOException { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") @@ -213,10 +208,9 @@ public void testSimpleTermVectors() throws IOException { .field("analyzer", "tv_test") .endObject() .endObject() - .endObject() .endObject(); assertAcked( - prepareCreate("test").addMapping("type1", mapping) + prepareCreate("test").setMapping(mapping) .addAlias(new Alias("alias")) .setSettings( Settings.builder() @@ -316,7 +310,6 @@ public void testRandomSingleTermVectors() throws IOException { String optionString = termVectorOptionsToString(ft); XContentBuilder mapping = jsonBuilder().startObject() - .startObject("_doc") .startObject("properties") .startObject("field") .field("type", "text") @@ -324,10 +317,9 @@ public void testRandomSingleTermVectors() throws IOException { .field("analyzer", "tv_test") .endObject() .endObject() - .endObject() .endObject(); assertAcked( - prepareCreate("test").addMapping("_doc", mapping) + prepareCreate("test").setMapping(mapping) .setSettings( Settings.builder() .put("index.analysis.analyzer.tv_test.tokenizer", "standard") @@ -466,7 +458,7 @@ public void testSimpleTermVectorsWithGenerate() throws IOException { fieldNames[i] = "field" + String.valueOf(i); } - XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties"); + 
XContentBuilder mapping = jsonBuilder().startObject().startObject("properties"); XContentBuilder source = jsonBuilder().startObject(); for (String field : fieldNames) { mapping.startObject(field) @@ -476,11 +468,11 @@ public void testSimpleTermVectorsWithGenerate() throws IOException { .endObject(); source.field(field, "the quick brown fox jumps over the lazy dog"); } - mapping.endObject().endObject().endObject(); + mapping.endObject().endObject(); source.endObject(); assertAcked( - prepareCreate("test").addMapping("type1", mapping) + prepareCreate("test").setMapping(mapping) .setSettings( Settings.builder() .put(indexSettings()) @@ -640,16 +632,16 @@ private void compareTermVectors(String fieldName, Fields fields0, Fields fields1 public void testSimpleWildCards() throws IOException { int numFields = 25; - XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties"); + XContentBuilder mapping = jsonBuilder().startObject().startObject("properties"); XContentBuilder source = jsonBuilder().startObject(); for (int i = 0; i < numFields; i++) { mapping.startObject("field" + i).field("type", "text").field("term_vector", randomBoolean() ? 
"yes" : "no").endObject(); source.field("field" + i, "some text here"); } source.endObject(); - mapping.endObject().endObject().endObject(); + mapping.endObject().endObject(); - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); + assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setMapping(mapping)); ensureGreen(); client().prepareIndex("test").setId("0").setSource(source).get(); @@ -744,7 +736,7 @@ public void testPerFieldAnalyzer() throws IOException { // setup mapping and document source Set withTermVectors = new HashSet<>(); - XContentBuilder mapping = jsonBuilder().startObject().startObject("type1").startObject("properties"); + XContentBuilder mapping = jsonBuilder().startObject().startObject("properties"); XContentBuilder source = jsonBuilder().startObject(); for (int i = 0; i < numFields; i++) { String fieldName = "field" + i; @@ -758,11 +750,11 @@ public void testPerFieldAnalyzer() throws IOException { source.field(fieldName, "some text here"); } source.endObject(); - mapping.endObject().endObject().endObject(); + mapping.endObject().endObject(); // setup indices with mapping Settings.Builder settings = Settings.builder().put(indexSettings()).put("index.analysis.analyzer", "standard"); - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(settings).addMapping("type1", mapping)); + assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(settings).setMapping(mapping)); ensureGreen(); // index a single document with prepared source diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java index 93a903e0b5e0c..fcf5dcf3891ce 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/SimpleClusterStateIT.java @@ -261,7 +261,7 @@ public 
void testLargeClusterStatePublishing() throws Exception { ByteSizeValue.parseBytesSizeValue("10k", "estimatedBytesSize").bytesAsInt(), ByteSizeValue.parseBytesSizeValue("256k", "estimatedBytesSize").bytesAsInt() ); - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties"); + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties"); int counter = 0; int numberOfFields = 0; while (true) { @@ -273,7 +273,7 @@ public void testLargeClusterStatePublishing() throws Exception { } } logger.info("number of fields [{}], estimated bytes [{}]", numberOfFields, estimatedBytesSize); - mapping.endObject().endObject().endObject(); + mapping.endObject().endObject(); int numberOfShards = scaledRandomIntBetween(1, cluster().numDataNodes()); // if the create index is ack'ed, then all nodes have successfully processed the cluster state @@ -287,7 +287,7 @@ public void testLargeClusterStatePublishing() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), Long.MAX_VALUE) ) - .addMapping("type", mapping) + .setMapping(mapping) .setTimeout("60s") .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java index 24aff104ce837..a8828c7ad38b5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayIndexStateIT.java @@ -105,10 +105,7 @@ public void testMappingMetadataParsed() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping( - "type1", - XContentFactory.jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject() - ) + 
.setMapping(XContentFactory.jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject()) .execute() .actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java index ec0b47ccd0ecf..a09778582b604 100644 --- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java @@ -541,10 +541,8 @@ public void testMultiGetWithVersion() throws Exception { public void testGetFieldsNonLeafField() throws Exception { assertAcked( prepareCreate("test").addAlias(new Alias("alias")) - .addMapping( - "my-type1", + .setMapping( jsonBuilder().startObject() - .startObject("my-type1") .startObject("properties") .startObject("field1") .startObject("properties") @@ -555,7 +553,6 @@ public void testGetFieldsNonLeafField() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) .setSettings(Settings.builder().put("index.refresh_interval", -1)) ); @@ -582,10 +579,8 @@ public void testGetFieldsComplexField() throws Exception { prepareCreate("my-index") // multi types in 5.6 .setSettings(Settings.builder().put("index.refresh_interval", -1)) - .addMapping( - "my-type", + .setMapping( jsonBuilder().startObject() - .startObject("my-type") .startObject("properties") .startObject("field1") .field("type", "object") @@ -608,7 +603,6 @@ public void testGetFieldsComplexField() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java index c0bc9d29af992..69eb34c39c10c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java @@ -92,7 +92,7 @@ public void 
testIndexSort() { .put("index.number_of_shards", "1") .put("index.number_of_replicas", "1") .putList("index.sort.field", "date", "numeric_dv", "keyword_dv") - ).addMapping("test", TEST_MAPPING).get(); + ).setMapping(TEST_MAPPING).get(); for (int i = 0; i < 20; i++) { client().prepareIndex("test") .setId(Integer.toString(i)) @@ -108,7 +108,7 @@ public void testInvalidIndexSort() { IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, () -> prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).putList("index.sort.field", "invalid_field")) - .addMapping("test", TEST_MAPPING) + .setMapping(TEST_MAPPING) .get() ); assertThat(exc.getMessage(), containsString("unknown index sort field:[invalid_field]")); @@ -116,7 +116,7 @@ public void testInvalidIndexSort() { exc = expectThrows( IllegalArgumentException.class, () -> prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).putList("index.sort.field", "numeric")) - .addMapping("test", TEST_MAPPING) + .setMapping(TEST_MAPPING) .get() ); assertThat(exc.getMessage(), containsString("docvalues not found for index sort field:[numeric]")); @@ -124,7 +124,7 @@ public void testInvalidIndexSort() { exc = expectThrows( IllegalArgumentException.class, () -> prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).putList("index.sort.field", "keyword")) - .addMapping("test", TEST_MAPPING) + .setMapping(TEST_MAPPING) .get() ); assertThat(exc.getMessage(), containsString("docvalues not found for index sort field:[keyword]")); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java b/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java index 0aa2abed14b79..51e2cf669cbfb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/fielddata/FieldDataLoadingIT.java @@ -43,10 
+43,8 @@ public class FieldDataLoadingIT extends OpenSearchIntegTestCase { public void testEagerGlobalOrdinalsFieldDataLoading() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("name") .field("type", "text") @@ -55,7 +53,6 @@ public void testEagerGlobalOrdinalsFieldDataLoading() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java index f23e319a5e8d2..d4733e4f31cb2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/CopyToMapperIntegrationIT.java @@ -50,7 +50,7 @@ public class CopyToMapperIntegrationIT extends OpenSearchIntegTestCase { public void testDynamicTemplateCopyTo() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test-idx").addMapping("_doc", createDynamicTemplateMapping())); + assertAcked(client().admin().indices().prepareCreate("test-idx").setMapping(createDynamicTemplateMapping())); int recordCount = between(1, 200); @@ -98,7 +98,6 @@ public void testDynamicObjectCopyTo() throws Exception { private XContentBuilder createDynamicTemplateMapping() throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("_doc") .startArray("dynamic_templates") .startObject() @@ -124,7 +123,6 @@ private XContentBuilder createDynamicTemplateMapping() throws IOException { .endObject() .endArray() - .endObject() .endObject(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/ExternalValuesMapperIntegrationIT.java 
b/server/src/internalClusterTest/java/org/opensearch/index/mapper/ExternalValuesMapperIntegrationIT.java index 2251ec62f5c15..138081e56dd63 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/ExternalValuesMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/ExternalValuesMapperIntegrationIT.java @@ -55,18 +55,15 @@ protected Collection> nodePlugins() { } public void testHighlightingOnCustomString() throws Exception { - prepareCreate("test-idx").addMapping( - "type", + prepareCreate("test-idx").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field") .field("type", FakeStringFieldMapper.CONTENT_TYPE) .endObject() .endObject() .endObject() - .endObject() ).execute().get(); index( @@ -118,11 +115,9 @@ public void testHighlightingOnCustomString() throws Exception { } public void testExternalValues() throws Exception { - prepareCreate("test-idx").addMapping( - "type", + prepareCreate("test-idx").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject(ExternalMetadataMapper.CONTENT_TYPE) .endObject() .startObject("properties") @@ -131,7 +126,6 @@ public void testExternalValues() throws Exception { .endObject() .endObject() .endObject() - .endObject() ).execute().get(); index("test-idx", "type", "1", XContentFactory.jsonBuilder().startObject().field("field", "1234").endObject()); @@ -166,11 +160,9 @@ public void testExternalValues() throws Exception { } public void testExternalValuesWithMultifield() throws Exception { - prepareCreate("test-idx").addMapping( - "_doc", + prepareCreate("test-idx").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("_doc") .startObject("properties") .startObject("f") .field("type", ExternalMapperPlugin.EXTERNAL_UPPER) @@ -189,7 +181,6 @@ public void testExternalValuesWithMultifield() throws Exception { .endObject() .endObject() .endObject() - 
.endObject() ).execute().get(); index("test-idx", "_doc", "1", "f", "This is my text"); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java index 37fa8cdd11a8b..7dbb6a45b857d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/mapper/MultiFieldsIntegrationIT.java @@ -58,7 +58,7 @@ public class MultiFieldsIntegrationIT extends OpenSearchIntegTestCase { @SuppressWarnings("unchecked") public void testMultiFields() throws Exception { - assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createTypeSource())); + assertAcked(client().admin().indices().prepareCreate("my-index").setMapping(createTypeSource())); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); @@ -98,7 +98,7 @@ public void testMultiFields() throws Exception { @SuppressWarnings("unchecked") public void testGeoPointMultiField() throws Exception { - assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("geo_point"))); + assertAcked(client().admin().indices().prepareCreate("my-index").setMapping(createMappingSource("geo_point"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); @@ -127,7 +127,7 @@ public void testGeoPointMultiField() throws Exception { @SuppressWarnings("unchecked") public void testCompletionMultiField() throws Exception { - assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("completion"))); + 
assertAcked(client().admin().indices().prepareCreate("my-index").setMapping(createMappingSource("completion"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); @@ -149,7 +149,7 @@ public void testCompletionMultiField() throws Exception { @SuppressWarnings("unchecked") public void testIpMultiField() throws Exception { - assertAcked(client().admin().indices().prepareCreate("my-index").addMapping("my-type", createMappingSource("ip"))); + assertAcked(client().admin().indices().prepareCreate("my-index").setMapping(createMappingSource("ip"))); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); @@ -172,7 +172,6 @@ public void testIpMultiField() throws Exception { private XContentBuilder createMappingSource(String fieldType) throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("my-type") .startObject("properties") .startObject("a") .field("type", fieldType) @@ -183,14 +182,12 @@ private XContentBuilder createMappingSource(String fieldType) throws IOException .endObject() .endObject() .endObject() - .endObject() .endObject(); } private XContentBuilder createTypeSource() throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("my-type") .startObject("properties") .startObject("title") .field("type", "text") @@ -201,7 +198,6 @@ private XContentBuilder createTypeSource() throws IOException { .endObject() .endObject() .endObject() - .endObject() .endObject(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index d38fbfa1a51e1..a857f6cf6e96c 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -78,18 +78,16 @@ public void testThatPreBuiltAnalyzersAreNotClosedOnIndexClose() throws Exception loadedAnalyzers.get(preBuiltAnalyzer).add(randomVersion); final XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "text") .field("analyzer", name) .endObject() .endObject() - .endObject() .endObject(); Settings versionSettings = settings(randomVersion).build(); - client().admin().indices().prepareCreate(indexName).addMapping("type", mapping).setSettings(versionSettings).get(); + client().admin().indices().prepareCreate(indexName).setMapping(mapping).setSettings(versionSettings).get(); } ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java index 92a65f43361d6..3de2001acd1e7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -80,9 +80,8 @@ public void testGetMappingsWhereThereAreNone() { assertThat(response.fieldMappings("index", "field"), nullValue()); } - private XContentBuilder getMappingForType(String type) throws IOException { + private XContentBuilder getMappingForType() throws IOException { return jsonBuilder().startObject() - .startObject(type) .startObject("properties") .startObject("field1") .field("type", "text") @@ -99,14 +98,13 @@ private XContentBuilder getMappingForType(String type) throws IOException { .endObject() .endObject() .endObject() - .endObject() .endObject(); } public void testGetFieldMappings() throws Exception { - 
assertAcked(prepareCreate("indexa").addMapping("typeA", getMappingForType("typeA"))); - assertAcked(client().admin().indices().prepareCreate("indexb").addMapping("typeB", getMappingForType("typeB"))); + assertAcked(prepareCreate("indexa").setMapping(getMappingForType())); + assertAcked(client().admin().indices().prepareCreate("indexb").setMapping(getMappingForType())); // Get mappings by full name GetFieldMappingsResponse response = client().admin() @@ -136,7 +134,7 @@ public void testGetFieldMappings() throws Exception { @SuppressWarnings("unchecked") public void testSimpleGetFieldMappingsWithDefaults() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", getMappingForType("type"))); + assertAcked(prepareCreate("test").setMapping(getMappingForType())); client().admin().indices().preparePutMapping("test").setSource("num", "type=long").get(); client().admin().indices().preparePutMapping("test").setSource("field2", "type=text,index=false").get(); @@ -163,7 +161,7 @@ public void testSimpleGetFieldMappingsWithDefaults() throws Exception { @SuppressWarnings("unchecked") public void testGetFieldMappingsWithFieldAlias() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", getMappingForType("type"))); + assertAcked(prepareCreate("test").setMapping(getMappingForType())); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().setFields("alias", "field1").get(); @@ -179,7 +177,7 @@ public void testGetFieldMappingsWithFieldAlias() throws Exception { // fix #6552 public void testSimpleGetFieldMappingsWithPretty() throws Exception { - assertAcked(prepareCreate("index").addMapping("type", getMappingForType("type"))); + assertAcked(prepareCreate("index").setMapping(getMappingForType())); Map params = new HashMap<>(); params.put("pretty", "true"); GetFieldMappingsResponse response = client().admin() @@ -209,7 +207,7 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { } public void 
testGetFieldMappingsWithBlocks() throws Exception { - assertAcked(prepareCreate("test").addMapping("_doc", getMappingForType("_doc"))); + assertAcked(prepareCreate("test").setMapping(getMappingForType())); for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { try { diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java index aac12522afa2f..f54e60a8baa88 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/mapping/SimpleGetMappingsIT.java @@ -70,21 +70,19 @@ public void testGetMappingsWhereThereAreNone() { assertEquals(MappingMetadata.EMPTY_MAPPINGS, response.mappings().get("index")); } - private XContentBuilder getMappingForType(String type) throws IOException { + private XContentBuilder getMappingForType() throws IOException { return jsonBuilder().startObject() - .startObject(type) .startObject("properties") .startObject("field1") .field("type", "text") .endObject() .endObject() - .endObject() .endObject(); } public void testSimpleGetMappings() throws Exception { - client().admin().indices().prepareCreate("indexa").addMapping("typeA", getMappingForType("typeA")).execute().actionGet(); - client().admin().indices().prepareCreate("indexb").addMapping("typeA", getMappingForType("typeA")).execute().actionGet(); + client().admin().indices().prepareCreate("indexa").setMapping(getMappingForType()).execute().actionGet(); + client().admin().indices().prepareCreate("indexb").setMapping(getMappingForType()).execute().actionGet(); ClusterHealthResponse clusterHealth = client().admin() .cluster() @@ -114,7 +112,7 @@ public void testSimpleGetMappings() throws Exception { } public void testGetMappingsWithBlocks() throws IOException { - 
client().admin().indices().prepareCreate("test").addMapping("_doc", getMappingForType("_doc")).execute().actionGet(); + client().admin().indices().prepareCreate("test").setMapping(getMappingForType()).execute().actionGet(); ensureGreen(); for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { diff --git a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java index 5b78c5686dc6a..c9d18e64ca038 100644 --- a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java @@ -129,16 +129,7 @@ public void testThatMgetShouldWorkWithMultiIndexAlias() throws IOException { public void testThatMgetShouldWorkWithAliasRouting() throws IOException { assertAcked( prepareCreate("test").addAlias(new Alias("alias1").routing("abc")) - .addMapping( - "test", - jsonBuilder().startObject() - .startObject("test") - .startObject("_routing") - .field("required", true) - .endObject() - .endObject() - .endObject() - ) + .setMapping(jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject()) ); client().prepareIndex("alias1") diff --git a/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java index f3179a0c4acb3..8909b9deece9b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/routing/SimpleRoutingIT.java @@ -351,17 +351,7 @@ public void testRequiredRoutingCrudApis() throws Exception { .indices() .prepareCreate("test") .addAlias(new Alias("alias")) - .addMapping( - "type1", - XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_routing") - .field("required", true) - .endObject() - 
.endObject() - .endObject() - ) + .setMapping(XContentFactory.jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject()) .execute() .actionGet(); ensureGreen(); @@ -450,17 +440,7 @@ public void testRequiredRoutingBulk() throws Exception { .indices() .prepareCreate("test") .addAlias(new Alias("alias")) - .addMapping( - "type1", - XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_routing") - .field("required", true) - .endObject() - .endObject() - .endObject() - ) + .setMapping(XContentFactory.jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject()) .execute() .actionGet(); ensureGreen(); @@ -544,17 +524,7 @@ public void testRequiredRoutingMappingVariousAPIs() throws Exception { .indices() .prepareCreate("test") .addAlias(new Alias("alias")) - .addMapping( - "type1", - XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_routing") - .field("required", true) - .endObject() - .endObject() - .endObject() - ) + .setMapping(XContentFactory.jsonBuilder().startObject().startObject("_routing").field("required", true).endObject().endObject()) .execute() .actionGet(); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java index f49938fb27e72..c5794c76e21de 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java @@ -115,10 +115,8 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t */ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exception { - prepareCreate("idx").addMapping( - "type", + prepareCreate("idx").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") 
.startObject("name") .field("type", "keyword") @@ -128,7 +126,6 @@ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exceptio .endObject() .endObject() .endObject() - .endObject() ).get(); ensureSearchable("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java index 29c325d01492e..d03b10ce092c9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java @@ -133,17 +133,14 @@ public void testRandomRanges() throws Exception { } } - prepareCreate("idx").addMapping( - "type", + prepareCreate("idx").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("values") .field("type", "double") .endObject() .endObject() .endObject() - .endObject() ).get(); for (int i = 0; i < docs.length; ++i) { @@ -235,10 +232,8 @@ public void testDuelTerms() throws Exception { final IntHashSet valuesSet = new IntHashSet(); cluster().wipeIndices("idx"); - prepareCreate("idx").addMapping( - "type", + prepareCreate("idx").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("num") .field("type", "double") @@ -260,7 +255,6 @@ public void testDuelTerms() throws Exception { .endObject() .endObject() .endObject() - .endObject() ).get(); List indexingRequests = new ArrayList<>(); @@ -358,17 +352,14 @@ public void testDuelTerms() throws Exception { // Duel between histograms and scripted terms public void testDuelTermsHistogram() throws Exception { - prepareCreate("idx").addMapping( - "type", + prepareCreate("idx").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("num") .field("type", "double") .endObject() .endObject() .endObject() - .endObject() 
).get(); final int numDocs = scaledRandomIntBetween(500, 5000); @@ -422,17 +413,14 @@ public void testDuelTermsHistogram() throws Exception { public void testLargeNumbersOfPercentileBuckets() throws Exception { // test high numbers of percentile buckets to make sure paging and release work correctly - prepareCreate("idx").addMapping( - "type", + prepareCreate("idx").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("double_value") .field("type", "double") .endObject() .endObject() .endObject() - .endObject() ).get(); final int numDocs = scaledRandomIntBetween(2500, 5000); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java index 256281f8c6833..969a48b514f1a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java @@ -158,10 +158,8 @@ public void setupSuiteScopeCluster() throws Exception { } assertAcked( - prepareCreate("idx_nested_nested_aggs").addMapping( - "type", + prepareCreate("idx_nested_nested_aggs").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("nested1") .field("type", "nested") @@ -173,7 +171,6 @@ public void setupSuiteScopeCluster() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen("idx_nested_nested_aggs"); @@ -400,7 +397,6 @@ public void testNestedOnObjectField() throws Exception { // Test based on: https://github.com/elastic/elasticsearch/issues/9280 public void testParentFilterResolvedCorrectly() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("provider") .startObject("properties") .startObject("comments") .field("type", "nested") @@ -450,11 +446,10 @@ public void 
testParentFilterResolvedCorrectly() throws Exception { .endObject() .endObject() .endObject() - .endObject() .endObject(); assertAcked( prepareCreate("idx2").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping("provider", mapping) + .setMapping(mapping) ); ensureGreen("idx2"); @@ -649,10 +644,8 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception { public void testFilterAggInsideNestedAgg() throws Exception { assertAcked( - prepareCreate("classes").addMapping( - "class", + prepareCreate("classes").setMapping( jsonBuilder().startObject() - .startObject("class") .startObject("properties") .startObject("name") .field("type", "text") @@ -681,7 +674,6 @@ public void testFilterAggInsideNestedAgg() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java index e8a57ea3941ff..67ae145aece11 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java @@ -75,8 +75,7 @@ public class ReverseNestedIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { assertAcked( - prepareCreate("idx1").addMapping( - "type", + prepareCreate("idx1").setMapping( jsonBuilder().startObject() .startObject("properties") .startObject("field1") @@ -99,8 +98,7 @@ public void setupSuiteScopeCluster() throws Exception { ) ); assertAcked( - prepareCreate("idx2").addMapping( - "type", + prepareCreate("idx2").setMapping( jsonBuilder().startObject() .startObject("properties") .startObject("nested1") @@ -531,7 +529,6 @@ public void testNonExistingNestedField() throws Exception { public void 
testSameParentDocHavingMultipleBuckets() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("product") .field("dynamic", "strict") .startObject("properties") .startObject("id") @@ -562,11 +559,10 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { .endObject() .endObject() .endObject() - .endObject() .endObject(); assertAcked( prepareCreate("idx3").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping("product", mapping) + .setMapping(mapping) ); client().prepareIndex("idx3") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java index e01f966cadd9b..7996e2bee44c1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java @@ -128,10 +128,8 @@ public Settings indexSettings() { @Override public void setupSuiteScopeCluster() throws Exception { - prepareCreate("idx").addMapping( - "type", + prepareCreate("idx").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("str_value") .field("type", "keyword") @@ -153,7 +151,6 @@ public void setupSuiteScopeCluster() throws Exception { .endObject() .endObject() .endObject() - .endObject() ).get(); numDocs = randomIntBetween(2, 100); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java index b6fdcf4b6267d..53a3cd4da5446 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java @@ -139,10 +139,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("field-collapsing").addMapping("type", "group", "type=keyword")); createIndex("empty"); assertAcked( - prepareCreate("articles").addMapping( - "article", + prepareCreate("articles").setMapping( jsonBuilder().startObject() - .startObject("article") .startObject("properties") .startObject(TERMS_AGGS_FIELD) .field("type", "keyword") @@ -174,7 +172,6 @@ public void setupSuiteScopeCluster() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen("idx", "empty", "articles"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java index 3d9ebb469cba6..0d63e6d719610 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java @@ -556,7 +556,7 @@ public void testFieldIsntWrittenOutTwice() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate("foo_2").addMapping("doc", builder).get()); + assertAcked(client().admin().indices().prepareCreate("foo_2").setMapping(builder).get()); XContentBuilder docBuilder = jsonBuilder().startObject() .startObject("license") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java index dc37b49e7a910..91390edc7e872 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java @@ -130,18 +130,15 @@ public String toString() { @Override public void setupSuiteScopeCluster() throws Exception { - prepareCreate("idx").addMapping( - "type", + prepareCreate("idx").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject(VALUE_FIELD) .field("type", "double") .endObject() .endObject() .endObject() - .endObject() ).execute().get(); createIndex("idx_unmapped"); List builders = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java index 68bac89213c57..def247676ac1a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java @@ -79,10 +79,8 @@ public void testPlugin() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping( - "type1", + .setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -90,7 +88,6 @@ public void testPlugin() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) .get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java index b16678d60fce7..e28ef3c00a485 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java @@ -100,10 +100,8 @@ protected Map, Object>> pluginScripts() { public void testSimpleNested() throws Exception { assertAcked( - 
prepareCreate("articles").addMapping( - "article", + prepareCreate("articles").setMapping( jsonBuilder().startObject() - .startObject("article") .startObject("properties") .startObject("comments") .field("type", "nested") @@ -119,7 +117,6 @@ public void testSimpleNested() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -313,10 +310,8 @@ public void testRandomNested() throws Exception { public void testNestedMultipleLayers() throws Exception { assertAcked( - prepareCreate("articles").addMapping( - "article", + prepareCreate("articles").setMapping( jsonBuilder().startObject() - .startObject("article") .startObject("properties") .startObject("comments") .field("type", "nested") @@ -339,7 +334,6 @@ public void testNestedMultipleLayers() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -583,8 +577,7 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { // number_of_shards = 1, because then we catch the expected exception in the same way. // (See expectThrows(...) 
below) .setSettings(Settings.builder().put("index.number_of_shards", 1)) - .addMapping( - "article", + .setMapping( jsonBuilder().startObject() .startObject("properties") .startObject("comments") @@ -741,7 +734,6 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { public void testMatchesQueriesNestedInnerHits() throws Exception { XContentBuilder builder = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("nested1") .field("type", "nested") @@ -755,9 +747,8 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { .field("type", "long") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type1", builder)); + assertAcked(prepareCreate("test").setMapping(builder)); ensureGreen(); List requests = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 27513b575d6f0..74c2922ebf328 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -139,18 +139,16 @@ protected Collection> nodePlugins() { public void testHighlightingWithKeywordIgnoreBoundaryScanner() throws IOException { XContentBuilder mappings = jsonBuilder(); mappings.startObject(); - mappings.startObject("type") - .startObject("properties") + mappings.startObject("properties") .startObject("tags") .field("type", "keyword") .endObject() .startObject("sort") .field("type", "long") .endObject() - .endObject() .endObject(); mappings.endObject(); - assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test") 
.setId("1") .setSource(jsonBuilder().startObject().array("tags", "foo bar", "foo bar", "foo bar", "foo baz").field("sort", 1).endObject()) @@ -176,16 +174,9 @@ public void testHighlightingWithKeywordIgnoreBoundaryScanner() throws IOExceptio public void testHighlightingWithStoredKeyword() throws IOException { XContentBuilder mappings = jsonBuilder(); mappings.startObject(); - mappings.startObject("type") - .startObject("properties") - .startObject("text") - .field("type", "keyword") - .field("store", true) - .endObject() - .endObject() - .endObject(); + mappings.startObject("properties").startObject("text").field("type", "keyword").field("store", true).endObject().endObject(); mappings.endObject(); - assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "foo").endObject()).get(); refresh(); SearchResponse search = client().prepareSearch() @@ -199,18 +190,16 @@ public void testHighlightingWithWildcardName() throws IOException { // test the kibana case with * as fieldname that will try highlight all fields including meta fields XContentBuilder mappings = jsonBuilder(); mappings.startObject(); - mappings.startObject("type") - .startObject("properties") + mappings.startObject("properties") .startObject("text") .field("type", "text") .field("analyzer", "keyword") .field("index_options", "offsets") .field("term_vector", "with_positions_offsets") .endObject() - .endObject() .endObject(); mappings.endObject(); - assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "text").endObject()).get(); refresh(); for (String type : ALL_TYPES) { @@ -224,7 +213,6 @@ public void testHighlightingWithWildcardName() throws IOException { public void testFieldAlias() 
throws IOException { XContentBuilder mappings = jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("text") .field("type", "text") @@ -236,9 +224,8 @@ public void testFieldAlias() throws IOException { .field("path", "text") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test").setId("1").setSource("text", "foo").get(); refresh(); @@ -253,7 +240,6 @@ public void testFieldAlias() throws IOException { public void testFieldAliasWithSourceLookup() throws IOException { XContentBuilder mappings = jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("text") .field("type", "text") @@ -266,9 +252,8 @@ public void testFieldAliasWithSourceLookup() throws IOException { .field("path", "text") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test").setId("1").setSource("text", "foo bar").get(); refresh(); @@ -283,7 +268,6 @@ public void testFieldAliasWithSourceLookup() throws IOException { public void testFieldAliasWithWildcardField() throws IOException { XContentBuilder mappings = jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("keyword") .field("type", "keyword") @@ -293,9 +277,8 @@ public void testFieldAliasWithWildcardField() throws IOException { .field("path", "keyword") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test").setId("1").setSource("keyword", "foo").get(); refresh(); @@ -308,8 +291,7 @@ public void testFieldAliasWithWildcardField() throws IOException { public void 
testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOException { XContentBuilder mappings = jsonBuilder(); mappings.startObject(); - mappings.startObject("type") - .startObject("_source") + mappings.startObject("_source") .field("enabled", false) .endObject() .startObject("properties") @@ -325,10 +307,9 @@ public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOExc .field("type", "text") .field("store", true) .endObject() - .endObject() .endObject(); mappings.endObject(); - assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test") .setId("1") .setSource(jsonBuilder().startObject().field("unstored_text", "text").field("text", "text").endObject()) @@ -410,10 +391,8 @@ public void testEnsureNoNegativeOffsets() throws Exception { public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") // we don't store title and don't use term vector, now lets see if it works... .startObject("title") @@ -432,7 +411,6 @@ public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception .endObject() .endObject() .endObject() - .endObject() ) ); @@ -480,10 +458,8 @@ public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") // we don't store title, now lets see if it works... 
.startObject("title") @@ -502,7 +478,6 @@ public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exce .endObject() .endObject() .endObject() - .endObject() ) ); @@ -550,10 +525,8 @@ public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exce public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") // we don't store title, now lets see if it works... .startObject("title") @@ -572,7 +545,6 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except .endObject() .endObject() .endObject() - .endObject() ) ); @@ -771,10 +743,8 @@ public void testHighlightingOnWildcardFields() throws Exception { public void testForceSourceWithSourceDisabled() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("_source") .field("enabled", false) .endObject() @@ -790,7 +760,6 @@ public void testForceSourceWithSourceDisabled() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -852,7 +821,7 @@ public void testPlainHighlighter() throws Exception { } public void testFastVectorHighlighter() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); indexRandom( @@ -886,7 +855,7 @@ public void testFastVectorHighlighter() throws Exception { } public void testHighlighterWithSentenceBoundaryScanner() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); indexRandom( @@ -927,7 +896,7 @@ public void 
testHighlighterWithSentenceBoundaryScanner() throws Exception { } public void testHighlighterWithSentenceBoundaryScannerAndLocale() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); indexRandom( @@ -970,7 +939,7 @@ public void testHighlighterWithSentenceBoundaryScannerAndLocale() throws Excepti } public void testHighlighterWithWordBoundaryScanner() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); indexRandom(true, client().prepareIndex("test").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog")); @@ -1000,7 +969,7 @@ public void testHighlighterWithWordBoundaryScanner() throws Exception { } public void testHighlighterWithWordBoundaryScannerAndLocale() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); indexRandom(true, client().prepareIndex("test").setSource("field1", "some quick and hairy brown:fox jumped over the lazy dog")); @@ -1034,7 +1003,7 @@ public void testHighlighterWithWordBoundaryScannerAndLocale() throws Exception { * phraseLimit is not set. Its default is now reasonably low. 
*/ public void testFVHManyMatches() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); // Index one megabyte of "t " over and over and over again @@ -1085,11 +1054,9 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception settings.put("index.analysis.analyzer.mock_english.filter", "mock_snowball"); assertAcked( prepareCreate("test").setSettings(settings) - .addMapping( - "type1", + .setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("foo") .field("type", "text") @@ -1119,7 +1086,6 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1270,7 +1236,7 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception } public void testFastVectorHighlighterManyDocs() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); int COUNT = between(20, 100); @@ -1297,7 +1263,6 @@ public void testFastVectorHighlighterManyDocs() throws Exception { public XContentBuilder type1TermVectorMapping() throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "text") @@ -1308,7 +1273,6 @@ public XContentBuilder type1TermVectorMapping() throws IOException { .field("term_vector", "with_positions_offsets") .endObject() .endObject() - .endObject() .endObject(); } @@ -1406,10 +1370,8 @@ public void testEscapeHtmlVector() throws Exception { public void testMultiMapperVectorWithStore() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( 
jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") @@ -1427,7 +1389,6 @@ public void testMultiMapperVectorWithStore() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1453,10 +1414,8 @@ public void testMultiMapperVectorWithStore() throws Exception { public void testMultiMapperVectorFromSource() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") @@ -1474,7 +1433,6 @@ public void testMultiMapperVectorFromSource() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1501,10 +1459,8 @@ public void testMultiMapperVectorFromSource() throws Exception { public void testMultiMapperNoVectorWithStore() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") @@ -1522,7 +1478,6 @@ public void testMultiMapperNoVectorWithStore() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -1549,10 +1504,8 @@ public void testMultiMapperNoVectorWithStore() throws Exception { public void testMultiMapperNoVectorFromSource() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") @@ -1570,7 +1523,6 @@ public void testMultiMapperNoVectorFromSource() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1739,7 +1691,7 @@ public void testBoostingQuery() { } public void testBoostingQueryTermVector() throws IOException { - 
assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); @@ -1770,7 +1722,7 @@ public void testCommonTermsQuery() { } public void testCommonTermsTermVector() throws IOException { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); ensureGreen(); client().prepareIndex("test").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); @@ -2278,7 +2230,7 @@ public void testHighlightNoMatchSizeNumberOfFragments() throws IOException { } public void testPostingsHighlighter() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2349,7 +2301,7 @@ public void testPostingsHighlighter() throws Exception { } public void testPostingsHighlighterMultipleFields() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping()).get()); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping()).get()); ensureGreen(); index( @@ -2371,7 +2323,7 @@ public void testPostingsHighlighterMultipleFields() throws Exception { } public void testPostingsHighlighterNumberOfFragments() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2457,7 +2409,6 @@ public void testPostingsHighlighterNumberOfFragments() throws Exception { public void testMultiMatchQueryHighlight() throws 
IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "text") @@ -2470,9 +2421,8 @@ public void testMultiMatchQueryHighlight() throws IOException { .field("term_vector", "with_positions_offsets") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type1", mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); client().prepareIndex("test") .setSource("field1", "The quick brown fox jumps over", "field2", "The quick brown fox jumps over") @@ -2507,7 +2457,7 @@ public void testMultiMatchQueryHighlight() throws IOException { } public void testPostingsHighlighterOrderByScore() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2585,10 +2535,8 @@ public void testPostingsHighlighterEscapeHtml() throws Exception { public void testPostingsHighlighterMultiMapperWithStore() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") @@ -2606,7 +2554,6 @@ public void testPostingsHighlighterMultiMapperWithStore() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -2645,10 +2592,8 @@ public void testPostingsHighlighterMultiMapperWithStore() throws Exception { public void testPostingsHighlighterMultiMapperFromSource() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") @@ -2666,7 +2611,6 @@ public 
void testPostingsHighlighterMultiMapperFromSource() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -2693,10 +2637,8 @@ public void testPostingsHighlighterMultiMapperFromSource() throws Exception { public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") @@ -2705,7 +2647,6 @@ public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -2726,7 +2667,7 @@ public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { } public void testPostingsHighlighterBoostingQuery() throws IOException { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.") @@ -2743,7 +2684,7 @@ public void testPostingsHighlighterBoostingQuery() throws IOException { } public void testPostingsHighlighterCommonTermsQuery() throws IOException { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2770,7 +2711,6 @@ public void testPostingsHighlighterCommonTermsQuery() throws IOException { private static XContentBuilder type1PostingsffsetsMapping() throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "text") @@ -2781,12 +2721,11 @@ private static XContentBuilder type1PostingsffsetsMapping() throws IOException { .field("index_options", "offsets") .endObject() .endObject() - .endObject() .endObject(); } public void testPostingsHighlighterPrefixQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2808,7 +2747,7 @@ public void testPostingsHighlighterPrefixQuery() throws Exception { } public void testPostingsHighlighterFuzzyQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2831,7 +2770,7 @@ public void testPostingsHighlighterFuzzyQuery() throws Exception { } public void testPostingsHighlighterRegexpQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2854,7 +2793,7 @@ public void 
testPostingsHighlighterRegexpQuery() throws Exception { } public void testPostingsHighlighterWildcardQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2890,7 +2829,7 @@ public void testPostingsHighlighterWildcardQuery() throws Exception { } public void testPostingsHighlighterTermRangeQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test").setSource("field1", "this is a test", "field2", "aaab").get(); @@ -2905,7 +2844,7 @@ public void testPostingsHighlighterTermRangeQuery() throws Exception { } public void testPostingsHighlighterQueryString() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2928,7 +2867,7 @@ public void testPostingsHighlighterQueryString() throws Exception { } public void testPostingsHighlighterRegexpQueryWithinConstantScoreQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); @@ -2942,7 +2881,7 @@ public void testPostingsHighlighterRegexpQueryWithinConstantScoreQuery() throws } public void testPostingsHighlighterMultiTermQueryMultipleLevels() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); 
client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); @@ -2959,7 +2898,7 @@ public void testPostingsHighlighterMultiTermQueryMultipleLevels() throws Excepti } public void testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); @@ -2974,7 +2913,7 @@ public void testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Except } public void testPostingsHighlighterQueryStringWithinFilteredQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test").setSource("field1", "The photography word will get highlighted").get(); @@ -2989,7 +2928,7 @@ public void testPostingsHighlighterQueryStringWithinFilteredQuery() throws Excep } public void testPostingsHighlighterManyDocs() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); + assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); int COUNT = between(20, 100); @@ -3025,7 +2964,6 @@ public void testPostingsHighlighterManyDocs() throws Exception { public void testDoesNotHighlightTypeName() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("typename") .startObject("properties") .startObject("foo") .field("type", "text") @@ -3033,9 +2971,8 @@ public void testDoesNotHighlightTypeName() throws Exception { .field("term_vector", "with_positions_offsets") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("typename", 
mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); indexRandom(true, client().prepareIndex("test").setSource("foo", "test typename")); @@ -3052,7 +2989,6 @@ public void testDoesNotHighlightTypeName() throws Exception { public void testDoesNotHighlightAliasFilters() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("typename") .startObject("properties") .startObject("foo") .field("type", "text") @@ -3060,9 +2996,8 @@ public void testDoesNotHighlightAliasFilters() throws Exception { .field("term_vector", "with_positions_offsets") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("typename", mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); assertAcked(client().admin().indices().prepareAliases().addAlias("test", "filtered_alias", matchQuery("foo", "japanese"))); ensureGreen(); @@ -3078,7 +3013,7 @@ public void testDoesNotHighlightAliasFilters() throws Exception { } public void testFastVectorHighlighterPhraseBoost() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); + assertAcked(prepareCreate("test").setMapping(type1TermVectorMapping())); phraseBoostTestCase("fvh"); } @@ -3174,8 +3109,7 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti // see https://github.com/elastic/elasticsearch/issues/17537 XContentBuilder mappings = jsonBuilder(); mappings.startObject(); - mappings.startObject("type") - .startObject("properties") + mappings.startObject("properties") .startObject("geo_point") .field("type", "geo_point") .endObject() @@ -3184,10 +3118,9 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti .field("term_vector", "with_positions_offsets_payloads") .field("index_options", "offsets") .endObject() - .endObject() .endObject(); mappings.endObject(); - 
assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test") .setId("1") @@ -3216,18 +3149,16 @@ public void testGeoFieldHighlightingWhenQueryGetsRewritten() throws IOException // see https://github.com/elastic/elasticsearch/issues/17537#issuecomment-244939633 XContentBuilder mappings = jsonBuilder(); mappings.startObject(); - mappings.startObject("jobs") - .startObject("properties") + mappings.startObject("properties") .startObject("loc") .field("type", "geo_point") .endObject() .startObject("jd") .field("type", "text") .endObject() - .endObject() .endObject(); mappings.endObject(); - assertAcked(prepareCreate("test").addMapping("jobs", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); ensureYellow(); client().prepareIndex("test") @@ -3259,15 +3190,9 @@ public void testKeywordFieldHighlighting() throws IOException { // check that keyword highlighting works XContentBuilder mappings = jsonBuilder(); mappings.startObject(); - mappings.startObject("type") - .startObject("properties") - .startObject("keyword_field") - .field("type", "keyword") - .endObject() - .endObject() - .endObject(); + mappings.startObject("properties").startObject("keyword_field").field("type", "keyword").endObject().endObject(); mappings.endObject(); - assertAcked(prepareCreate("test").addMapping("type", mappings)); + assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test") .setId("1") @@ -3299,7 +3224,7 @@ public void testCopyToFields() throws Exception { // If field is not stored, it is looked up in source (but source has only 'foo' b.startObject("foo_copy").field("type", "text").field("store", true).endObject(); b.endObject().endObject(); - prepareCreate("test").addMapping("type", b).get(); + prepareCreate("test").setMapping(b).get(); client().prepareIndex("test") .setId("1") diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java index 8767904e03c72..aed2975ed3234 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -88,7 +88,7 @@ public void setUp() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("old_index").addMapping("_doc", oldIndexMapping)); + assertAcked(prepareCreate("old_index").setMapping(oldIndexMapping)); XContentBuilder newIndexMapping = XContentFactory.jsonBuilder() .startObject() @@ -106,7 +106,7 @@ public void setUp() throws Exception { .endObject() .endObject() .endObject(); - assertAcked(prepareCreate("new_index").addMapping("_doc", newIndexMapping)); + assertAcked(prepareCreate("new_index").setMapping(newIndexMapping)); assertAcked(client().admin().indices().prepareAliases().addAlias("new_index", "current")); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index 72c60e98ec328..46d47d838f68b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -774,10 +774,8 @@ public void testGetFieldsComplexField() throws Exception { .indices() .prepareCreate("my-index") .setSettings(Settings.builder().put("index.refresh_interval", -1)) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, + .setMapping( jsonBuilder().startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "object") @@ -800,7 +798,6 @@ public void testGetFieldsComplexField() throws Exception { 
.endObject() .endObject() .endObject() - .endObject() ) .get(); @@ -1181,7 +1178,6 @@ public void testScriptFields() throws Exception { public void testDocValueFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -1203,9 +1199,8 @@ public void testDocValueFieldsWithFieldAlias() throws Exception { .field("path", "date_field") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen("test"); DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); @@ -1244,7 +1239,6 @@ public void testDocValueFieldsWithFieldAlias() throws Exception { public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("_source") .field("enabled", false) .endObject() @@ -1266,9 +1260,8 @@ public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { .field("path", "date_field") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen("test"); DateTime date = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC); @@ -1306,7 +1299,6 @@ public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { public void testStoredFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "text") @@ -1325,9 +1317,8 @@ public void testStoredFieldsWithFieldAlias() throws Exception 
{ .field("path", "field2") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); index("test", MapperService.SINGLE_MAPPING_NAME, "1", "field1", "value1", "field2", "value2"); refresh("test"); @@ -1350,7 +1341,6 @@ public void testStoredFieldsWithFieldAlias() throws Exception { public void testWildcardStoredFieldsWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject("field1") .field("type", "text") @@ -1369,9 +1359,8 @@ public void testWildcardStoredFieldsWithFieldAlias() throws Exception { .field("path", "field2") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); index("test", MapperService.SINGLE_MAPPING_NAME, "1", "field1", "value1", "field2", "value2"); refresh("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java index 712026eaf5c43..709a916d98838 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java @@ -96,10 +96,8 @@ protected boolean forbidPrivateIndexSettings() { public void testDistanceScoreGeoLinGaussExp() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -109,7 +107,6 @@ public void 
testDistanceScoreGeoLinGaussExp() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -223,10 +220,8 @@ public void testDistanceScoreGeoLinGaussExp() throws Exception { public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -236,7 +231,6 @@ public void testDistanceScoreGeoLinGaussExpWithOffset() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -337,10 +331,8 @@ public void testBoostModeSettingWorks() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1).build(); assertAcked( prepareCreate("test").setSettings(settings) - .addMapping( - "type1", + .setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -350,7 +342,6 @@ public void testBoostModeSettingWorks() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -436,10 +427,8 @@ public void testBoostModeSettingWorks() throws Exception { public void testParseGeoPoint() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -449,7 +438,6 @@ public void testParseGeoPoint() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -504,10 +492,8 @@ public void testParseGeoPoint() throws Exception { public void testCombineModes() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") 
.field("type", "text") @@ -517,7 +503,6 @@ public void testCombineModes() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -621,10 +606,8 @@ public void testCombineModes() throws Exception { public void testCombineModesExplain() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -634,7 +617,6 @@ public void testCombineModesExplain() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -691,10 +673,8 @@ public void testCombineModesExplain() throws Exception { public void testExceptionThrownIfScaleLE0() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -704,7 +684,6 @@ public void testExceptionThrownIfScaleLE0() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); client().index( @@ -731,10 +710,8 @@ public void testExceptionThrownIfScaleLE0() throws Exception { public void testParseDateMath() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -745,7 +722,6 @@ public void testParseDateMath() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); client().index( @@ -785,10 +761,8 @@ public void testParseDateMath() throws Exception { public void testValueMissingLin() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ 
-801,7 +775,6 @@ public void testValueMissingLin() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -854,10 +827,8 @@ public void testDateWithoutOrigin() throws Exception { ZonedDateTime dt = ZonedDateTime.now(ZoneOffset.UTC); assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -867,7 +838,6 @@ public void testDateWithoutOrigin() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -933,7 +903,6 @@ public void testManyDocsLin() throws Exception { Version version = VersionUtils.randomIndexCompatibleVersion(random()); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "text") @@ -949,8 +918,8 @@ public void testManyDocsLin() throws Exception { .startObject("geo") .field("type", "geo_point") .field("ignore_malformed", true); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(xContentBuilder)); int numDocs = 200; List indexBuilders = new ArrayList<>(); @@ -1013,10 +982,8 @@ public void testManyDocsLin() throws Exception { public void testParsingExceptionIfFieldDoesNotExist() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "text") @@ -1026,7 +993,6 @@ public void testParsingExceptionIfFieldDoesNotExist() throws Exception { .endObject() 
.endObject() .endObject() - .endObject() ) ); int numDocs = 2; @@ -1066,10 +1032,8 @@ public void testParsingExceptionIfFieldDoesNotExist() throws Exception { public void testParsingExceptionIfFieldTypeDoesNotMatch() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "text") @@ -1079,7 +1043,6 @@ public void testParsingExceptionIfFieldTypeDoesNotMatch() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); client().index( @@ -1105,10 +1068,8 @@ public void testParsingExceptionIfFieldTypeDoesNotMatch() throws Exception { public void testNoQueryGiven() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("test") .field("type", "text") @@ -1118,7 +1079,6 @@ public void testNoQueryGiven() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); client().index(indexRequest("test").source(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject())) @@ -1138,10 +1098,8 @@ public void testNoQueryGiven() throws Exception { public void testMultiFieldOptions() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -1154,7 +1112,6 @@ public void testMultiFieldOptions() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -1271,10 +1228,8 @@ public void testMultiFieldOptions() throws Exception { public void testDistanceScoreGeoLinGaussExplain() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( 
jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -1284,7 +1239,6 @@ public void testDistanceScoreGeoLinGaussExplain() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java index 8e0a14b7062a7..4e1df591cb245 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java @@ -57,10 +57,8 @@ public class FunctionScoreFieldValueIT extends OpenSearchIntegTestCase { public void testFieldValueFactor() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", randomFrom(new String[] { "short", "float", "long", "integer", "double" })) @@ -70,7 +68,6 @@ public void testFieldValueFactor() throws IOException { .endObject() .endObject() .endObject() - .endObject() ).get() ); @@ -169,10 +166,8 @@ public void testFieldValueFactor() throws IOException { public void testFieldValueFactorExplain() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", randomFrom(new String[] { "short", "float", "long", "integer", "double" })) @@ -182,7 +177,6 @@ public void testFieldValueFactorExplain() throws IOException { .endObject() .endObject() .endObject() - .endObject() ).get() ); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java index 885f1aa7ff7a0..a7a14f3b0d889 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java @@ -76,10 +76,8 @@ public void testPlugin() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping( - "type1", + .setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("test") .field("type", "text") @@ -89,7 +87,6 @@ public void testPlugin() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) .get(); client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java index e216e92c63bad..c4a41ad5b76e2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java @@ -127,10 +127,8 @@ public void testEnforceWindowSize() { public void testRescorePhrase() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("analyzer", "whitespace") @@ -138,7 +136,6 @@ public void testRescorePhrase() throws Exception { .endObject() .endObject() .endObject() - .endObject() ).setSettings(Settings.builder().put(indexSettings()).put("index.number_of_shards", 1)) ); @@ -190,22 +187,16 @@ public void 
testMoreDocs() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "text") .field("analyzer", "whitespace") .endObject() .endObject() - .endObject() .endObject(); assertAcked( - client().admin() - .indices() - .prepareCreate("test") - .addMapping("type1", mapping) - .setSettings(builder.put("index.number_of_shards", 1)) + client().admin().indices().prepareCreate("test").setMapping(mapping).setSettings(builder.put("index.number_of_shards", 1)) ); client().prepareIndex("test").setId("1").setSource("field1", "massachusetts avenue boston massachusetts").get(); @@ -285,22 +276,16 @@ public void testSmallRescoreWindow() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "text") .field("analyzer", "whitespace") .endObject() .endObject() - .endObject() .endObject(); assertAcked( - client().admin() - .indices() - .prepareCreate("test") - .addMapping("type1", mapping) - .setSettings(builder.put("index.number_of_shards", 1)) + client().admin().indices().prepareCreate("test").setMapping(mapping).setSettings(builder.put("index.number_of_shards", 1)) ); client().prepareIndex("test").setId("3").setSource("field1", "massachusetts").get(); @@ -371,22 +356,16 @@ public void testRescorerMadeScoresWorse() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "text") .field("analyzer", "whitespace") .endObject() .endObject() - .endObject() .endObject(); assertAcked( - client().admin() - .indices() - .prepareCreate("test") - .addMapping("type1", mapping) - .setSettings(builder.put("index.number_of_shards", 1)) + 
client().admin().indices().prepareCreate("test").setMapping(mapping).setSettings(builder.put("index.number_of_shards", 1)) ); client().prepareIndex("test").setId("3").setSource("field1", "massachusetts").get(); @@ -524,10 +503,8 @@ public void testEquivalence() throws Exception { public void testExplain() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("analyzer", "whitespace") @@ -535,7 +512,6 @@ public void testExplain() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -785,10 +761,8 @@ private int indexRandomNumbers(String analyzer, int shards, boolean dummyDocs) t } assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("analyzer", analyzer) @@ -796,7 +770,6 @@ private int indexRandomNumbers(String analyzer, int shards, boolean dummyDocs) t .endObject() .endObject() .endObject() - .endObject() ).setSettings(builder) ); int numDocs = randomIntBetween(100, 150); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java index f865e56cd99e3..90e7163342547 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoBoundingBoxQueryIT.java @@ -64,12 +64,11 @@ public void testSimpleBoundingBoxTest() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") 
.startObject("location") .field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(xContentBuilder)); ensureGreen(); client().prepareIndex("test") @@ -195,12 +194,11 @@ public void testLimit2BoundingBox() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("location") .field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(xContentBuilder)); ensureGreen(); client().prepareIndex("test") @@ -275,12 +273,11 @@ public void testCompleteLonRange() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("location") .field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(xContentBuilder)); ensureGreen(); client().prepareIndex("test") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java index d00c0a8c0faf7..ba7af0ecfcb06 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoDistanceIT.java @@ -124,12 +124,11 @@ public void setupTestIndex() throws IOException { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("location") .field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(xContentBuilder)); ensureGreen(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java index 8322c9704eecb..975cb5f7e3b8c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java @@ -393,7 +393,6 @@ public void testBulk() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("country") .startObject("properties") .startObject("pin") .field("type", "geo_point"); @@ -404,10 +403,9 @@ public void testBulk() throws Exception { .field("ignore_malformed", true) .endObject() .endObject() - .endObject() .endObject(); - client().admin().indices().prepareCreate("countries").setSettings(settings).addMapping("country", xContentBuilder).get(); + 
client().admin().indices().prepareCreate("countries").setSettings(settings).setMapping(xContentBuilder).get(); BulkResponse bulk = client().prepareBulk().add(bulkAction, 0, bulkAction.length, null, xContentBuilder.contentType()).get(); for (BulkItemResponse item : bulk.getItems()) { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java index 7ffd648d06611..69b2e655dd0ad 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java @@ -86,17 +86,14 @@ protected Collection> nodePlugins() { public void testSimpleMoreLikeThis() throws Exception { logger.info("Creating index test"); assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("text") .field("type", "text") .endObject() .endObject() .endObject() - .endObject() ) ); @@ -119,17 +116,14 @@ public void testSimpleMoreLikeThis() throws Exception { public void testSimpleMoreLikeThisWithTypes() throws Exception { logger.info("Creating index test"); assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("text") .field("type", "text") .endObject() .endObject() .endObject() - .endObject() ) ); @@ -151,10 +145,8 @@ public void testSimpleMoreLikeThisWithTypes() throws Exception { // Issue #30148 public void testMoreLikeThisForZeroTokensInOneOfTheAnalyzedFields() throws Exception { - CreateIndexRequestBuilder createIndexRequestBuilder = prepareCreate("test").addMapping( - "type", + CreateIndexRequestBuilder createIndexRequestBuilder = prepareCreate("test").setMapping( 
jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("myField") .field("type", "text") @@ -164,7 +156,6 @@ public void testMoreLikeThisForZeroTokensInOneOfTheAnalyzedFields() throws Excep .endObject() .endObject() .endObject() - .endObject() ); assertAcked(createIndexRequestBuilder); @@ -214,17 +205,14 @@ public void testSimpleMoreLikeOnLongField() throws Exception { public void testMoreLikeThisWithAliases() throws Exception { logger.info("Creating index test"); assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("text") .field("type", "text") .endObject() .endObject() .endObject() - .endObject() ) ); logger.info("Creating aliases alias release"); @@ -370,10 +358,8 @@ public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { // Issue #3252 public void testNumericField() throws Exception { final String[] numericTypes = new String[] { "byte", "short", "integer", "long" }; - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("int_value") .field("type", randomFrom(numericTypes)) @@ -383,7 +369,6 @@ public void testNumericField() throws Exception { .endObject() .endObject() .endObject() - .endObject() ).get(); ensureGreen(); client().prepareIndex("test") @@ -486,7 +471,6 @@ public void testNumericField() throws Exception { public void testMoreLikeThisWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("_doc") .startObject("properties") .startObject("text") .field("type", "text") @@ -496,10 +480,9 @@ public void testMoreLikeThisWithFieldAlias() throws Exception { .field("path", "text") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("_doc", 
mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); ensureGreen(); index("test", "_doc", "1", "text", "lucene"); @@ -517,17 +500,14 @@ public void testMoreLikeThisWithFieldAlias() throws Exception { public void testSimpleMoreLikeInclude() throws Exception { logger.info("Creating index test"); assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("text") .field("type", "text") .endObject() .endObject() .endObject() - .endObject() ) ); @@ -584,17 +564,14 @@ public void testSimpleMoreLikeInclude() throws Exception { public void testSimpleMoreLikeThisIds() throws Exception { logger.info("Creating index test"); assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("text") .field("type", "text") .endObject() .endObject() .endObject() - .endObject() ) ); @@ -867,10 +844,8 @@ public void testWithRouting() throws IOException { public void testWithMissingRouting() throws IOException { logger.info("Creating index test with routing required for type1"); assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("text") .field("type", "text") @@ -880,7 +855,6 @@ public void testWithMissingRouting() throws IOException { .field("required", true) .endObject() .endObject() - .endObject() ) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index c6c58e6fcb6a5..e3c1abff5d206 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -213,10 +213,8 @@ public void testSimpleNested() throws Exception { public void testMultiNested() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("nested1") .field("type", "nested") @@ -228,7 +226,6 @@ public void testMultiNested() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -371,10 +368,8 @@ public void testMultiNested() throws Exception { public void testDeleteNestedDocsWithAlias() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.refresh_interval", -1).build()) - .addMapping( - "type1", + .setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("type", "text") @@ -384,7 +379,6 @@ public void testDeleteNestedDocsWithAlias() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -437,17 +431,14 @@ public void testDeleteNestedDocsWithAlias() throws Exception { public void testExplain() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("nested1") .field("type", "nested") .endObject() .endObject() .endObject() - .endObject() ) ); @@ -485,10 +476,8 @@ public void testExplain() throws Exception { public void testSimpleNestedSorting() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.refresh_interval", -1)) - .addMapping( - "type1", + .setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("nested1") .field("type", "nested") @@ -501,7 +490,6 @@ public void 
testSimpleNestedSorting() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -586,10 +574,8 @@ public void testSimpleNestedSorting() throws Exception { public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { assertAcked( prepareCreate("test").setSettings(Settings.builder().put(indexSettings()).put("index.refresh_interval", -1)) - .addMapping( - "type1", + .setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("nested1") .field("type", "nested") @@ -604,7 +590,6 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1048,11 +1033,9 @@ public void testLeakingSortValues() throws Exception { public void testSortNestedWithNestedFilter() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("grand_parent_values") .field("type", "long") @@ -1075,7 +1058,6 @@ public void testSortNestedWithNestedFilter() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1448,8 +1430,7 @@ public void testSortNestedWithNestedFilter() throws Exception { // Issue #9305 public void testNestedSortingWithNestedFilterAsFilter() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( jsonBuilder().startObject() .startObject("properties") .startObject("officelocation") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java index 89c614485b620..75b8e46802061 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java @@ -70,7 +70,6 @@ public void testEmptyIndex() throws Exception { public void testExists() throws Exception { XContentBuilder mapping = XContentBuilder.builder(JsonXContent.jsonXContent) .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "text") @@ -95,10 +94,9 @@ public void testExists() throws Exception { .endObject() .endObject() .endObject() - .endObject() .endObject(); - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", mapping)); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping(mapping)); Map barObject = new HashMap<>(); barObject.put("foo", "bar"); barObject.put("bar", singletonMap("bar", "foo")); @@ -176,7 +174,6 @@ public void testExists() throws Exception { public void testFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("bar") .field("type", "long") @@ -194,9 +191,8 @@ public void testFieldAlias() throws Exception { .field("path", "foo.bar") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("idx").addMapping("type", mapping)); + assertAcked(prepareCreate("idx").setMapping(mapping)); ensureGreen("idx"); List indexRequests = new ArrayList<>(); @@ -226,7 +222,6 @@ public void testFieldAlias() throws Exception { public void testFieldAliasWithNoDocValues() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "long") @@ -237,9 +232,8 @@ public void testFieldAliasWithNoDocValues() throws Exception { .field("path", "foo") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("idx").addMapping("type", mapping)); + assertAcked(prepareCreate("idx").setMapping(mapping)); ensureGreen("idx"); 
List indexRequests = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java index b40a034fc2c92..66b42fe266887 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java @@ -109,7 +109,7 @@ public void init() throws Exception { .put("index.analysis.analyzer.category.tokenizer", "standard") .put("index.analysis.analyzer.category.filter", "lowercase") ); - assertAcked(builder.addMapping("test", createMapping())); + assertAcked(builder.setMapping(createMapping())); ensureGreen(); int numDocs = scaledRandomIntBetween(50, 100); List builders = new ArrayList<>(); @@ -259,7 +259,6 @@ public void init() throws Exception { private XContentBuilder createMapping() throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("test") .startObject("properties") .startObject("id") .field("type", "keyword") @@ -287,7 +286,6 @@ private XContentBuilder createMapping() throws IOException { .field("type", "date") .endObject() .endObject() - .endObject() .endObject(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java index 494aa4c0e6b88..5c7e53fda3f23 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java @@ -278,16 +278,14 @@ public void testAllFieldsWithSpecifiedLeniency() throws IOException { public void testLimitOnExpandedFieldsButIgnoreUnmappedFields() throws Exception { XContentBuilder builder = jsonBuilder(); builder.startObject(); - builder.startObject("_doc"); builder.startObject("properties"); for (int i = 0; 
i < CLUSTER_MAX_CLAUSE_COUNT; i++) { builder.startObject("field" + i).field("type", "text").endObject(); } builder.endObject(); // properties - builder.endObject(); // type1 builder.endObject(); - assertAcked(prepareCreate("ignoreunmappedfields").addMapping("_doc", builder)); + assertAcked(prepareCreate("ignoreunmappedfields").setMapping(builder)); client().prepareIndex("ignoreunmappedfields").setId("1").setSource("field1", "foo bar baz").get(); refresh(); @@ -303,25 +301,19 @@ public void testLimitOnExpandedFields() throws Exception { XContentBuilder builder = jsonBuilder(); builder.startObject(); { - builder.startObject("_doc"); - { - builder.startObject("properties"); - { - for (int i = 0; i < CLUSTER_MAX_CLAUSE_COUNT; i++) { - builder.startObject("field_A" + i).field("type", "text").endObject(); - builder.startObject("field_B" + i).field("type", "text").endObject(); - } - builder.endObject(); - } - builder.endObject(); + builder.startObject("properties"); + for (int i = 0; i < CLUSTER_MAX_CLAUSE_COUNT; i++) { + builder.startObject("field_A" + i).field("type", "text").endObject(); + builder.startObject("field_B" + i).field("type", "text").endObject(); } builder.endObject(); } + builder.endObject(); assertAcked( prepareCreate("testindex").setSettings( Settings.builder().put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), CLUSTER_MAX_CLAUSE_COUNT + 100) - ).addMapping("_doc", builder) + ).setMapping(builder) ); client().prepareIndex("testindex").setId("1").setSource("field_A0", "foo bar baz").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index fb744ccfc4655..fa2d79ecb2017 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -1119,10 +1119,8 @@ public void testTermsQuery() 
throws Exception { public void testTermsLookupFilter() throws Exception { assertAcked(prepareCreate("lookup").addMapping("type", "terms", "type=text", "other", "type=text")); assertAcked( - prepareCreate("lookup2").addMapping( - "type", + prepareCreate("lookup2").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("arr") .startObject("properties") @@ -1133,7 +1131,6 @@ public void testTermsLookupFilter() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); assertAcked(prepareCreate("lookup3").addMapping("type", "_source", "enabled=false", "terms", "type=text")); @@ -1600,10 +1597,8 @@ public void testSpanNot() throws IOException, ExecutionException, InterruptedExc public void testSimpleDFSQuery() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "_doc", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("_doc") .startObject("_routing") .field("required", true) .endObject() @@ -1621,7 +1616,6 @@ public void testSimpleDFSQuery() throws IOException { .endObject() .endObject() .endObject() - .endObject() ) ); @@ -1876,8 +1870,7 @@ public void testRangeQueryWithLocaleMapping() throws Exception { assert ("SPI,COMPAT".equals(System.getProperty("java.locale.providers"))) : "`-Djava.locale.providers=SPI,COMPAT` needs to be set"; assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() .startObject("properties") .startObject("date_field") @@ -1978,7 +1971,6 @@ public void testRangeQueryRangeFields_24744() throws Exception { public void testNestedQueryWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("_doc") .startObject("properties") .startObject("section") .field("type", "nested") @@ -1993,9 +1985,8 @@ public void testNestedQueryWithFieldAlias() throws Exception { .endObject() .endObject() .endObject() - 
.endObject() .endObject(); - assertAcked(prepareCreate("index").addMapping("_doc", mapping)); + assertAcked(prepareCreate("index").setMapping(mapping)); XContentBuilder source = XContentFactory.jsonBuilder() .startObject() @@ -2019,7 +2010,6 @@ public void testNestedQueryWithFieldAlias() throws Exception { public void testFieldAliasesForMetaFields() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("id-alias") .field("type", "alias") @@ -2030,9 +2020,8 @@ public void testFieldAliasesForMetaFields() throws Exception { .field("path", "_routing") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); IndexRequestBuilder indexRequest = client().prepareIndex("test").setId("1").setRouting("custom").setSource("field", "value"); indexRandom(true, false, indexRequest); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java index c53eda63f155f..e0391d9cbc971 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java @@ -239,10 +239,8 @@ public void testSimpleQueryStringMinimumShouldMatch() throws Exception { public void testNestedFieldSimpleQueryString() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("body") .field("type", "text") @@ -253,7 +251,6 @@ public void testNestedFieldSimpleQueryString() throws IOException { .endObject() // fields .endObject() // body .endObject() // properties - .endObject() // type1 .endObject() 
) ); @@ -607,19 +604,17 @@ public void testAllFieldsWithSpecifiedLeniency() throws IOException { public void testLimitOnExpandedFields() throws Exception { XContentBuilder builder = jsonBuilder(); builder.startObject(); - builder.startObject("type1"); builder.startObject("properties"); for (int i = 0; i < CLUSTER_MAX_CLAUSE_COUNT + 1; i++) { builder.startObject("field" + i).field("type", "text").endObject(); } builder.endObject(); // properties - builder.endObject(); // type1 builder.endObject(); assertAcked( prepareCreate("toomanyfields").setSettings( Settings.builder().put(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), CLUSTER_MAX_CLAUSE_COUNT + 100) - ).addMapping("type1", builder) + ).setMapping(builder) ); client().prepareIndex("toomanyfields").setId("1").setSource("field1", "foo bar baz").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java index 3b120dcab22f2..3bc9eb5b25261 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -127,11 +127,7 @@ public void testCustomScriptBinaryField() throws Exception { final byte[] randomBytesDoc2 = getRandomBytes(16); assertAcked( - client().admin() - .indices() - .prepareCreate("my-index") - .addMapping("my-type", createMappingSource("binary")) - .setSettings(indexSettings()) + client().admin().indices().prepareCreate("my-index").setMapping(createMappingSource("binary")).setSettings(indexSettings()) ); client().prepareIndex("my-index") .setId("1") @@ -170,14 +166,12 @@ private byte[] getRandomBytes(int len) { private XContentBuilder createMappingSource(String fieldType) throws IOException { return XContentFactory.jsonBuilder() .startObject() - .startObject("my-type") 
.startObject("properties") .startObject("binaryData") .field("type", fieldType) .field("doc_values", "true") .endObject() .endObject() - .endObject() .endObject(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java index be55193da30cc..19cf1ee3a0ee7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java @@ -115,10 +115,8 @@ public void testDuelQueryThenFetch() throws Exception { private TestContext create(SearchType... searchTypes) throws Exception { assertAcked( - prepareCreate("index").addMapping( - "type", + prepareCreate("index").setMapping( jsonBuilder().startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "long") @@ -139,7 +137,6 @@ private TestContext create(SearchType... searchTypes) throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java index 38aac2850dc56..3d1d407b386e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java @@ -276,11 +276,9 @@ public void testTrackScores() throws Exception { public void testRandomSorting() throws IOException, InterruptedException, ExecutionException { Random random = random(); assertAcked( - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("sparse_bytes") .field("type", "keyword") @@ -290,7 +288,6 @@ public void testRandomSorting() throws IOException, 
InterruptedException, Execut .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -558,11 +555,9 @@ public void testIssue2991() { public void testSimpleSorts() throws Exception { Random random = random(); assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("str_value") .field("type", "keyword") @@ -590,7 +585,6 @@ public void testSimpleSorts() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -801,11 +795,9 @@ public void testSimpleSorts() throws Exception { public void testSortMissingNumbers() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("i_value") .field("type", "integer") @@ -815,7 +807,6 @@ public void testSortMissingNumbers() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -873,18 +864,15 @@ public void testSortMissingNumbers() throws Exception { public void testSortMissingStrings() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("value") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1009,11 +997,9 @@ public void testIgnoreUnmapped() throws Exception { public void testSortMVField() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("long_values") .field("type", "long") @@ -1038,7 +1024,6 @@ public void 
testSortMVField() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1345,18 +1330,15 @@ public void testSortMVField() throws Exception { public void testSortOnRareField() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("string_values") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -1494,11 +1476,9 @@ public void testSortMetaField() throws Exception { */ public void testNestedSort() throws IOException, InterruptedException, ExecutionException { assertAcked( - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("nested") .field("type", "nested") @@ -1530,7 +1510,6 @@ public void testNestedSort() throws IOException, InterruptedException, Execution .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java index 1739add2ff5e8..5cc9ab77bcb2d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java @@ -72,12 +72,11 @@ public void testDistanceSortingMVFields() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("locations") .field("type", "geo_point"); - xContentBuilder.field("ignore_malformed", 
true).endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); + xContentBuilder.field("ignore_malformed", true).endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(xContentBuilder)); ensureGreen(); client().prepareIndex("test") @@ -268,12 +267,11 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("locations") .field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test").setSettings(settings).setMapping(xContentBuilder)); ensureGreen(); client().prepareIndex("test") @@ -333,7 +331,6 @@ public void testDistanceSortingNestedFields() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("company") .startObject("properties") .startObject("name") .field("type", "text") @@ -346,9 +343,9 @@ public void testDistanceSortingNestedFields() throws Exception { .endObject() .startObject("location") .field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject().endObject().endObject(); + xContentBuilder.endObject().endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("companies").setSettings(settings).addMapping("company", xContentBuilder)); + assertAcked(prepareCreate("companies").setSettings(settings).setMapping(xContentBuilder)); ensureGreen(); 
indexRandom( @@ -590,15 +587,14 @@ public void testGeoDistanceFilter() throws IOException { XContentBuilder mapping = JsonXContent.contentBuilder() .startObject() - .startObject("location") .startObject("properties") .startObject("pin") .field("type", "geo_point"); - mapping.endObject().endObject().endObject().endObject(); + mapping.endObject().endObject().endObject(); XContentBuilder source = JsonXContent.contentBuilder().startObject().field("pin", Geohash.stringEncode(lon, lat)).endObject(); - assertAcked(prepareCreate("locations").setSettings(settings).addMapping("location", mapping)); + assertAcked(prepareCreate("locations").setSettings(settings).setMapping(mapping)); client().prepareIndex("locations").setId("1").setCreate(true).setSource(source).get(); refresh(); client().prepareGet("locations", "1").get(); @@ -614,12 +610,11 @@ public void testGeoDistanceFilter() throws IOException { public void testDistanceSortingWithUnmappedField() throws Exception { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("locations") .field("type", "geo_point"); - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test1").addMapping("type1", xContentBuilder)); + xContentBuilder.endObject().endObject().endObject(); + assertAcked(prepareCreate("test1").setMapping(xContentBuilder)); assertAcked(prepareCreate("test2")); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java index 70bb24532aa7d..8ff0790e7cb48 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java @@ -134,10 +134,8 @@ static > T getMinValueScript( public void testSimpleSorts() throws Exception { Random random = random(); 
assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("str_value") .field("type", "keyword") @@ -165,7 +163,6 @@ public void testSimpleSorts() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); @@ -446,17 +443,14 @@ public void testDocumentsWithNullValue() throws Exception { public void test2920() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "test", + prepareCreate("test").setMapping( jsonBuilder().startObject() - .startObject("test") .startObject("properties") .startObject("value") .field("type", "keyword") .endObject() .endObject() .endObject() - .endObject() ) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java index e85eff8450ca4..690564fe1cac8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java @@ -682,15 +682,13 @@ public void testThatDisablingPositionIncrementsWorkForStopwords() throws Excepti public void testThatUpgradeToMultiFieldsWorks() throws Exception { final XContentBuilder mapping = jsonBuilder().startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "text") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); + assertAcked(prepareCreate(INDEX).setMapping(mapping)); client().prepareIndex(INDEX) .setId("1") .setRefreshPolicy(IMMEDIATE) @@ -1328,7 +1326,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi .indices() 
.prepareCreate(INDEX) .setSettings(Settings.builder().put(indexSettings()).put(settings)) - .addMapping(MapperService.SINGLE_MAPPING_NAME, mapping) + .setMapping(mapping) .get() ); } @@ -1376,17 +1374,14 @@ public void testVeryLongInput() throws IOException { client().admin() .indices() .prepareCreate(INDEX) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, + .setMapping( jsonBuilder().startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") .endObject() .endObject() .endObject() - .endObject() ) .get() ); @@ -1408,17 +1403,14 @@ public void testReservedChars() throws IOException { client().admin() .indices() .prepareCreate(INDEX) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, + .setMapping( jsonBuilder().startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") .endObject() .endObject() .endObject() - .endObject() ) .get() ); @@ -1449,17 +1441,14 @@ public void testIssue5930() throws IOException { client().admin() .indices() .prepareCreate(INDEX) - .addMapping( - MapperService.SINGLE_MAPPING_NAME, + .setMapping( jsonBuilder().startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") .endObject() .endObject() .endObject() - .endObject() ) .get() ); @@ -1508,7 +1497,6 @@ public void testMultiDocSuggestions() throws Exception { public void testSuggestWithFieldAlias() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject(MapperService.SINGLE_MAPPING_NAME) .startObject("properties") .startObject(FIELD) .field("type", "completion") @@ -1518,9 +1506,8 @@ public void testSuggestWithFieldAlias() throws Exception { .field("path", FIELD) .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, 
mapping)); + assertAcked(prepareCreate(INDEX).setMapping(mapping)); List builders = new ArrayList<>(); builders.add(client().prepareIndex(INDEX).setSource(FIELD, "apple")); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java index acc4350d149cd..27d3d455330f5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -42,7 +42,6 @@ import org.opensearch.common.unit.Fuzziness; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.index.mapper.MapperService; import org.opensearch.rest.RestStatus; import org.opensearch.search.suggest.CompletionSuggestSearchIT.CompletionMappingBuilder; import org.opensearch.search.suggest.completion.CompletionSuggestionBuilder; @@ -608,7 +607,7 @@ public void testGeoField() throws Exception { mapping.endObject(); mapping.endObject(); - assertAcked(prepareCreate(INDEX).addMapping(MapperService.SINGLE_MAPPING_NAME, mapping)); + assertAcked(prepareCreate(INDEX).setMapping(mapping)); XContentBuilder source1 = jsonBuilder().startObject() .startObject("location") @@ -754,7 +753,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi .indices() .prepareCreate(INDEX) .setSettings(Settings.builder().put(indexSettings()).put(settings)) - .addMapping(MapperService.SINGLE_MAPPING_NAME, mapping) + .setMapping(mapping) .get() ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java index bb6e1643dd767..cd3921d91a3ed 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java @@ -127,16 +127,14 @@ public void testSuggestAcrossMultipleIndices() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("text") .field("type", "text") .field("analyzer", "keyword") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test_2").addMapping("type1", mapping)); + assertAcked(prepareCreate("test_2").setMapping(mapping)); ensureGreen(); index("test_2", "type1", "1", "text", "ab cd"); @@ -217,7 +215,6 @@ public void testSuggestModes() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("name") .field("type", "text") @@ -230,9 +227,8 @@ public void testSuggestModes() throws IOException { .endObject() .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type1", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); index("test", "type1", "1", "name", "I like iced tea"); @@ -300,7 +296,6 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut ); XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("name") .field("type", "text") @@ -313,9 +308,8 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut .endObject() .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type1", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); indexRandom( @@ -558,7 +552,6 @@ public void testPrefixLength() throws IOException { ); XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") 
.startObject("properties") .startObject("body") .field("type", "text") @@ -569,9 +562,8 @@ public void testPrefixLength() throws IOException { .field("analyzer", "bigram") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type1", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); index("test", "type1", "1", "body", "hello world"); @@ -614,7 +606,6 @@ public void testBasicPhraseSuggest() throws IOException, URISyntaxException { ); XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("body") .field("type", "text") @@ -625,9 +616,8 @@ public void testBasicPhraseSuggest() throws IOException, URISyntaxException { .field("analyzer", "bigram") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type1", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); String[] strings = new String[] { @@ -758,7 +748,6 @@ public void testSizeParam() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("body") .field("type", "text") @@ -769,9 +758,8 @@ public void testSizeParam() throws IOException { .field("analyzer", "bigram") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type1", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); String line = "xorr the god jewel"; @@ -833,16 +821,14 @@ public void testShardFailures() throws IOException, InterruptedException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type2") .startObject("properties") .startObject("name") .field("type", "text") .field("analyzer", "suggest") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type2", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); index("test", 
"type2", "1", "foo", "bar"); @@ -880,14 +866,12 @@ public void testShardFailures() throws IOException, InterruptedException { public void testEmptyShards() throws IOException, InterruptedException { XContentBuilder mappingBuilder = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("name") .field("type", "text") .field("analyzer", "suggest") .endObject() .endObject() - .endObject() .endObject(); assertAcked( prepareCreate("test").setSettings( @@ -901,7 +885,7 @@ public void testEmptyShards() throws IOException, InterruptedException { .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 5) .put("index.analysis.filter.shingler.output_unigrams", true) - ).addMapping("type1", mappingBuilder) + ).setMapping(mappingBuilder) ); ensureGreen(); @@ -978,16 +962,14 @@ public void testSearchForRarePhrase() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("body") .field("type", "text") .field("analyzer", "body") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type1", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); NumShards test = getNumShards("test"); @@ -1039,16 +1021,14 @@ public void testSuggestWithManyCandidates() throws InterruptedException, Executi XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") .field("analyzer", "text") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type1", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); List titles = new ArrayList<>(); @@ -1166,7 +1146,6 @@ public void testSuggestWithManyCandidates() throws InterruptedException, Executi public void testSuggestWithFieldAlias() throws 
Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("text") .field("type", "keyword") @@ -1176,9 +1155,8 @@ public void testSuggestWithFieldAlias() throws Exception { .field("path", "text") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(prepareCreate("test").addMapping("type", mapping)); + assertAcked(prepareCreate("test").setMapping(mapping)); List builders = new ArrayList<>(); builders.add(client().prepareIndex("test").setSource("text", "apple")); @@ -1195,17 +1173,13 @@ public void testSuggestWithFieldAlias() throws Exception { public void testPhraseSuggestMinDocFreq() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("text") .field("type", "keyword") .endObject() .endObject() - .endObject() .endObject(); - assertAcked( - prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1).build()).addMapping("type", mapping) - ); + assertAcked(prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1).build()).setMapping(mapping)); List builders = new ArrayList<>(); builders.add(client().prepareIndex("test").setSource("text", "apple")); @@ -1298,16 +1272,14 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type1") .startObject("properties") .startObject("title") .field("type", "text") .field("analyzer", "text") .endObject() .endObject() - .endObject() .endObject(); - assertAcked(builder.addMapping("type1", mapping)); + assertAcked(builder.setMapping(mapping)); ensureGreen(); List titles = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java index 
57c14876b25ff..929aac388b678 100644 --- a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java @@ -53,10 +53,8 @@ public void testCustomBM25Similarity() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping( - "type1", + .setMapping( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field1") .field("similarity", "custom") @@ -68,7 +66,6 @@ public void testCustomBM25Similarity() throws Exception { .endObject() .endObject() .endObject() - .endObject() ) .setSettings( Settings.builder() diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index 8b38308d39c93..b234561563e56 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -248,28 +248,33 @@ public CreateIndexRequest mapping(String mapping) { /** * Adds mapping that will be added when the index gets created. * - * @param type The mapping type * @param source The mapping source * @param xContentType the content type of the mapping source * @deprecated types are being removed */ @Deprecated - private CreateIndexRequest mapping(String type, BytesReference source, XContentType xContentType) { + private CreateIndexRequest mapping(BytesReference source, XContentType xContentType) { Objects.requireNonNull(xContentType); Map mappingAsMap = XContentHelper.convertToMap(source, false, xContentType).v2(); - return mapping(type, mappingAsMap); + return mapping(MapperService.SINGLE_MAPPING_NAME, mappingAsMap); } /** * Adds mapping that will be added when the index gets created. 
* - * @param type The mapping type * @param source The mapping source - * @deprecated types are being removed */ - @Deprecated - public CreateIndexRequest mapping(String type, XContentBuilder source) { - return mapping(type, BytesReference.bytes(source), source.contentType()); + public CreateIndexRequest mapping(XContentBuilder source) { + return mapping(BytesReference.bytes(source), source.contentType()); + } + + /** + * Set the mapping for this index + * + * @param source The mapping source + */ + public CreateIndexRequest mapping(Map source) { + return mapping(MapperService.SINGLE_MAPPING_NAME, source); } /** @@ -280,7 +285,7 @@ public CreateIndexRequest mapping(String type, XContentBuilder source) { * @deprecated types are being removed */ @Deprecated - public CreateIndexRequest mapping(String type, Map source) { + private CreateIndexRequest mapping(String type, Map source) { // wrap it in a type map if its not if (source.size() != 1 || !source.containsKey(type)) { source = Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, source); @@ -304,7 +309,7 @@ public CreateIndexRequest mapping(String type, Map source) { */ @Deprecated public CreateIndexRequest mapping(String type, Object... 
source) { - mapping(type, PutMappingRequest.buildFromSimplifiedDef(source)); + mapping(PutMappingRequest.buildFromSimplifiedDef(source)); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 77e48d079cb5c..41f364517fd06 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -113,43 +113,28 @@ public CreateIndexRequestBuilder setSettings(Map source) { * * @param source The mapping source */ - @Deprecated public CreateIndexRequestBuilder setMapping(String source) { request.mapping(source); return this; } - /** - * The cause for this index creation. - */ - public CreateIndexRequestBuilder setCause(String cause) { - request.cause(cause); - return this; - } - /** * Adds mapping that will be added when the index gets created. * - * @param type The mapping type * @param source The mapping source - * @deprecated types are being removed */ - @Deprecated - public CreateIndexRequestBuilder addMapping(String type, XContentBuilder source) { - request.mapping(type, source); + public CreateIndexRequestBuilder setMapping(XContentBuilder source) { + request.mapping(source); return this; } /** * Adds mapping that will be added when the index gets created. * - * @param type The mapping type * @param source The mapping source - * @deprecated types are being removed */ - @Deprecated - public CreateIndexRequestBuilder addMapping(String type, Map source) { - request.mapping(type, source); + public CreateIndexRequestBuilder setMapping(Map source) { + request.mapping(source); return this; } @@ -164,6 +149,14 @@ public CreateIndexRequestBuilder addMapping(String type, Object... source) { return this; } + /** + * The cause for this index creation. 
+ */ + public CreateIndexRequestBuilder setCause(String cause) { + request.cause(cause); + return this; + } + /** * Sets the aliases that will be associated with the index when it gets created */ diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java index f06cb599a60df..402b3741205a2 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequest.java @@ -101,7 +101,7 @@ public class RolloverRequest extends AcknowledgedRequest implem if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { throw new IllegalArgumentException("The mapping definition cannot be nested under a type"); } - request.createIndexRequest.mapping(MapperService.SINGLE_MAPPING_NAME, mappings); + request.createIndexRequest.mapping(mappings); }, CreateIndexRequest.MAPPINGS, ObjectParser.ValueType.OBJECT); PARSER.declareField( (parser, request, context) -> request.createIndexRequest.aliases(parser.map()), diff --git a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java index 320db79428300..31039826868ac 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -116,10 +116,10 @@ public void testMappingKeyedByType() throws IOException { .endObject() .endObject() .endObject(); - request1.mapping("type1", builder); + request1.mapping(builder); builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.startObject() - .startObject("type1") + .startObject(MapperService.SINGLE_MAPPING_NAME) 
.startObject("properties") .startObject("field1") .field("type", "text") @@ -134,7 +134,7 @@ public void testMappingKeyedByType() throws IOException { .endObject() .endObject() .endObject(); - request2.mapping("type1", builder); + request2.mapping(builder); assertEquals(request1.mappings(), request2.mappings()); } } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index fd6fc3b6839d7..ab401b7d45792 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -160,7 +160,7 @@ private static PutMappingRequest createTestItem() throws IOException { String index = randomAlphaOfLength(5); PutMappingRequest request = new PutMappingRequest(index); - request.source(RandomCreateIndexGenerator.randomMapping("_doc")); + request.source(RandomCreateIndexGenerator.randomMapping()); return request; } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java index 0fcc60e2a4087..079e5d388bbf4 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -218,8 +218,7 @@ public void testValidation() { private static RolloverRequest createTestItem() throws IOException { RolloverRequest rolloverRequest = new RolloverRequest(); if (randomBoolean()) { - rolloverRequest.getCreateIndexRequest() - .mapping(MapperService.SINGLE_MAPPING_NAME, RandomCreateIndexGenerator.randomMapping(MapperService.SINGLE_MAPPING_NAME)); + 
rolloverRequest.getCreateIndexRequest().mapping(RandomCreateIndexGenerator.randomMapping()); } if (randomBoolean()) { RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest()); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java index 6a84c5894fceb..171639083be58 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/stats/IndicesStatsTests.java @@ -67,7 +67,6 @@ public void testSegmentStats() throws Exception { IndexModule.Type storeType = IndexModule.defaultStoreType(true); XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("doc") .startObject("properties") .startObject("foo") .field("type", "keyword") @@ -82,13 +81,12 @@ public void testSegmentStats() throws Exception { .field("type", "long") .endObject() .endObject() - .endObject() .endObject(); assertAcked( client().admin() .indices() .prepareCreate("test") - .addMapping("doc", mapping) + .setMapping(mapping) .setSettings(Settings.builder().put("index.store.type", storeType.getSettingsKey())) ); ensureGreen("test"); diff --git a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java index 78214334928a4..d9613176c0156 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -219,16 +219,16 @@ public String toString() { protected void createIndexBasedOnFieldSettings(String index, String alias, TestFieldSetting[] fieldSettings) throws IOException { XContentBuilder mappingBuilder = jsonBuilder(); - 
mappingBuilder.startObject().startObject("type1").startObject("properties"); + mappingBuilder.startObject().startObject("properties"); for (TestFieldSetting field : fieldSettings) { field.addToMappings(mappingBuilder); } - mappingBuilder.endObject().endObject().endObject(); + mappingBuilder.endObject().endObject(); Settings.Builder settings = Settings.builder() .put(indexSettings()) .put("index.analysis.analyzer.tv_test.tokenizer", "standard") .putList("index.analysis.analyzer.tv_test.filter", "lowercase"); - assertAcked(prepareCreate(index).addMapping("type1", mappingBuilder).setSettings(settings).addAlias(new Alias(alias))); + assertAcked(prepareCreate(index).setMapping(mappingBuilder).setSettings(settings).addAlias(new Alias(alias))); } /** diff --git a/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java b/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java index 5486d110c9329..1217efb883468 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java +++ b/server/src/test/java/org/opensearch/action/termvectors/GetTermVectorsTests.java @@ -169,7 +169,6 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio String queryString = createString(tokens, payloads, encoding, delimiter.charAt(0)); // create the mapping XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") @@ -177,7 +176,6 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio .field("analyzer", "payload_test") .endObject() .endObject() - .endObject() .endObject(); Settings setting = Settings.builder() .put("index.analysis.analyzer.payload_test.tokenizer", "mock-whitespace") diff --git a/server/src/test/java/org/opensearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/opensearch/index/analysis/PreBuiltAnalyzerTests.java index 
9dfdbb4469956..d8e0a4ea3bc2e 100644 --- a/server/src/test/java/org/opensearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/opensearch/index/analysis/PreBuiltAnalyzerTests.java @@ -114,14 +114,12 @@ public void testThatAnalyzersAreUsedInMapping() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field") .field("type", "text") .field("analyzer", analyzerName) .endObject() .endObject() - .endObject() .endObject(); MapperService mapperService = createIndex("test", indexSettings, "type", mapping).mapperService(); diff --git a/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java index c0900cc40abff..1e5a212c59825 100644 --- a/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/FieldFilterMapperPluginTests.java @@ -101,7 +101,7 @@ public void testGetFieldMappings() { // as the one coming from a filtered index with same mappings GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get(); MappingMetadata filtered = getMappingsResponse.getMappings().get("filtered"); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filtered.getSourceAsMap())); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(filtered.getSourceAsMap())); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("test").setFields("*").get(); assertEquals(1, response.mappings().size()); assertFieldMappings(response.mappings().get("test"), FILTERED_FLAT_FIELDS); @@ -128,7 +128,7 @@ public void testFieldCapabilities() { // as the one coming from a filtered index with same mappings GetMappingsResponse getMappingsResponse = 
client().admin().indices().prepareGetMappings("filtered").get(); MappingMetadata filteredMapping = getMappingsResponse.getMappings().get("filtered"); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filteredMapping.getSourceAsMap())); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(filteredMapping.getSourceAsMap())); FieldCapabilitiesResponse test = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("test")).actionGet(); // properties.value is an object field in the new mapping filteredFields.add("properties.value"); @@ -176,7 +176,7 @@ private void assertExpectedMappings(ImmutableOpenMap ma private void assertMappingsAreValid(Map sourceAsMap) { // check that the returned filtered mappings are still valid mappings by submitting them and retrieving them back - assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", sourceAsMap)); + assertAcked(client().admin().indices().prepareCreate("test").setMapping(sourceAsMap)); GetMappingsResponse testMappingsResponse = client().admin().indices().prepareGetMappings("test").get(); assertEquals(1, testMappingsResponse.getMappings().size()); // the mappings are returned unfiltered for this index, yet they are the same as the previous ones that were returned filtered diff --git a/server/src/test/java/org/opensearch/index/search/NestedHelperTests.java b/server/src/test/java/org/opensearch/index/search/NestedHelperTests.java index 9164dba8a3f62..c02df8168afee 100644 --- a/server/src/test/java/org/opensearch/index/search/NestedHelperTests.java +++ b/server/src/test/java/org/opensearch/index/search/NestedHelperTests.java @@ -66,7 +66,6 @@ public void setUp() throws Exception { super.setUp(); XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("foo") .field("type", "keyword") @@ -110,7 +109,6 @@ public void setUp() throws Exception { .endObject() 
.endObject() .endObject() - .endObject() .endObject(); indexService = createIndex("index", Settings.EMPTY, "type", mapping); mapperService = indexService.mapperService(); diff --git a/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java b/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java index 418b933558e63..625064820df44 100644 --- a/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java +++ b/server/src/test/java/org/opensearch/index/similarity/SimilarityTests.java @@ -100,14 +100,12 @@ public void testResolveSimilaritiesFromMapping_classicIsForbidden() throws IOExc public void testResolveSimilaritiesFromMapping_bm25() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") .field("similarity", "my_similarity") .endObject() .endObject() - .endObject() .endObject(); Settings indexSettings = Settings.builder() @@ -131,14 +129,12 @@ public void testResolveSimilaritiesFromMapping_bm25() throws IOException { public void testResolveSimilaritiesFromMapping_boolean() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") .field("similarity", "boolean") .endObject() .endObject() - .endObject() .endObject(); MapperService mapperService = createIndex("foo", Settings.EMPTY, "type", mapping).mapperService(); @@ -148,14 +144,12 @@ public void testResolveSimilaritiesFromMapping_boolean() throws IOException { public void testResolveSimilaritiesFromMapping_DFR() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") .field("similarity", "my_similarity") .endObject() .endObject() - .endObject() .endObject(); Settings 
indexSettings = Settings.builder() @@ -178,14 +172,12 @@ public void testResolveSimilaritiesFromMapping_DFR() throws IOException { public void testResolveSimilaritiesFromMapping_IB() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") .field("similarity", "my_similarity") .endObject() .endObject() - .endObject() .endObject(); Settings indexSettings = Settings.builder() @@ -208,14 +200,12 @@ public void testResolveSimilaritiesFromMapping_IB() throws IOException { public void testResolveSimilaritiesFromMapping_DFI() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") .field("similarity", "my_similarity") .endObject() .endObject() - .endObject() .endObject(); Settings indexSettings = Settings.builder() @@ -233,14 +223,12 @@ public void testResolveSimilaritiesFromMapping_DFI() throws IOException { public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") .field("similarity", "my_similarity") .endObject() .endObject() - .endObject() .endObject(); Settings indexSettings = Settings.builder() @@ -261,14 +249,12 @@ public void testResolveSimilaritiesFromMapping_LMDirichlet() throws IOException public void testResolveSimilaritiesFromMapping_LMJelinekMercer() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") .field("similarity", "my_similarity") .endObject() .endObject() - .endObject() .endObject(); Settings indexSettings = Settings.builder() @@ -292,7 +278,6 @@ public void 
testResolveSimilaritiesFromMapping_Unknown() throws IOException { String mapping = Strings.toString( XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("field1") .field("type", "text") @@ -300,7 +285,6 @@ public void testResolveSimilaritiesFromMapping_Unknown() throws IOException { .endObject() .endObject() .endObject() - .endObject() ); IndexService indexService = createIndex("foo"); diff --git a/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java b/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java index f35911c8a3553..60694c7d6d61a 100644 --- a/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java +++ b/server/src/test/java/org/opensearch/index/termvectors/TermVectorsServiceTests.java @@ -60,14 +60,12 @@ public class TermVectorsServiceTests extends OpenSearchSingleNodeTestCase { public void testTook() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("field") .field("type", "text") .field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject() .endObject(); createIndex("test", Settings.EMPTY, "type1", mapping); ensureGreen(); @@ -90,14 +88,12 @@ public void testTook() throws Exception { public void testDocFreqs() throws IOException { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("_doc") .startObject("properties") .startObject("text") .field("type", "text") .field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject() .endObject(); Settings settings = Settings.builder().put("number_of_shards", 1).build(); createIndex("test", settings, "_doc", mapping); @@ -130,7 +126,6 @@ public void testDocFreqs() throws IOException { public void testWithIndexedPhrases() throws IOException { XContentBuilder mapping = jsonBuilder().startObject() - 
.startObject("_doc") .startObject("properties") .startObject("text") .field("type", "text") @@ -138,7 +133,6 @@ public void testWithIndexedPhrases() throws IOException { .field("term_vector", "with_positions_offsets_payloads") .endObject() .endObject() - .endObject() .endObject(); Settings settings = Settings.builder().put("number_of_shards", 1).build(); createIndex("test", settings, "_doc", mapping); diff --git a/server/src/test/java/org/opensearch/search/geo/GeoPointShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoPointShapeQueryTests.java index a2171e6eabab3..3d4c82da0107e 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoPointShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoPointShapeQueryTests.java @@ -89,7 +89,7 @@ protected XContentBuilder createDefaultMapping() throws Exception { public void testProcessRelationSupport() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); Rectangle rectangle = new Rectangle(-35, -25, -25, -35); @@ -112,7 +112,7 @@ public void testProcessRelationSupport() throws Exception { public void testQueryLine() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); Line line = new Line(new double[] { -25, -25 }, new double[] { -35, -35 }); @@ -126,7 +126,7 @@ public void testQueryLine() throws Exception { public void testQueryLinearRing() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); LinearRing linearRing = new LinearRing(new 
double[] { -25, -35, -25 }, new double[] { -25, -35, -25 }); @@ -148,7 +148,7 @@ public void testQueryLinearRing() throws Exception { public void testQueryMultiLine() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); CoordinatesBuilder coords1 = new CoordinatesBuilder().coordinate(-35, -35).coordinate(-25, -25); @@ -167,7 +167,7 @@ public void testQueryMultiLine() throws Exception { public void testQueryMultiPoint() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); MultiPointBuilder mpb = new MultiPointBuilder().coordinate(-35, -25).coordinate(-15, -5); @@ -182,7 +182,7 @@ public void testQueryMultiPoint() throws Exception { public void testQueryPoint() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); PointBuilder pb = new PointBuilder().coordinate(-35, -25); diff --git a/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java index b46ac4a72952b..dfaadaa8875ca 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoQueryTests.java @@ -82,7 +82,7 @@ protected Collection> getPlugins() { public void testNullShape() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate(defaultIndexName).setMapping(xcb).get(); 
ensureGreen(); client().prepareIndex(defaultIndexName) @@ -96,7 +96,7 @@ public void testNullShape() throws Exception { public void testIndexPointsFilterRectangle() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate(defaultIndexName).setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) @@ -134,7 +134,7 @@ public void testIndexPointsFilterRectangle() throws Exception { public void testIndexPointsCircle() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate(defaultIndexName).setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) @@ -167,7 +167,7 @@ public void testIndexPointsCircle() throws Exception { public void testIndexPointsPolygon() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate(defaultIndexName).setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) @@ -203,7 +203,7 @@ public void testIndexPointsPolygon() throws Exception { public void testIndexPointsMultiPolygon() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate(defaultIndexName).setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) @@ -257,7 +257,7 @@ public void testIndexPointsMultiPolygon() throws Exception { public void testIndexPointsRectangle() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate(defaultIndexName).addMapping("_doc", xcb).get(); + 
client().admin().indices().prepareCreate(defaultIndexName).setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) @@ -285,7 +285,7 @@ public void testIndexPointsRectangle() throws Exception { public void testIndexPointsIndexedRectangle() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate(defaultIndexName).addMapping(defaultIndexName, xcb).get(); + client().admin().indices().prepareCreate(defaultIndexName).setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) @@ -310,7 +310,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .endObject() .endObject() .endObject(); - client().admin().indices().prepareCreate(indexedShapeIndex).addMapping(defaultIndexName, xcb).get(); + client().admin().indices().prepareCreate(indexedShapeIndex).setMapping(xcb).get(); ensureGreen(); client().prepareIndex(indexedShapeIndex) @@ -352,7 +352,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { public void testRectangleSpanningDateline() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) @@ -385,7 +385,7 @@ public void testRectangleSpanningDateline() throws Exception { public void testPolygonSpanningDateline() throws Exception { XContentBuilder xcb = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) @@ -429,7 +429,7 @@ public void testPolygonSpanningDateline() throws Exception { public void testMultiPolygonSpanningDateline() throws Exception { XContentBuilder xcb = createDefaultMapping(); - 
client().admin().indices().prepareCreate("test").addMapping("_doc", xcb).get(); + client().admin().indices().prepareCreate("test").setMapping(xcb).get(); ensureGreen(); client().prepareIndex(defaultIndexName) diff --git a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java index 61458512b84e4..4247a2ccabff2 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java @@ -262,7 +262,7 @@ public void testRandomGeoCollectionQuery() throws Exception { XContentBuilder mapping = createRandomMapping(); Settings settings = Settings.builder().put("index.number_of_shards", 1).build(); - client().admin().indices().prepareCreate("test").addMapping("_doc", mapping).setSettings(settings).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).setSettings(settings).get(); ensureGreen(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); @@ -282,7 +282,7 @@ public void testRandomGeoCollectionQuery() throws Exception { // Test for issue #34418 public void testEnvelopeSpanningDateline() throws Exception { XContentBuilder mapping = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("_doc", mapping).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); ensureGreen(); String doc1 = "{\"geo\": {\r\n" @@ -488,7 +488,7 @@ public void testEdgeCases() throws Exception { public void testIndexedShapeReferenceSourceDisabled() throws Exception { XContentBuilder mapping = createDefaultMapping(); - client().admin().indices().prepareCreate("test").addMapping("type1", mapping).get(); + client().admin().indices().prepareCreate("test").setMapping(mapping).get(); createIndex("shapes", Settings.EMPTY, "shape_type", "_source", "enabled=false"); ensureGreen(); @@ -612,7 +612,7 
@@ public void testExistsQuery() throws Exception { GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random()); XContentBuilder builder = createRandomMapping(); - client().admin().indices().prepareCreate("test").addMapping("type", builder).execute().actionGet(); + client().admin().indices().prepareCreate("test").setMapping(builder).execute().actionGet(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); @@ -748,7 +748,6 @@ public void testIndexedShapeReference() throws Exception { public void testFieldAlias() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder() .startObject() - .startObject("type") .startObject("properties") .startObject("location") .field("type", "geo_shape") @@ -759,7 +758,6 @@ public void testFieldAlias() throws IOException { .field("path", "location") .endObject() .endObject() - .endObject() .endObject(); createIndex("test", Settings.EMPTY, "type", mapping); @@ -786,7 +784,7 @@ public void testQueryRandomGeoCollection() throws Exception { gcb.shape(new PolygonBuilder(cb)); XContentBuilder builder = createRandomMapping(); - client().admin().indices().prepareCreate("test").addMapping("type", builder).get(); + client().admin().indices().prepareCreate("test").setMapping(builder).get(); XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java index b2c54492b66d7..dc6cfdd281980 100644 --- a/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java +++ 
b/server/src/test/java/org/opensearch/search/suggest/completion/CategoryContextMappingTests.java @@ -77,7 +77,6 @@ public class CategoryContextMappingTests extends OpenSearchSingleNodeTestCase { public void testIndexingWithNoContexts() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -90,7 +89,6 @@ public void testIndexingWithNoContexts() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -129,7 +127,6 @@ public void testIndexingWithNoContexts() throws Exception { public void testIndexingWithSimpleContexts() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -142,7 +139,6 @@ public void testIndexingWithSimpleContexts() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -176,7 +172,6 @@ public void testIndexingWithSimpleContexts() throws Exception { public void testIndexingWithSimpleNumberContexts() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -189,7 +184,6 @@ public void testIndexingWithSimpleNumberContexts() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -223,7 +217,6 @@ public void testIndexingWithSimpleNumberContexts() throws Exception { public void testIndexingWithSimpleBooleanContexts() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") 
.field("type", "completion") @@ -236,7 +229,6 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -270,7 +262,6 @@ public void testIndexingWithSimpleBooleanContexts() throws Exception { public void testIndexingWithSimpleNULLContexts() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -283,7 +274,6 @@ public void testIndexingWithSimpleNULLContexts() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -314,7 +304,6 @@ public void testIndexingWithSimpleNULLContexts() throws Exception { public void testIndexingWithContextList() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -327,7 +316,6 @@ public void testIndexingWithContextList() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -359,7 +347,6 @@ public void testIndexingWithContextList() throws Exception { public void testIndexingWithMixedTypeContextList() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -372,7 +359,6 @@ public void testIndexingWithMixedTypeContextList() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -404,7 +390,6 @@ public void testIndexingWithMixedTypeContextList() throws Exception { public void testIndexingWithMixedTypeContextListHavingNULL() 
throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -417,7 +402,6 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -443,7 +427,6 @@ public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception { public void testIndexingWithMultipleContexts() throws Exception { String mapping = Strings.toString( jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -460,7 +443,6 @@ public void testIndexingWithMultipleContexts() throws Exception { .endObject() .endObject() .endObject() - .endObject() ); DocumentMapper defaultMapper = createIndex("test").mapperService() @@ -798,7 +780,6 @@ public void testQueryContextParsingMixedHavingNULL() throws Exception { public void testUnknownQueryContextParsing() throws Exception { XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type1") .startObject("properties") .startObject("completion") .field("type", "completion") @@ -814,7 +795,6 @@ public void testUnknownQueryContextParsing() throws Exception { .endArray() .endObject() .endObject() - .endObject() .endObject(); MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService(); diff --git a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java index 31cc2e73ff2a3..1a1f3b33f0646 100644 --- a/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java +++ b/server/src/test/java/org/opensearch/search/suggest/completion/GeoContextMappingTests.java @@ -245,7 +245,6 @@ public void 
testIndexingWithMultipleContexts() throws Exception { public void testMalformedGeoField() throws Exception { XContentBuilder mapping = jsonBuilder(); mapping.startObject(); - mapping.startObject("type1"); mapping.startObject("properties"); mapping.startObject("pin"); String type = randomFrom("text", "keyword", "long"); @@ -266,7 +265,6 @@ public void testMalformedGeoField() throws Exception { mapping.endObject(); - mapping.endObject(); mapping.endObject(); mapping.endObject(); @@ -281,7 +279,6 @@ public void testMalformedGeoField() throws Exception { public void testMissingGeoField() throws Exception { XContentBuilder mapping = jsonBuilder(); mapping.startObject(); - mapping.startObject("type1"); mapping.startObject("properties"); mapping.startObject("suggestion"); mapping.field("type", "completion"); @@ -298,7 +295,6 @@ public void testMissingGeoField() throws Exception { mapping.endObject(); - mapping.endObject(); mapping.endObject(); mapping.endObject(); diff --git a/test/framework/src/main/java/org/opensearch/index/RandomCreateIndexGenerator.java b/test/framework/src/main/java/org/opensearch/index/RandomCreateIndexGenerator.java index dbcbb0f65a491..1498a0b7b2d17 100644 --- a/test/framework/src/main/java/org/opensearch/index/RandomCreateIndexGenerator.java +++ b/test/framework/src/main/java/org/opensearch/index/RandomCreateIndexGenerator.java @@ -63,8 +63,7 @@ public static CreateIndexRequest randomCreateIndexRequest() throws IOException { CreateIndexRequest request = new CreateIndexRequest(index); randomAliases(request); if (randomBoolean()) { - String type = randomAlphaOfLength(5); - request.mapping(type, randomMapping(type)); + request.mapping(randomMapping()); } if (randomBoolean()) { request.settings(randomIndexSettings()); @@ -94,16 +93,15 @@ public static Settings randomIndexSettings() { } /** - * Creates a random mapping, with the mapping definition nested - * under the given type name. 
+ * Creates a random mapping */ - public static XContentBuilder randomMapping(String type) throws IOException { + public static XContentBuilder randomMapping() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.startObject().startObject(type); + builder.startObject(); randomMappingFields(builder, true); - builder.endObject().endObject(); + builder.endObject(); return builder; } diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java index 83e59e1edd8c8..fb4831f881092 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java @@ -310,7 +310,7 @@ protected IndexService createIndex(String index, Settings settings) { protected IndexService createIndex(String index, Settings settings, String type, XContentBuilder mappings) { CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings); if (type != null && mappings != null) { - createIndexRequestBuilder.addMapping(type, mappings); + createIndexRequestBuilder.setMapping(mappings); } return createIndex(index, createIndexRequestBuilder); } From f34a75381d40575c9859760a6be655acd5da2058 Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Thu, 17 Mar 2022 09:02:08 -0700 Subject: [PATCH 41/46] [Remove] Type metadata from ingest documents (#2491) Signed-off-by: Suraj Singh --- .../ingest/common/AppendProcessorTests.java | 2 +- .../ingest/common/SetProcessorTests.java | 2 +- .../ingest/SimulatePipelineRequest.java | 6 ---- .../org/opensearch/ingest/IngestDocument.java | 2 -- .../SimulatePipelineRequestParsingTests.java | 33 ++----------------- 5 files changed, 4 insertions(+), 41 deletions(-) diff --git 
a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorTests.java index 9a507338df332..7caa63792f347 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/AppendProcessorTests.java @@ -147,7 +147,7 @@ public void testConvertScalarToList() throws Exception { public void testAppendMetadataExceptVersion() throws Exception { // here any metadata field value becomes a list, which won't make sense in most of the cases, // but support for append is streamlined like for set so we test it - Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.TYPE, Metadata.ID, Metadata.ROUTING); + Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.ID, Metadata.ROUTING); List values = new ArrayList<>(); Processor appendProcessor; if (randomBoolean()) { diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorTests.java index 0e7ba5556fbf8..923757b605108 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/SetProcessorTests.java @@ -116,7 +116,7 @@ public void testSetExistingNullFieldWithOverrideDisabled() throws Exception { } public void testSetMetadataExceptVersion() throws Exception { - Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.TYPE, Metadata.ID, Metadata.ROUTING); + Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.ID, Metadata.ROUTING); Processor processor = createSetProcessor(randomMetadata.getFieldName(), "_value", true, false); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); processor.execute(ingestDocument); diff --git 
a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java index 6223f25488d88..bc0317e076319 100644 --- a/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/opensearch/action/ingest/SimulatePipelineRequest.java @@ -194,12 +194,6 @@ private static List parseDocs(Map config) { Map dataMap = (Map) object; Map document = ConfigurationUtils.readMap(null, null, dataMap, Fields.SOURCE); String index = ConfigurationUtils.readStringOrIntProperty(null, null, dataMap, Metadata.INDEX.getFieldName(), "_index"); - if (dataMap.containsKey(Metadata.TYPE.getFieldName())) { - deprecationLogger.deprecate( - "simulate_pipeline_with_types", - "[types removal] specifying _type in pipeline simulation requests is deprecated" - ); - } String id = ConfigurationUtils.readStringOrIntProperty(null, null, dataMap, Metadata.ID.getFieldName(), "_id"); String routing = ConfigurationUtils.readOptionalStringOrIntProperty(null, null, dataMap, Metadata.ROUTING.getFieldName()); Long version = null; diff --git a/server/src/main/java/org/opensearch/ingest/IngestDocument.java b/server/src/main/java/org/opensearch/ingest/IngestDocument.java index b496799c34dd0..b18946486f8c6 100644 --- a/server/src/main/java/org/opensearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/opensearch/ingest/IngestDocument.java @@ -38,7 +38,6 @@ import org.opensearch.index.mapper.IndexFieldMapper; import org.opensearch.index.mapper.RoutingFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.index.mapper.VersionFieldMapper; import org.opensearch.script.TemplateScript; @@ -846,7 +845,6 @@ public String toString() { public enum Metadata { INDEX(IndexFieldMapper.NAME), - TYPE(TypeFieldMapper.NAME), ID(IdFieldMapper.NAME), ROUTING(RoutingFieldMapper.NAME), 
VERSION(VersionFieldMapper.NAME), diff --git a/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java b/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java index c85c0a01de63e..36b1f8089fdea 100644 --- a/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java +++ b/server/src/test/java/org/opensearch/action/ingest/SimulatePipelineRequestParsingTests.java @@ -57,7 +57,6 @@ import static org.opensearch.ingest.IngestDocument.Metadata.ID; import static org.opensearch.ingest.IngestDocument.Metadata.INDEX; import static org.opensearch.ingest.IngestDocument.Metadata.ROUTING; -import static org.opensearch.ingest.IngestDocument.Metadata.TYPE; import static org.opensearch.ingest.IngestDocument.Metadata.VERSION; import static org.opensearch.ingest.IngestDocument.Metadata.VERSION_TYPE; import static org.opensearch.ingest.IngestDocument.Metadata.IF_SEQ_NO; @@ -132,15 +131,7 @@ public void testParseUsingPipelineStore(boolean useExplicitType) throws Exceptio assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(1)); } - public void testParseWithProvidedPipelineNoType() throws Exception { - innerTestParseWithProvidedPipeline(false); - } - - public void testParseWithProvidedPipelineWithType() throws Exception { - innerTestParseWithProvidedPipeline(true); - } - - private void innerTestParseWithProvidedPipeline(boolean useExplicitType) throws Exception { + public void innerTestParseWithProvidedPipeline() throws Exception { int numDocs = randomIntBetween(1, 10); Map requestContent = new HashMap<>(); @@ -150,16 +141,7 @@ private void innerTestParseWithProvidedPipeline(boolean useExplicitType) throws for (int i = 0; i < numDocs; i++) { Map doc = new HashMap<>(); Map expectedDoc = new HashMap<>(); - List fields = Arrays.asList( - INDEX, - TYPE, - ID, - ROUTING, - VERSION, - VERSION_TYPE, - IF_SEQ_NO, - IF_PRIMARY_TERM - ); + List fields = Arrays.asList(INDEX, ID, 
ROUTING, VERSION, VERSION_TYPE, IF_SEQ_NO, IF_PRIMARY_TERM); for (IngestDocument.Metadata field : fields) { if (field == VERSION) { Long value = randomLong(); @@ -173,14 +155,6 @@ private void innerTestParseWithProvidedPipeline(boolean useExplicitType) throws Long value = randomNonNegativeLong(); doc.put(field.getFieldName(), value); expectedDoc.put(field.getFieldName(), value); - } else if (field == TYPE) { - if (useExplicitType) { - String value = randomAlphaOfLengthBetween(1, 10); - doc.put(field.getFieldName(), value); - expectedDoc.put(field.getFieldName(), value); - } else { - expectedDoc.put(field.getFieldName(), "_doc"); - } } else { if (randomBoolean()) { String value = randomAlphaOfLengthBetween(1, 10); @@ -249,9 +223,6 @@ private void innerTestParseWithProvidedPipeline(boolean useExplicitType) throws assertThat(actualRequest.getPipeline().getId(), equalTo(SIMULATED_PIPELINE_ID)); assertThat(actualRequest.getPipeline().getDescription(), nullValue()); assertThat(actualRequest.getPipeline().getProcessors().size(), equalTo(numProcessors)); - if (useExplicitType) { - assertWarnings("[types removal] specifying _type in pipeline simulation requests is deprecated"); - } } public void testNullPipelineId() { From 2e3d3fef135442a59ec970c6bec7917467fa8d07 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Thu, 17 Mar 2022 12:05:24 -0500 Subject: [PATCH 42/46] [Remove] Type from PutIndexTemplateRequest and PITRB (#2497) Continues removal of types from PutIndexTemplateRequest and PutIndexTemplateRequestBuilder.mapping. Delegated mapping method in PutIndexTemplateRequestBuilder is refactored to setMapping for consistency with similar methods (e.g., setSettings, setAliases). 
Signed-off-by: Nicholas Walter Knize --- .../index/query/RankFeatureQueryBuilderTests.java | 2 +- .../percolator/PercolatorQuerySearchIT.java | 2 +- .../percolator/PercolateQueryBuilderTests.java | 4 ++-- .../PercolateWithNestedQueryBuilderTests.java | 2 +- .../document/AliasedIndexDocumentActionsIT.java | 2 +- .../java/org/opensearch/index/HiddenIndexIT.java | 10 +++++----- .../indices/template/SimpleIndexTemplateIT.java | 6 +++--- .../search/basic/TransportTwoNodesSearchIT.java | 2 +- .../search/searchafter/SearchAfterIT.java | 2 +- .../admin/indices/create/CreateIndexRequest.java | 6 ++---- .../indices/create/CreateIndexRequestBuilder.java | 4 ++-- .../indices/mapping/put/PutMappingRequest.java | 14 +++++++------- .../mapping/put/PutMappingRequestBuilder.java | 2 +- .../indices/rollover/RolloverRequestBuilder.java | 4 ++-- .../template/put/PutIndexTemplateRequest.java | 5 +++-- .../put/PutIndexTemplateRequestBuilder.java | 4 ++-- .../mapping/put/PutMappingRequestTests.java | 7 ++----- .../RangeFieldQueryStringQueryBuilderTests.java | 2 +- .../index/query/MatchQueryBuilderTests.java | 2 +- .../index/query/NestedQueryBuilderTests.java | 2 +- .../index/query/QueryStringQueryBuilderTests.java | 4 ++-- .../index/query/TermsSetQueryBuilderTests.java | 2 +- .../opensearch/test/AbstractBuilderTestCase.java | 2 +- .../test/OpenSearchSingleNodeTestCase.java | 2 +- 24 files changed, 45 insertions(+), 49 deletions(-) diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java index b0d7bb9d2e14e..e183ba6f6735c 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/query/RankFeatureQueryBuilderTests.java @@ -62,7 +62,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws "_doc", new 
CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef( + PutMappingRequest.simpleMapping( "my_feature_field", "type=rank_feature", "my_negative_feature_field", diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index 1cb5d81136de1..11fc61d6c6d99 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -565,7 +565,7 @@ public void testPercolatorQueryWithHighlighting() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type", "id", "type=keyword", "field1", fieldMapping, "query", "type=percolator") + .addMapping("type", "id", "type=keyword", "field1", fieldMapping.toString(), "query", "type=percolator") ); client().prepareIndex("test") .setId("1") diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java index 12be15552652c..44d8d64086091 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java @@ -110,14 +110,14 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws docType, new CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef(queryField, "type=percolator", aliasField, "type=alias,path=" + queryField) + PutMappingRequest.simpleMapping(queryField, "type=percolator", aliasField, "type=alias,path=" + queryField) ) ), MapperService.MergeReason.MAPPING_UPDATE ); mapperService.merge( docType, - new 
CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(TEXT_FIELD_NAME, "type=text"))), + new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping(TEXT_FIELD_NAME, "type=text"))), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java index 5038e72e9be5e..0ab9eff731bff 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateWithNestedQueryBuilderTests.java @@ -50,7 +50,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws super.initializeAdditionalMappings(mapperService); mapperService.merge( "_doc", - new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("some_nested_object", "type=nested"))), + new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping("some_nested_object", "type=nested"))), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/document/AliasedIndexDocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/AliasedIndexDocumentActionsIT.java index de9db5f145116..1d9f7c1e39de2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/AliasedIndexDocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/AliasedIndexDocumentActionsIT.java @@ -49,7 +49,7 @@ protected void createIndex() { logger.info("--> creating index test"); client().admin() .indices() - .create(createIndexRequest("test1").mapping("type1", "name", "type=keyword,store=true").alias(new Alias("test"))) + .create(createIndexRequest("test1").simpleMapping("name", "type=keyword,store=true").alias(new Alias("test"))) 
.actionGet(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java index 54fbc8cecb967..7d1c92cba3205 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/HiddenIndexIT.java @@ -122,7 +122,7 @@ public void testGlobalTemplatesDoNotApply() { .indices() .preparePutTemplate("a_global_template") .setPatterns(Collections.singletonList("*")) - .addMapping("_doc", "foo", "type=text") + .setMapping("foo", "type=text") .get() ); assertAcked( @@ -130,7 +130,7 @@ public void testGlobalTemplatesDoNotApply() { .indices() .preparePutTemplate("not_global_template") .setPatterns(Collections.singletonList("a*")) - .addMapping("_doc", "bar", "type=text") + .setMapping("bar", "type=text") .get() ); assertAcked( @@ -138,7 +138,7 @@ public void testGlobalTemplatesDoNotApply() { .indices() .preparePutTemplate("specific_template") .setPatterns(Collections.singletonList("a_hidden_index")) - .addMapping("_doc", "baz", "type=text") + .setMapping("baz", "type=text") .get() ); assertAcked( @@ -146,7 +146,7 @@ public void testGlobalTemplatesDoNotApply() { .indices() .preparePutTemplate("unused_template") .setPatterns(Collections.singletonList("not_used")) - .addMapping("_doc", "foobar", "type=text") + .setMapping("foobar", "type=text") .get() ); @@ -192,7 +192,7 @@ public void testNonGlobalTemplateCanMakeIndexHidden() { .indices() .preparePutTemplate("a_global_template") .setPatterns(Collections.singletonList("my_hidden_pattern*")) - .addMapping("_doc", "foo", "type=text") + .setMapping("foo", "type=text") .setSettings(Settings.builder().put("index.hidden", true).build()) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java 
index 378657a6554b4..6065db46d8ee2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java @@ -554,7 +554,7 @@ public void testIndexTemplateWithAliases() throws Exception { .indices() .preparePutTemplate("template_with_aliases") .setPatterns(Collections.singletonList("te*")) - .addMapping("_doc", "type", "type=keyword", "field", "type=text") + .setMapping("type", "type=keyword", "field", "type=text") .addAlias(new Alias("simple_alias")) .addAlias(new Alias("templated_alias-{index}")) .addAlias(new Alias("filtered_alias").filter("{\"term\":{\"type\":\"type2\"}}")) @@ -820,7 +820,7 @@ public void testStrictAliasParsingInIndicesCreatedViaTemplates() throws Exceptio .preparePutTemplate("template1") .setPatterns(Collections.singletonList("a*")) .setOrder(0) - .addMapping("test", "field", "type=text") + .setMapping("field", "type=text") .addAlias(new Alias("alias1").filter(termQuery("field", "value"))) .get(); // Indexing into b index should fail, since there is field with name 'field' in the mapping @@ -930,7 +930,7 @@ public void testOrderAndVersion() { .setPatterns(Collections.singletonList("te*")) .setVersion(version) .setOrder(order) - .addMapping("test", "field", "type=text") + .setMapping("field", "type=text") .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java index 420121006a943..5cd6e76e1e487 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java @@ -96,7 +96,7 @@ private Set prepareData(int numShards) throws Exception { client().admin() .indices() - 
.create(createIndexRequest("test").settings(settingsBuilder).mapping("type", "foo", "type=geo_point")) + .create(createIndexRequest("test").settings(settingsBuilder).simpleMapping("foo", "type=geo_point")) .actionGet(); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java index b88e56b4f675d..aae6c1dec48b3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java @@ -323,7 +323,7 @@ private void createIndexMappingsFromObjectType(String indexName, String typeName fail("Can't match type [" + type + "]"); } } - indexRequestBuilder.addMapping(typeName, mappings.toArray()).get(); + indexRequestBuilder.addMapping(typeName, mappings.toArray(new String[0])).get(); ensureGreen(); } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java index b234561563e56..7f1f516d13a04 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequest.java @@ -305,11 +305,9 @@ private CreateIndexRequest mapping(String type, Map source) { /** * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). - * @deprecated types are being removed */ - @Deprecated - public CreateIndexRequest mapping(String type, Object... source) { - mapping(PutMappingRequest.buildFromSimplifiedDef(source)); + public CreateIndexRequest simpleMapping(String... 
source) { + mapping(PutMappingRequest.simpleMapping(source)); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java index 41f364517fd06..ac80b3eb6a155 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -144,8 +144,8 @@ public CreateIndexRequestBuilder setMapping(Map source) { * @deprecated types are being removed */ @Deprecated - public CreateIndexRequestBuilder addMapping(String type, Object... source) { - request.mapping(type, source); + public CreateIndexRequestBuilder addMapping(String type, String... source) { + request.simpleMapping(source); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java index 52be45054ba55..be3e676a4a1a2 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -217,8 +217,8 @@ public String source() { * Also supports metadata mapping fields such as `_all` and `_parent` as property definition, these metadata * mapping fields will automatically be put on the top level mapping object. */ - public PutMappingRequest source(Object... source) { - return source(buildFromSimplifiedDef(source)); + public PutMappingRequest source(String... 
source) { + return source(simpleMapping(source)); } public String origin() { @@ -239,7 +239,7 @@ public PutMappingRequest origin(String origin) { * if the number of the source arguments is not divisible by two * @return the mappings definition */ - public static XContentBuilder buildFromSimplifiedDef(Object... source) { + public static XContentBuilder simpleMapping(String... source) { if (source.length % 2 != 0) { throw new IllegalArgumentException("mapping source must be pairs of fieldnames and properties definition."); } @@ -248,10 +248,10 @@ public static XContentBuilder buildFromSimplifiedDef(Object... source) { builder.startObject(); for (int i = 0; i < source.length; i++) { - String fieldName = source[i++].toString(); + String fieldName = source[i++]; if (RESERVED_FIELDS.contains(fieldName)) { builder.startObject(fieldName); - String[] s1 = Strings.splitStringByCommaToArray(source[i].toString()); + String[] s1 = Strings.splitStringByCommaToArray(source[i]); for (String s : s1) { String[] s2 = Strings.split(s, "="); if (s2.length != 2) { @@ -265,13 +265,13 @@ public static XContentBuilder buildFromSimplifiedDef(Object... 
source) { builder.startObject("properties"); for (int i = 0; i < source.length; i++) { - String fieldName = source[i++].toString(); + String fieldName = source[i++]; if (RESERVED_FIELDS.contains(fieldName)) { continue; } builder.startObject(fieldName); - String[] s1 = Strings.splitStringByCommaToArray(source[i].toString()); + String[] s1 = Strings.splitStringByCommaToArray(source[i]); for (String s : s1) { String[] s2 = Strings.split(s, "="); if (s2.length != 2) { diff --git a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java index a1b3b40d4e961..3ef96254b3f9b 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestBuilder.java @@ -102,7 +102,7 @@ public PutMappingRequestBuilder setSource(String mappingSource, XContentType xCo * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). */ - public PutMappingRequestBuilder setSource(Object... source) { + public PutMappingRequestBuilder setSource(String... source) { request.source(source); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java index a7af2f963d15b..c74f71a70e09d 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/rollover/RolloverRequestBuilder.java @@ -84,8 +84,8 @@ public RolloverRequestBuilder alias(Alias alias) { return this; } - public RolloverRequestBuilder mapping(String type, Object... 
source) { - this.request.getCreateIndexRequest().mapping(type, source); + public RolloverRequestBuilder simpleMapping(String... source) { + this.request.getCreateIndexRequest().simpleMapping(source); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 2ea2e492ffe4d..d443d14f3f463 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -58,6 +58,7 @@ import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.index.mapper.MapperService; import java.io.IOException; import java.util.Collections; @@ -303,8 +304,8 @@ public PutIndexTemplateRequest mapping(String type, Map source) * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). */ - public PutIndexTemplateRequest mapping(String type, Object... source) { - mapping(type, PutMappingRequest.buildFromSimplifiedDef(source)); + public PutIndexTemplateRequest mapping(String... 
source) { + mapping(MapperService.SINGLE_MAPPING_NAME, PutMappingRequest.simpleMapping(source)); return this; } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java index caff201abeda7..df3b5f6417576 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java @@ -137,8 +137,8 @@ public PutIndexTemplateRequestBuilder addMapping(String type, String source, XCo * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). */ - public PutIndexTemplateRequestBuilder addMapping(String type, Object... source) { - request.mapping(type, source); + public PutIndexTemplateRequestBuilder setMapping(String... 
source) { + request.mapping(source); return this; } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index ab401b7d45792..d8178769ba7b1 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -92,15 +92,12 @@ public void testValidation() { } /** - * Test that {@link PutMappingRequest#buildFromSimplifiedDef(Object...)} + * Test that {@link PutMappingRequest#simpleMapping(String...)} * rejects inputs where the {@code Object...} varargs of field name and properties are not * paired correctly */ public void testBuildFromSimplifiedDef() { - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> PutMappingRequest.buildFromSimplifiedDef("only_field") - ); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PutMappingRequest.simpleMapping("only_field")); assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage()); } diff --git a/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index 0a01d86e76dea..a8245627c6930 100644 --- a/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -73,7 +73,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws "_doc", new CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef( + PutMappingRequest.simpleMapping( INTEGER_RANGE_FIELD_NAME, "type=integer_range", LONG_RANGE_FIELD_NAME, diff --git 
a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java index dac32849e3cef..d6cd157d6f84e 100644 --- a/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/MatchQueryBuilderTests.java @@ -390,7 +390,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws "_doc", new CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef("string_boost", "type=text", "string_no_pos", "type=text,index_options=docs") + PutMappingRequest.simpleMapping("string_boost", "type=text", "string_no_pos", "type=text,index_options=docs") ) ), MapperService.MergeReason.MAPPING_UPDATE diff --git a/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java index b95d9f8d36ad8..abc3e0bb8c4c3 100644 --- a/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/NestedQueryBuilderTests.java @@ -77,7 +77,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws "_doc", new CompressedXContent( Strings.toString( - PutMappingRequest.buildFromSimplifiedDef( + PutMappingRequest.simpleMapping( TEXT_FIELD_NAME, "type=text", INT_FIELD_NAME, diff --git a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java index b091fc2103344..393d4cb3f2121 100644 --- a/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/QueryStringQueryBuilderTests.java @@ -1096,7 +1096,7 @@ public void testDisabledFieldNamesField() throws Exception { .merge( "_doc", new CompressedXContent( - 
Strings.toString(PutMappingRequest.buildFromSimplifiedDef("foo", "type=text", "_field_names", "enabled=false")) + Strings.toString(PutMappingRequest.simpleMapping("foo", "type=text", "_field_names", "enabled=false")) ), MapperService.MergeReason.MAPPING_UPDATE ); @@ -1112,7 +1112,7 @@ public void testDisabledFieldNamesField() throws Exception { .merge( "_doc", new CompressedXContent( - Strings.toString(PutMappingRequest.buildFromSimplifiedDef("foo", "type=text", "_field_names", "enabled=true")) + Strings.toString(PutMappingRequest.simpleMapping("foo", "type=text", "_field_names", "enabled=true")) ), MapperService.MergeReason.MAPPING_UPDATE ); diff --git a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java index 3c39773108830..43ca3139e30cb 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsSetQueryBuilderTests.java @@ -93,7 +93,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws String docType = "_doc"; mapperService.merge( docType, - new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("m_s_m", "type=long"))), + new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping("m_s_m", "type=long"))), MapperService.MergeReason.MAPPING_UPDATE ); } diff --git a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java index e5d14333de828..ae32db5fe6032 100644 --- a/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/AbstractBuilderTestCase.java @@ -438,7 +438,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { "_doc", new CompressedXContent( Strings.toString( - 
PutMappingRequest.buildFromSimplifiedDef( + PutMappingRequest.simpleMapping( TEXT_FIELD_NAME, "type=text", KEYWORD_FIELD_NAME, diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java index fb4831f881092..fe65d14bbcd0f 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java @@ -320,7 +320,7 @@ protected IndexService createIndex(String index, Settings settings, String type, * @deprecated types are being removed */ @Deprecated - protected IndexService createIndex(String index, Settings settings, String type, Object... mappings) { + protected IndexService createIndex(String index, Settings settings, String type, String... mappings) { CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings); if (type != null) { createIndexRequestBuilder.addMapping(type, mappings); From f14e0a36c48ecb8d2f58b37cc18e86d3b6836319 Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Thu, 17 Mar 2022 13:59:14 -0700 Subject: [PATCH 43/46] Add 1.3.0 release notes in main (#2489) * Add 1.3.0 release notes in main Signed-off-by: Suraj Singh * Address review comment. 
Removed dependabot commits and fix indentation, linings, incorrect blocks Signed-off-by: Suraj Singh * Self review of md file Signed-off-by: Suraj Singh --- .../opensearch.release-notes-1.3.0.md | 1299 +++++++++++++++++ 1 file changed, 1299 insertions(+) create mode 100644 release-notes/opensearch.release-notes-1.3.0.md diff --git a/release-notes/opensearch.release-notes-1.3.0.md b/release-notes/opensearch.release-notes-1.3.0.md new file mode 100644 index 0000000000000..62c5be8413943 --- /dev/null +++ b/release-notes/opensearch.release-notes-1.3.0.md @@ -0,0 +1,1299 @@ +## Version 1.3.0 Release Notes + +* __MapperService has to be passed in as null for EnginePlugins CodecService constructor (#2177) (#2413)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Wed, 9 Mar 2022 10:17:33 -0500 + + efs/remotes/os_or/1.3 + * MapperService has to be passed in as null for EnginePlugins CodecService + constructor + + * Addressing code review comments + + * Delayed CodecService instantiation up to the shard initialization + + * Added logger (associated with shard) to CodecServiceConfig + + * Refactored the EngineConfigFactory / IndexShard instantiation of the + CodecService + (cherry picked from commit 9c679cbbfcf685e3865d2cf06b8f4e10c3082d49) + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Replace exclusionary words whitelist and blacklist in the places that… (#2365)__ + + [aponb](mailto:apre@gmx.at) - Mon, 7 Mar 2022 15:14:36 -0800 + + * Replace the exclusionary word whitelist with allowlist, and blacklist with + denylist, in code commet and internal variable/method/class/package name. 
+ + Signed-off-by: Andreas <apre@gmx.at> + + +* __Install plugin command help (#2193) (#2264)__ + + [Joshua Palis](mailto:jpalis@amazon.com) - Mon, 7 Mar 2022 15:24:56 -0500 + * edited opensearch-plugin install help output to include plugin URL + * fixed unit test for plugin install help output by correctly identifying the + beginning og the non-option argument list + * added comments to install plugins help non option argument ouput unit test + * fixed format violation + * added additional details on valid plugin ids and how to use plugin URLs + * added additional information to plugin install help output + (cherry picked from commit b251d2b565b918708a1612ec16d1916122c7805d) Signed-off-by: Joshua Palis <jpalis@amazon.com> + + Signed-off-by: Joshua Palis <jpalis@amazon.com> + + +* __Add valuesField in PercentilesAggregationBuilder streamInput constructor (#2308) (#2389)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Mon, 7 Mar 2022 13:07:41 -0500 + + Signed-off-by: Subhobrata Dey <sbcd90@gmail.com> + (cherry picked from commit e1fd4b75b4f888d8d486baceeb9fd6fe7df44416) + Co-authored-by: Subhobrata Dey <sbcd90@gmail.com> + + +* __Updated the url for docker distribution (#2325) (#2360)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Mon, 7 Mar 2022 11:52:46 -0500 + + Signed-off-by: Owais Kazi <owaiskazi19@gmail.com> + (cherry picked from commit 9224537704bb12980a129afb1e7b6ba6ab93680e) + Co-authored-by: Owais Kazi <owaiskazi19@gmail.com> + + +* __Reintroduce negative epoch_millis #1991 (#2232) (#2380)__ + + [Breno Faria](mailto:breno.faria@intrafind.com) - Mon, 7 Mar 2022 11:46:54 -0500 + + * Reintroduce negative epoch_millis #1991 + Fixes a regression introduced with Elasticsearch 7 regarding the date + field + type that removed support for negative timestamps with sub-second + granularity. 
Thanks to Ryan Kophs (https://github.com/rkophs) for allowing me to use + his previous work. + Signed-off-by: Breno Faria <breno.faria@intrafind.de> + + * applying spotless fix + Signed-off-by: Breno Faria <breno.faria@intrafind.de> + + * more conservative implementation of isSupportedBy + Signed-off-by: Breno Faria <breno.faria@intrafind.de> + + * adding braces to control flow statement + Signed-off-by: Breno Faria <breno.faria@intrafind.de> + + * spotless fix... + Signed-off-by: Breno Faria <breno.faria@intrafind.de> + Co-authored-by: Breno Faria <breno.faria@intrafind.de> + Co-authored-by: Breno Faria <breno.faria@intrafind.de> + + +* __Add 'key' field to 'function_score' query function definition in explanation response (#1711) (#2346)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Mon, 7 Mar 2022 11:42:03 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Fix java-version-checker source/target compatibility settings (#2354)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Fri, 4 Mar 2022 15:21:54 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + +* __Fixing the --release flag usage for javac (#2343)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Fri, 4 Mar 2022 13:49:01 -0500 + + * Fixing the --release flag usage for javac + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + * Fixing the --html5 flag usage for javadoc + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Fixing soft deletes deprecation warning (#2339)__ + + [Vacha Shah](mailto:vachshah@amazon.com) - Fri, 4 Mar 2022 10:06:11 -0500 + + Signed-off-by: Vacha Shah <vachshah@amazon.com> + + +* __Remove Github DCO action since DCO runs via Github App now (#2317) (#2323)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Thu, 3 Mar 2022 12:12:55 -0800 + + Signed-off-by: Vacha Shah <vachshah@amazon.com> + (cherry picked from commit cdb42ad3013f67970def21e15c546c9c4fd08d6f) + 
Co-authored-by: Vacha Shah <vachshah@amazon.com> + + +* __[Backport 1.x] Avoid logging duplicate deprecation warnings multiple times (#2315)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Thu, 3 Mar 2022 14:23:05 -0500 + + * Avoid logging duplicate deprecation warnings multiple times (#1660) + + * Avoid logging duplicate deprecation warnings multiple times + Signed-off-by: Vacha <vachshah@amazon.com> + + * Fixes test failures + Signed-off-by: Vacha <vachshah@amazon.com> + + * Adding deprecation logger tests + Signed-off-by: Vacha <vachshah@amazon.com> + + * Using ConcurrentHashMap keySet + Signed-off-by: Vacha Shah <vachshah@amazon.com> + (cherry picked from commit e66ea2c4f3ec583f087a82d1ebfb6383b2f159c1) + + * Fixing failing RestResizeHandlerTests in 1.x + Signed-off-by: Vacha Shah <vachshah@amazon.com> + Co-authored-by: Vacha <vachshah@amazon.com> + + +* __Restore Java 8 compatibility for build tools. (#2300)__ + + [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Thu, 3 Mar 2022 14:14:23 -0500 + + * Restore Java 8 compatibility for build tools. + Signed-off-by: dblock <dblock@dblock.org> + + * Make source code compatible with Java 8. + Signed-off-by: dblock <dblock@dblock.org> + +* __Add support of SOCKS proxies for S3 repository (#2160) (#2316)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Thu, 3 Mar 2022 13:49:40 -0500 + + Signed-off-by: Andrey Pleskach <ples@aiven.io> + (cherry picked from commit f13b951c7006700a9b8a8bb2cdecd67439bc1e86) + Co-authored-by: Andrey Pleskach <ples@aiven.io> + + +* __Fix flaky test case - string profiler via global ordinals (#2226) (#2313)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Wed, 2 Mar 2022 17:48:34 -0600 + + forcemerge to one segment before executing aggregation query. 
+ Signed-off-by: Peng Huo <penghuo@gmail.com> + (cherry picked from commit 9e225dc9b85c4fc2d3d910846bd0da25bc6a40df) + Co-authored-by: Peng Huo <penghuo@gmail.com> + +* __Auto-increment next development iteration. (#1816) (#2164)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Wed, 2 Mar 2022 15:46:28 -0800 + + * Auto-increment next development iteration. + Signed-off-by: dblock <dblock@amazon.com> + + * Make bwc increments on X.Y and main branches. + Signed-off-by: dblock <dblock@amazon.com> + Signed-off-by: dblock + <dblock@dblock.org> + Co-authored-by: Daniel Doubrovkine (dB.) <dblock@dblock.org> + + +* __Downgrade to JDK 11. (#2301)__ + + [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Wed, 2 Mar 2022 13:37:23 -0500 + + * Downgrade to JDK 11. + Signed-off-by: dblock <dblock@dblock.org> + + * Added support for patch JDK version, like 11.0.14+1 + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + * Use JDK 11.0.14.1+1. 
+ Signed-off-by: dblock <dblock@dblock.org> + + * ./gradlew :build-tools:spotlessApply + Signed-off-by: dblock <dblock@dblock.org> + Co-authored-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Adding shards per node constraint for predictability to testClusterGr… (#2110) (#2265)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Mon, 28 Feb 2022 15:59:57 -0600 + + * Adding shards per node constraint for predictability to + testClusterGreenAfterPartialRelocation + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + * Fixing precommit violation + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + * Adding assertion to ensure invariant + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + (cherry picked from commit 8ae0db5285963b8e3552ce106ef1368813dbc8b1) + Co-authored-by: Ankit Jain <jain.ankitk@gmail.com> + + +* __Revert "[Backport 1.x] Override Default Distribution Download Url with Custom Distribution Url When User Passes a Url (#2191)" (#2243)__ + + [Rabi Panda](mailto:adnapibar@gmail.com) - Thu, 24 Feb 2022 15:58:30 -0800 + + Signed-off-by: Rabi Panda <adnapibar@gmail.com> + + +* __added config file to git issue template directory to disable blank issue creation (#2158) (#2249)__ + + [Joshua Palis](mailto:99766446+joshpalis@users.noreply.github.com) - Thu, 24 Feb 2022 15:34:24 -0800 + + Signed-off-by: Joshua Palis <jpalis@amazon.com> + Co-authored-by: Joshua Palis <jpalis@amazon.com> + (cherry picked from commit fb187eacc26487cd644f09091e462001d8839315) + + +* __Case Insensitive Support in Regexp Interval (#2237) (#2246)__ + + [Matt Weber](mailto:matt@mattweber.org) - Thu, 24 Feb 2022 15:58:09 -0600 + + 1x backport of #2237. Add a `case_insensitive` flag to regexp interval source. 
+ + Signed-off-by: Matt Weber <matt@mattweber.org> + + +* __Add Factory to enable Lucene ConcatenateGraphFilter (#1278) (#2152) (#2219)__ + + [Mau Bach Quang](mailto:quangmaubach@gmail.com) - Thu, 24 Feb 2022 10:49:31 -0800 + + Lucene has a ConcatenateGraphFilter that can concatenate tokens from a + TokenStream + to create a single token (or several tokens that have the same + position if + input TokenStream is a graph). + The change is to enable that ConcatenateGraphFilter by adding a Factory. + + (cherry-pick from 0e95bb9dff976a9c7f9cdac63a92040043d029e2) + Signed-off-by: Mau Bach Quang <quangmaubach@gmail.com> + + +* __[Backport 1.x] Override Default Distribution Download Url with Custom Distribution Url When User Passes a Url (#2191)__ + + [Rishikesh Pasham](mailto:62345295+Rishikesh1159@users.noreply.github.com) - Mon, 21 Feb 2022 10:56:00 -0800 + + * Backport Enabling Sort Optimization to make use of Lucene + Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> + + * Backport Enabling Sort Optimization to make use of Lucene and small change in + a method call signature + Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> + + * [Backport 1.x] Override Default Distribution Download Url with Custom + Distribution Url When User Passes a Url + Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> + + * Adding Spotless check to previous PR + Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> + +* __Support unordered non-overlapping intervals (#2103) (#2172)__ + + [Matt Weber](mailto:matt@mattweber.org) - Fri, 18 Feb 2022 11:52:52 -0500 + + This commit exposes Intervals.unorderedNoOverlaps (LUCENE-8828). 
+ + (#2103 backport) + Signed-off-by: Matt Weber <matt@mattweber.org> + + +* __Add regexp interval source (#1917) (#2069)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Wed, 16 Feb 2022 14:46:35 -0500 + + * Add regexp interval source + Add a regexp interval source provider so people can use regular + expressions + inside of intervals queries. + Signed-off-by: Matt Weber <matt@mattweber.org> + + * Fixes + + - register regexp interval in SearchModule + - use fully-qualified name for lucene RegExp + - get rid of unnecessary variable + Signed-off-by: Matt Weber <matt@mattweber.org> + (cherry picked from commit b9420d8f70dfc168b9d44f736850af4ef7306a99) + Co-authored-by: Matt Weber <matt@mattweber.org> + + +* __Add proxy username and password settings for Azure repository (#2098) (#2108)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Wed, 16 Feb 2022 12:01:51 -0500 + + Added username/password proxy settings for Azure repository. + Security + settings: + - azure.client.*.proxy.username - Proxy user name + - azure.client.*.proxy.password - Proxy user password + Signed-off-by: Andrey Pleskach <ples@aiven.io> + (cherry picked from commit 62361ceafce4abb735567066d1c4865ca6d7136f) + Co-authored-by: Andrey Pleskach <ples@aiven.io> + + +* __Support first and last parameter for missing bucket ordering in composite aggregation (#1942) (#2049)__ + + [Peng Huo](mailto:penghuo@gmail.com) - Tue, 15 Feb 2022 14:07:35 -0800 + + Support for "first" and "last" parameters for missing bucket ordering in + composite aggregation. + By default, if order is asc, missing_bucket at first, + if order is desc, missing_bucket at last. If + missing_order is "first" or + "last", regardless order, missing_bucket is at first or last respectively. 
+ Signed-off-by: Peng Huo <penghuo@gmail.com> + + +* __Mapping update for “date_range” field type is not idempotent (#2094) (#2106)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Tue, 15 Feb 2022 11:44:17 -0600 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + (cherry picked from commit 6b6f03368f49f5f8001d6d0ed85cd9af7bab76f6) + Co-authored-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Fix integration tests failure (#2067) (#2090)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Fri, 11 Feb 2022 11:07:46 -0500 + + Fixed integration tests failure on Linux with Kernel 5.16.x + Signed-off-by: Andrey Pleskach <ples@aiven.io> + (cherry picked from commit 27ed6fc82c7db7a3a741499f0dbd7722fa053f9d) + Co-authored-by: Andrey Pleskach <ples@aiven.io> + + +* __Backport/backport 2048,2074 to 1.x (#2085)__ + + [Ankit Jain](mailto:jain.ankitk@gmail.com) - Fri, 11 Feb 2022 09:36:21 -0500 + + * Stabilizing org.opensearch.cluster.routing.MovePrimaryFirstTests.test… + (#2048) + + * Stabilizing + org.opensearch.cluster.routing.MovePrimaryFirstTests.testClusterGreenAfterPartialRelocation + + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + * Removing unused import + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + * Making code more readable + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + (cherry picked from commit 343b82fe24525bbab01ef5a0d9bb8917068c71bf) + + * Added timeout to ensureGreen() for testClusterGreenAfterPartialRelocation + (#2074) + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + (cherry picked from commit f0984eb409d44e8b68deb1c262bf81accc300acb) + + +* __Removing lingering transportclient (#1955) (#2088)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Thu, 10 Feb 2022 17:21:12 -0800 + + Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + (cherry 
picked from commit 781156471a1827b1b66445f716c7567f714dda86) + Co-authored-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + + +* __Prioritize primary shard movement during shard allocation (#1445) (#2079)__ + + [opensearch-trigger-bot[bot]](mailto:98922864+opensearch-trigger-bot[bot]@users.noreply.github.com) - Thu, 10 Feb 2022 13:48:38 -0500 + + When some node or set of nodes is excluded (based on some cluster setting) + BalancedShardsAllocator iterates over them in breadth first order picking 1 + shard from + each node and repeating the process until all shards are balanced. + Since shards from + each node are picked randomly it's possible the p and r of + shard1 is relocated first + leaving behind both p and r of shard2. If the + excluded nodes were to go down the + cluster becomes red. + This commit introduces a new setting + "cluster.routing.allocation.move.primary_first" + that prioritizes the p of both + shard1 and shard2 first so the cluster does not become + red if the excluded + nodes were to go down before relocating other shards. Note that + with this + setting enabled performance of this change is a direct function of number + of + indices, shards, replicas, and nodes. The larger the indices, replicas, and + distribution scale, the slower the allocation becomes. This should be used with + care. 
+ Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + (cherry picked from commit 6eb8f6f307567892bbabbe37aff7cd42be486df0) + Co-authored-by: Ankit Jain <jain.ankitk@gmail.com> + + +* __Adding workflow to auto delete backport merged branches from backport workflow (#2050) (#2065)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Tue, 8 Feb 2022 12:27:45 -0800 + + Signed-off-by: Vacha Shah <vachshah@amazon.com> + (cherry picked from commit 9c9e218ae697b65e410304825cac81ccdf355e66) + Co-authored-by: Vacha <vachshah@amazon.com> + + +* __Another attempt to fix o.o.transport.netty4.OpenSearchLoggingHandlerIT fails w/ stack overflow (#2051) (#2055)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Mon, 7 Feb 2022 16:11:06 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + (cherry picked from commit 1e5d98329eaa76d1aea19306242e6fa74b840b75) + Co-authored-by: Andriy Redko <andriy.redko@aiven.io> + + +* __added backport for 1.2.5 to 1.x branch (#2057)__ + + [Abhinav Gupta](mailto:guptabhi123@gmail.com) - Mon, 7 Feb 2022 13:33:27 -0500 + + Signed-off-by: Abhinav Gupta <abhng@amazon.com> + + +* __[Backport] Introduce FS Health HEALTHY threshold to fail stuck node (#1269)__ + + [Bukhtawar Khan](mailto:bukhtawa@amazon.com) - Mon, 7 Feb 2022 12:48:03 -0500 + + * Introduce FS Health HEALTHY threshold to fail stuck node (#1167) + This will cause the leader stuck on IO during publication to step down and + eventually trigger a leader election. + * Issue Description + * The publication of cluster state is time bound to 30s by a + cluster.publish.timeout settings. If this time is reached before the new + cluster state is committed, then the cluster state change is rejected and the + leader considers itself to have failed. It stands down and starts trying to + elect a new master. 
+ There is a bug in leader that when it tries to publish the new cluster state + it first tries acquire a lock to flush the new state under a mutex to disk. The + same lock is used to cancel the publication on timeout. Below is the state of + the timeout scheduler meant to cancel the publication. So essentially if the + flushing of cluster state is stuck on IO, so will the cancellation of the + publication since both of them share the same mutex. So leader will not step + down and effectively block the cluster from making progress. + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + * Fix up settings + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + * Fix up tests + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + * Fix up tests + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + * Fix up tests + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + +* __[Backport] Handle shard over allocation during partial zone/rack or independent … (#1268)__ + + [Bukhtawar Khan](mailto:bukhtawa@amazon.com) - Mon, 7 Feb 2022 09:58:10 -0500 + + * Handle shard over allocation during partial zone/rack or independent node + failures (#1149) + The changes ensure that in the event of a partial zone failure, the surviving + nodes in the minority zone don't get overloaded with shards, this is governed + by a skewness limit. 
+ Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + * Fix up imports + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + * Fix up imports + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + * Fix up imports + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + * Fix up check style + Signed-off-by: Bukhtawar Khan <bukhtawa@amazon.com> + + +* __Add Version.V_1_2_5 constant__ + + [Nicholas Walter Knize](mailto:nknize@apache.org) - Fri, 4 Feb 2022 18:29:46 -0600 + + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + + +* __add 1.2.5 to bwcVersions__ + + [Nicholas Walter Knize](mailto:nknize@apache.org) - Fri, 4 Feb 2022 18:29:35 -0600 + + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + + +* __Revert "Upgrading Shadow plugin to 7.1.2 (#2033) (#2037)" (#2047)__ + + [Sarat Vemulapalli](mailto:vemulapallisarat@gmail.com) - Wed, 2 Feb 2022 19:40:51 -0800 + + This reverts commit 8725061c15fac70a81d144ed2d79b09f5e1a2f7f. + Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + + +* __Fix AssertionError message (#2044) (#2045)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Wed, 2 Feb 2022 21:05:15 -0500 + + Signed-off-by: Lukáš Vlček <lukas.vlcek@aiven.io> + (cherry picked from commit 270c59f523acbb3af73ab56dbcfe754e619fdca9) + Co-authored-by: Lukáš Vlček <lukas.vlcek@aiven.io> + + +* __Upgrading Shadow plugin to 7.1.2 (#2033) (#2037)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Wed, 2 Feb 2022 14:52:31 -0800 + + Shadow plugin is used for publishing jars + and this upgrades Log4J dependency + for build. 
+ Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + (cherry picked from commit 1f9517c4caee48eda6eee77f603d815af1fd7770) + Co-authored-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + + +* __[FEATURE] Add OPENSEARCH_JAVA_HOME env to override JAVA_HOME (#2040)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Wed, 2 Feb 2022 12:16:50 -0800 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __build: introduce support for reproducible builds (#1995) (#2038)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Wed, 2 Feb 2022 14:21:34 -0500 + + Reproducible builds is an initiative to create an independently-verifiable path + from source to binary code [1]. This can be done by: + - Make all archive tasks in gradle reproducible by ignoring timestamp on files + [2] + - Preserve the order in side the archives [2] + - Ensure GlobalBuildInfoPlugin.java use [SOURCE_DATE_EPOCH] when available + + [SOURCE_DATE_EPOCH]: https://reproducible-builds.org/docs/source-date-epoch/ + [1]: https://reproducible-builds.org/ + [2]: + https://docs.gradle.org/current/userguide/working_with_files.html#sec:reproducible_archives + + Signed-off-by: Leonidas Spyropoulos <artafinde@gmail.com> + (cherry picked from commit 6da253b8fff9a9d9cbbf65807efa7aeaddc9c9d3) + Co-authored-by: Leonidas Spyropoulos <artafinde@gmail.com> + + +* __[1x] Deprecate index.merge.policy.max_merge_at_once_explicit (#1981) (#1984)__ + + [Nick Knize](mailto:nknize@apache.org) - Wed, 2 Feb 2022 11:48:00 -0600 + + max_merge_at_once_explicit is removed in lucene 9 so the index setting is + + deprecated for removal in the next major release. + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + + +* __[Deprecate] Setting explicit version on analysis component (#1978) (#1985)__ + + [Nick Knize](mailto:nknize@apache.org) - Wed, 2 Feb 2022 12:28:44 -0500 + + Lucene 9 removes the ability to define an explicit version on an analysis + + component. 
The version parameter is deprecated at parse time and a warning is + + issued to the user through the deprecation logger. + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + + +* __[BUG] Docker distribution builds are failing. Switching to http://vault.centos.org (#2024) (#2030)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Tue, 1 Feb 2022 14:16:50 -0600 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __[Backport] Enabling Sort Optimization to make use of Lucene (#1989)__ + + [Rishikesh Pasham](mailto:62345295+Rishikesh1159@users.noreply.github.com) - Mon, 31 Jan 2022 10:48:13 -0600 + + * Backport Enabling Sort Optimization to make use of Lucene + Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> + + * Backport Enabling Sort Optimization to make use of Lucene and small change in + a method call signature + Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> + + +* __Upgrading Jackson-Databind version (#1982) (#1987)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Sun, 30 Jan 2022 16:32:14 -0800 + + * Upgrading Jackson-Databind version + Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> + + * Adding jackson-databind version using getProperty method + Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> + (cherry picked from commit 1568407c362b2534366048379f1bd93f2d164d89) + Co-authored-by: Rishikesh Pasham + <62345295+Rishikesh1159@users.noreply.github.com> + + +* __Update bundled JDK distribution to 17.0.2+8 (#2007) (#2009)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Sat, 29 Jan 2022 09:45:19 -0800 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Linked the formatting setting file (#1860) (#1961)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Thu, 27 Jan 2022 12:24:34 -0800 + + Signed-off-by: Owais Kazi <owaiskazi19@gmail.com> + (cherry picked from commit cfc9ec292dea2169495deffe6d1e25b654e5e35e) + 
Co-authored-by: Owais Kazi <owaiskazi19@gmail.com> + + +* __Add hook to execute logic before Integ test task starts (#1969) (#1971)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Wed, 26 Jan 2022 17:42:45 -0600 + + Add hook to execute custom logic before the integ test starts. + This is + required for a workaround to enable the jacoco code coverage for Integ Tests. + Signed-off-by: Ankit Kala <ankikala@amazon.com> + + +* __Fixing typo in TESTING.md (#1849) (#1959)__ + + [github-actions[bot]](mailto:41898282+github-actions[bot]@users.noreply.github.com) - Wed, 26 Jan 2022 17:41:33 -0600 + + Fixes some grammar and link typos found in TESTING.md. + Signed-off-by: Vacha Shah <vachshah@amazon.com> + + +* __Add max_expansions option to wildcard interval (#1916) (#1979)__ + + [Matt Weber](mailto:matt@mattweber.org) - Wed, 26 Jan 2022 16:08:34 -0600 + + Add support for setting the max expansions on a wildcard interval. + The default + value is still 128 and the max value is bounded by + `BooleanQuery.getMaxClauseCount()`. 
+ Signed-off-by: Matt Weber <matt@mattweber.org> + + +* __Update protobuf-java to 3.19.3 (#1945) (#1949)__ + + [Tianli Feng](mailto:ftl94@live.com) - Fri, 21 Jan 2022 08:52:05 -0800 + + * Update protobuf-java to 3.19.3 + Signed-off-by: Tianli Feng <ftl94@live.com> + + * Exclude some API usage violations in the package com.google.protobuf for + thirdPartyAudit task to pass + Signed-off-by: Tianli Feng <ftl94@live.com> + + +* __Timeout fix backport to 1.x (#1953)__ + + [Suraj Singh](mailto:79435743+dreamer-89@users.noreply.github.com) - Thu, 20 Jan 2022 20:53:43 -0600 + + * [Bug] Wait for outstanding requests to complete (#1925) + Signed-off-by: Suraj Singh <surajrider@gmail.com> + + * [BUG] Wait for outstanding requests to complete in LastSuccessfulSett… + (#1939) + + * [BUG] Wait for outstanding requests to complete in + LastSuccessfulSettingsUpdate test + Signed-off-by: Suraj Singh <surajrider@gmail.com> + + * [BUG] Wait for outstanding requests to complete in + LastSuccessfulSettingsUpdate test + Signed-off-by: Suraj Singh <surajrider@gmail.com> + + +* __Update Netty to 4.1.73.Final (#1936) (#1937)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Wed, 19 Jan 2022 00:00:42 -0500 + + efs/remotes/origin/1.3 + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Modernize and consolidate JDKs usage across all stages of the build. Use JDK-17 as bundled JDK distribution to run tests (#1922)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Tue, 18 Jan 2022 10:51:53 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Expand SearchPlugin javadocs. (#1909) (#1923)__ + + [Matt Weber](mailto:matt@mattweber.org) - Tue, 18 Jan 2022 09:48:52 -0600 + + Add and clarify some search plugin point documentation. 
+ Signed-off-by: Matt Weber <matt@mattweber.org> + + +* __Make SortBuilders pluggable (#1856) (#1915)__ + + [Matt Weber](mailto:matt@mattweber.org) - Mon, 17 Jan 2022 12:28:22 -0500 + + Add the ability for plugin authors to add custom sort builders. + Signed-off-by: Matt Weber <matt@mattweber.org> + + +* __Refactor LegacyESVersion tests from Version tests (#1662) (#1663)__ + + [Nick Knize](mailto:nknize@apache.org) - Mon, 17 Jan 2022 12:27:56 -0500 + + In preparation for removing all LegacyESVersion support by 3.0; this commit + + largely refactors the LegacyESVersion test logic from the OpenSearch Version + + test logic into an independent test class. This PR also updates + Version.fromString + to ensure a proper legacy version is returned when major is + > 3 (to support + legacy yaml test and build scripts). + Note that bwc w/ legacy versions are still supported so some cross + compatibility + testing is retained in the Version test class. + + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + + +* __Fixing org.opensearch.common.network.InetAddressesTests.testForStringIPv6WithScopeIdInput (#1913) (#1914)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Sat, 15 Jan 2022 10:14:17 -0600 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __[BUG] Serialization bugs can cause node drops (#1885) (#1911)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Fri, 14 Jan 2022 14:23:25 -0600 + + This commit restructures InboundHandler to ensure all data + is consumed over + the wire. 
+ Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Fix o.o.transport.netty4.OpenSearchLoggingHandlerIT stack overflow test failure (#1900) (#1906)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Fri, 14 Jan 2022 12:38:18 -0600 + + Attempt to fix o.o.transport.netty4.OpenSearchLoggingHandlerIT fails w/ stack + overflow by + hardening test expectation patterns in regex patterns + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Replace JCenter with Maven Central. (#1057) (#1892)__ + + [Marc Handalian](mailto:handalm@amazon.com) - Wed, 12 Jan 2022 17:03:42 -0800 + + On February 3 2021, JFrog + [announced](https://jfrog.com/blog/into-the-sunset-bintray-jcenter-gocenter-and-chartcenter/) + the shutdown of JCenter. Later on April 27 2021, an update was provided that + the repository will only be read only and new package and versions are no + longer accepted on JCenter. This means we should no longer use JCenter for our + central artifacts repository. + This change replaces JCenter with Maven Central as per the Gradle + recommendation - https://blog.gradle.org/jcenter-shutdown + Signed-off-by: Rabi Panda <adnapibar@gmail.com> + Signed-off-by: Marc Handalian + <handalm@amazon.com> + Co-authored-by: Rabi Panda <adnapibar@gmail.com> + + +* __Update FIPS API libraries of Bouncy Castle (#1853) (#1886)__ + + [Tianli Feng](mailto:ftl94@live.com) - Wed, 12 Jan 2022 09:37:05 -0500 + + * Update bc-fips to 1.0.2.1 + Signed-off-by: Tianli Feng <ftl94@live.com> + + * Update bcpg-fips to 1.0.5.1 + Signed-off-by: Tianli Feng <ftl94@live.com> + + * Update bctls-fips to 1.0.12.2 + Signed-off-by: Tianli Feng <ftl94@live.com> + + * Use the unified bouncycastle version for bcpkix-jdk15on in HDFS testing + fixture + Signed-off-by: Tianli Feng <ftl94@live.com> + + +* __[1.x] Remove remaining Flavor Serialization (#1751) (#1757)__ + + [Nick Knize](mailto:nknize@apache.org) - Thu, 6 Jan 2022 11:11:31 -0800 + + * [Remove] Remaining Flavor Serialization (#1751) + This 
commit removes unnecessary serialization of unused flavor variable in + build + metadata from V_1_3_0+ + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + + * change flavor version check to V_1_3_0 + This commit changes the flavor serialization check in Build from V_2_0_0 to + V_1_3_0. + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + + +* __Update junit to 4.13.1 (#1837) (#1842)__ + + [Ashish Agrawal](mailto:ashisagr@amazon.com) - Wed, 5 Jan 2022 08:15:56 -0500 + + * Update junit to 4.13.1 + Signed-off-by: Ashish Agrawal <ashisagr@amazon.com> + + * update junit to 4.13.2 + Signed-off-by: Ashish Agrawal <ashisagr@amazon.com> + + * update SHA1 file + Signed-off-by: Ashish Agrawal <ashisagr@amazon.com> + + +* __Upgrading bouncycastle to 1.70 (#1832) (#1834)__ + + [Sarat Vemulapalli](mailto:vemulapallisarat@gmail.com) - Tue, 4 Jan 2022 11:57:01 -0800 + + Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + + +* __Updatting Netty to 4.1.72.Final (#1831) (#1835)__ + + [Sarat Vemulapalli](mailto:vemulapallisarat@gmail.com) - Tue, 4 Jan 2022 11:56:39 -0800 + + Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + + +* __Intermittent java.lang.Exception: Suite timeout exceeded (>= 1200000 msec) (#1827)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Wed, 29 Dec 2021 13:23:49 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Execution failed for task ':test:fixtures:azure/s3/hdfs/gcs-fixture:composeDown' (#1824) (#1825)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Wed, 29 Dec 2021 11:28:51 -0600 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Update to log4j 2.17.1 (#1820) (#1822)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Tue, 28 Dec 2021 17:57:26 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __RestIntegTestTask fails because of missed log4j-core dependency (#1815) (#1818)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Tue, 28 Dec 2021 17:08:37 
-0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Gradle clean failing after a failed gradle check, folders created by Docker under 'root' user (#1726) (#1775)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Thu, 23 Dec 2021 17:42:09 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __[plugin] repository-azure: add configuration settings for connect/write/response/read timeouts (#1789) (#1802)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Thu, 23 Dec 2021 12:26:27 -0600 + + * [plugin] repository-azure: add configuration settings for + connect/write/response/read timeouts + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + * Addressing code review comments: renaming connectionXxx to connectXxx + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + * Addressing code review comments: adding timeout comment + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Add bwc version 1.2.4 (#1797)__ + + [Rabi Panda](mailto:adnapibar@gmail.com) - Thu, 23 Dec 2021 10:19:15 -0500 + + Signed-off-by: Rabi Panda <adnapibar@gmail.com> + + +* __Use try-with-resources with MockLogAppender (#1595) (#1784)__ + + [Andrew Ross](mailto:andrross@amazon.com) - Tue, 21 Dec 2021 20:03:11 -0800 + + I previously added a helper that started a MockLogAppender to ensure it + was never added to a Logger before it was started. I subsequently found + the opposite case in RolloverIT.java where the appender was stopped + before it was closed, therefore creating a race where a concurrently + running test in the same JVM could cause a logging failure. This seems + like a really easy mistake to make when writing a test or introduce when + refactoring a test. I've made a change to use try-with-resources to + ensure that proper setup and teardown is done. This should make it much + harder to introduce this particular test bug in the future. Unfortunately, + it did involve touching a lot of files. 
The changes here are purely structural + to leverage try-with-resources; no testing logic has been changed. + + Signed-off-by: Andrew Ross <andrross@amazon.com> + + +* __Ignore file order in test assertion (#1755) (#1782)__ + + [Andrew Ross](mailto:andrross@amazon.com) - Mon, 20 Dec 2021 19:35:36 -0600 + + This unit test asserts that a SHA file for a groovy dependency gets + created. + However, a SHA file for javaparser-core also gets created in + the same + directory. For some reason, builds were failing on my machine + because + `Files::list` was returning the javaparser-core file first. I + don't believe + there are any ordering guarantees with that API, so I + relaxed the assertion to + not depend on ordering. + + Signed-off-by: Andrew Ross <andrross@amazon.com> + + +* __Fixing allocation filters to persist existing state on settings update (#1718) (#1780)__ + + [Ankit Jain](mailto:jain.ankitk@gmail.com) - Mon, 20 Dec 2021 18:31:53 -0500 + + * Fixing allocation filters to persist existing state on settings update + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + * Adding test for filter settings update + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + * Adding more tests and review comments + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + * Adding assertion and unit test for operation type mismatch + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + * Updating test names + + Signed-off-by: Ankit Jain <jain.ankitk@gmail.com> + + +* __Update to log4j 2.17.0 (#1771) (#1773)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Sat, 18 Dec 2021 11:33:16 -0800 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Updating .gitattributes for additional file types (#1727) (#1766)__ + + [Sarat Vemulapalli](mailto:vemulapallisarat@gmail.com) - Fri, 17 Dec 2021 15:51:12 -0800 + + * Updating .gitattributes for additional types + + Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + + +* __Better JDK-18 EA (and beyond) support 
of SecurityManager (#1750) (#1753)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Fri, 17 Dec 2021 16:08:30 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Add version 1.2.3. (#1759)__ + + [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Fri, 17 Dec 2021 09:14:45 -0800 + + Signed-off-by: dblock <dblock@dblock.org> + + +* __[plugin] repository-azure is not working properly hangs on basic operations (#1740)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Thu, 16 Dec 2021 15:01:50 -0500 + + * [plugin] repository-azure is not working properly hangs on basic operations + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + * Added tests cases and TODO items, addressing code review comments + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Adding 1.2.2 (#1731) (#1736)__ + + [Sarat Vemulapalli](mailto:vemulapallisarat@gmail.com) - Wed, 15 Dec 2021 14:16:16 -0500 + + Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> + + +* __Upgrade to log4j 2.16.0 (#1721) (#1723)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Tue, 14 Dec 2021 12:19:35 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Support JDK 18 EA builds (#1714)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Tue, 14 Dec 2021 06:47:00 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Fixing .gitattributes for binary content, removing *.class files (#1717) (#1720)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Mon, 13 Dec 2021 16:11:25 -0800 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Upgrade to logj4 2.15.0 (#1705)__ + + [Andrew Ross](mailto:andrross@amazon.com) - Fri, 10 Dec 2021 16:35:18 -0500 + + Signed-off-by: Andrew Ross <andrross@amazon.com> + + +* __Add version 1.2.1. 
(#1701) (#1702)__ + + [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Fri, 10 Dec 2021 15:36:19 -0500 + + Signed-off-by: dblock <dblock@dblock.org> + + +* __Move Gradle wrapper and precommit checks into OpenSearch repo. (#1664) (#1678)__ + + [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Wed, 8 Dec 2021 11:45:55 -0500 + + * Move Gradle checks into OpenSearch repo. + Signed-off-by: dblock <dblock@amazon.com> + + * Use working-directory for gradle wrapper validation. + Signed-off-by: dblock <dblock@amazon.com> + + * Use https://github.com/gradle/wrapper-validation-action. + Signed-off-by: dblock <dblock@amazon.com> + + +* __Moving DCO to workflows (#1458) (#1666)__ + + [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Wed, 8 Dec 2021 07:59:34 -0500 + + Signed-off-by: CEHENKLE <henkle@amazon.com> + Co-authored-by: CEHENKLE <henkle@amazon.com> + + +* __Start MockLogAppender before adding to static context (#1587) (#1659)__ + + [Andrew Ross](mailto:andrross@amazon.com) - Mon, 6 Dec 2021 16:45:49 -0500 + + I observed a test failure with the message + 'Attempted to append to non-started appender mock' from an assertion in + `OpenSearchTestCase::after`. I believe this indicates that a + MockLogAppender + (which is named "mock") was added as an appender to the + static logging context + and some other test in the same JVM happened to + cause a logging statement to + hit that appender and cause an error, which + then caused an unrelated test to + fail (because they share static state + with the logger). Almost all usages of + MockLogAppender start it + immediately after creation. I found a few that did + not and fixed those. + I also made a static helper in MockLogAppender to start + it upon + creation. 
+ Signed-off-by: Andrew Ross <andrross@amazon.com> + + +* __Revert "Support Gradle 7 (#1609) (#1622)" (#1657)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Mon, 6 Dec 2021 10:47:35 -0500 + + This reverts commit 93bd32b14270be0da8a6b5eef8eeabfce7eb2b58. + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Added .gitattributes to manage end-of-line checks for Windows/*nix systems (#1638) (#1655)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Mon, 6 Dec 2021 08:08:01 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Support Gradle 7 (#1609) (#1622)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Fri, 3 Dec 2021 15:53:57 -0500 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Renaming Slave terminology to Replica in 1.x branch (backporting) (#1645)__ + + [Rishikesh Pasham](mailto:62345295+Rishikesh1159@users.noreply.github.com) - Fri, 3 Dec 2021 15:52:51 -0500 + + Signed-off-by: Rishikesh Pasham <rishireddy1159@gmail.com> + + +* __Upgrading commons-codec in hdfs-fixture and cleaning up dependencies in repository-hdfs (#1603) (#1621)__ + + [Vacha](mailto:vachshah@amazon.com) - Tue, 30 Nov 2021 17:17:21 -0500 + + Signed-off-by: Vacha <vachshah@amazon.com> + + +* __Rename field_masking_span to span_field_masking (#1606) (#1623)__ + + [Xue Zhou](mailto:85715413+xuezhou25@users.noreply.github.com) - Mon, 29 Nov 2021 23:18:13 -0500 + + * Rename field_masking_span to span_field_masking + Signed-off-by: Xue Zhou <xuezhou@amazon.com> + + * Update SearchModuleTests.java + Signed-off-by: Xue Zhou <xuezhou@amazon.com> + + * Rename field_masking_span to span_field_masking + Signed-off-by: Xue Zhou <xuezhou@amazon.com> + + +* __Upgrade dependency (#1571) (#1594)__ + + [Vacha](mailto:vachshah@amazon.com) - Mon, 29 Nov 2021 14:48:33 -0500 + + * Upgrading guava, commons-io and apache-ant dependencies. + Signed-off-by: Vacha <vachshah@amazon.com> + + * Adding failureaccess since guava needs it. 
+ Signed-off-by: Vacha <vachshah@amazon.com> + + +* __Lower build requirement from Java 14+ to Java 11+ (#940) (#1608)__ + + [Marc Handalian](mailto:handalm@amazon.com) - Tue, 23 Nov 2021 21:41:59 -0500 + + * Lower build requirement from Java 14+ to Java 11+ + Avoid use of -Werror -Xlint:all, which may change significantly across + java + releases (new warnings could be added). Instead, just list the + warnings + individually. + Workaround JDK 11 compiler bug (JDK-8209058) that only impacts test fixture + code in the build itself. + Signed-off-by: Robert Muir <rmuir@apache.org> + + * Disable warning around -source 7 -release 7 for java version checker + The java version checker triggers some default warnings because it + targets + java7: + + ``` + Task :distribution:tools:java-version-checker:compileJava FAILED + warning: [options] source value 7 is obsolete and will be removed in a future release + warning: [options] target value 7 is obsolete and will be removed in a future release + warning: [options] To suppress warnings about obsolete options, use -Xlint:-options. + + error: warnings found and -Werror specified + ``` + + * Suppress this warning explicitly for this module. + Signed-off-by: Robert Muir <rmuir@apache.org> + + * more java14 -> java11 cleanup + Signed-off-by: Robert Muir <rmuir@apache.org> + Co-authored-by: Robert Muir <rmuir@apache.org> + Signed-off-by: Marc Handalian + <handalm@amazon.com> + Co-authored-by: Daniel Doubrovkine (dB.) <dblock@dblock.org> + Co-authored-by: + Robert Muir <rmuir@apache.org> + + +* __Giving informative error messages for double slashes in API call URLs- [ BACKPORT-1.x ] (#1601)__ + + [Megha Sai Kavikondala](mailto:kavmegha@amazon.com) - Tue, 23 Nov 2021 13:34:31 -0500 + + * Integration test that checks for settings upgrade (#1482) + + * Made changes. 
+ Signed-off-by: Megha Sai Kavikondala <kavmegha@amazon.com> + + * Signed-off-by: Megha Sai Kavikondala <kavmegha@amazon.com> + Changes made by deleting the TestSettingsIT file and adding new lines in + FullClusterRestartSettingsUpgradeIT.java + Signed-off-by: Megha Sai Kavikondala <kavmegha@amazon.com> + + * Signed-off-by: Megha Sai Kavikondala <kavmegha@amazon.com> + Informative error messages related to empty index name.[Backport] + + +* __Enable RestHighLevel-Client to set parameter require_alias for bulk index and reindex requests (#1604)__ + + [Jan Baudisch](mailto:jan.baudisch.de@gmail.com) - Tue, 23 Nov 2021 10:20:07 -0500 + + Signed-off-by: Jan Baudisch <jan.baudisch.libri@gmail.com> + Co-authored-by: Jan Baudisch <jan.baudisch.libri@gmail.com> + + +* __Upgrading gson to 2.8.9 (#1541). (#1545)__ + + [Vacha](mailto:vachshah@amazon.com) - Fri, 19 Nov 2021 16:52:38 -0500 + + Signed-off-by: Vacha <vachshah@amazon.com> + + +* __[repository-azure] Update to the latest Azure Storage SDK v12, remove privileged runnable wrapper in favor of access helper (#1521) (#1538)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Thu, 11 Nov 2021 14:05:47 -0800 + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + +* __Integration test that checks for settings upgrade (#1482) (#1524)__ + + [Megha Sai Kavikondala](mailto:kavmegha@amazon.com) - Thu, 11 Nov 2021 12:55:11 -0800 + + * Made changes. 
+ Signed-off-by: Megha Sai Kavikondala <kavmegha@amazon.com> + + * Signed-off-by: Megha Sai Kavikondala <kavmegha@amazon.com> + Changes made by deleting the TestSettingsIT file and adding new lines in + FullClusterRestartSettingsUpgradeIT.java + Signed-off-by: Megha Sai Kavikondala <kavmegha@amazon.com> + + +* __Added logic to allow {dot} files on startup (#1437) (#1516)__ + + [Ryan Bogan](mailto:10944539+ryanbogan@users.noreply.github.com) - Thu, 11 Nov 2021 11:02:03 -0800 + + * Added logic to allow {dot} files on startup + Signed-off-by: Ryan Bogan <rbogan@amazon.com> + + * Ensures that only plugin directories are returned by findPluginDirs() + Signed-off-by: Ryan Bogan <rbogan@amazon.com> + + * Prevents . files from being returned as plugins + Signed-off-by: Ryan Bogan <rbogan@amazon.com> + + +* __Add staged version 1.1.1 (#1509)__ + + [Nick Knize](mailto:nknize@apache.org) - Thu, 4 Nov 2021 14:46:57 -0500 + + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + + From b9f04405d6dfabcc2b8e87a86d4f83b003b3ffe7 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Thu, 17 Mar 2022 18:13:27 -0500 Subject: [PATCH 44/46] [Remove] types from CreateIndexRequest and companion Builder's mapping method (#2498) Removes the type variable as input to CreateIndexRequest.mapping(Object...) along with the CreateIndexRequestBuilder helper class. This also refactors the method name to setMapping for consistency with other methods (e.g., setSettings). 
Signed-off-by: Nicholas Walter Knize --- .../common/QueryStringWithAnalyzersIT.java | 2 +- .../common/HighlighterWithAnalyzersTests.java | 9 +-- .../script/expression/MoreExpressionIT.java | 12 ++-- .../percolator/PercolatorQuerySearchIT.java | 39 +++++------- .../PercolatorQuerySearchTests.java | 2 +- .../index/mapper/size/SizeMappingIT.java | 3 +- .../admin/indices/create/ShrinkIndexIT.java | 3 +- .../admin/indices/create/SplitIndexIT.java | 9 ++- .../action/search/TransportSearchIT.java | 2 +- .../action/termvectors/GetTermVectorsIT.java | 30 ++++----- .../opensearch/aliases/IndexAliasesIT.java | 24 +++---- .../coordination/RareClusterStateIT.java | 4 +- .../document/DocumentActionsIT.java | 2 +- .../org/opensearch/document/ShardInfoIT.java | 2 +- .../opensearch/explain/ExplainActionIT.java | 6 +- .../java/org/opensearch/get/GetActionIT.java | 4 +- .../index/suggest/stats/SuggestStatsIT.java | 4 +- .../indices/IndicesRequestCacheIT.java | 20 +++--- .../indices/analyze/AnalyzeActionIT.java | 6 +- .../breaker/CircuitBreakerServiceIT.java | 3 +- .../recovery/IndexPrimaryRelocationIT.java | 2 +- .../indices/stats/IndexStatsIT.java | 4 +- .../template/SimpleIndexTemplateIT.java | 4 +- .../recovery/TruncatedRecoveryIT.java | 2 +- .../AggregationsIntegrationIT.java | 2 +- .../search/aggregations/MetadataIT.java | 2 +- .../search/aggregations/MissingValueIT.java | 4 +- .../bucket/AdjacencyMatrixIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 16 ++--- .../bucket/DateHistogramOffsetIT.java | 2 +- .../aggregations/bucket/DateRangeIT.java | 8 +-- .../bucket/DiversifiedSamplerIT.java | 4 +- .../aggregations/bucket/DoubleTermsIT.java | 6 +- .../search/aggregations/bucket/FilterIT.java | 2 +- .../search/aggregations/bucket/FiltersIT.java | 2 +- .../aggregations/bucket/GeoDistanceIT.java | 6 +- .../aggregations/bucket/GeoHashGridIT.java | 4 +- .../aggregations/bucket/HistogramIT.java | 10 +-- .../search/aggregations/bucket/IpRangeIT.java | 2 +- 
.../search/aggregations/bucket/IpTermsIT.java | 6 +- .../aggregations/bucket/LongTermsIT.java | 4 +- .../aggregations/bucket/MinDocCountIT.java | 2 +- .../aggregations/bucket/NaNSortingIT.java | 2 +- .../search/aggregations/bucket/NestedIT.java | 10 +-- .../search/aggregations/bucket/RangeIT.java | 8 +-- .../search/aggregations/bucket/SamplerIT.java | 4 +- .../aggregations/bucket/ShardReduceIT.java | 3 +- .../SignificantTermsSignificanceScoreIT.java | 11 ++-- .../bucket/TermsDocCountErrorIT.java | 10 +-- .../bucket/TermsShardMinDocCountIT.java | 5 +- .../bucket/terms/StringTermsIT.java | 34 ++-------- .../aggregations/metrics/CardinalityIT.java | 2 +- .../aggregations/metrics/ExtendedStatsIT.java | 2 +- .../metrics/HDRPercentileRanksIT.java | 2 +- .../metrics/HDRPercentilesIT.java | 2 +- .../metrics/MedianAbsoluteDeviationIT.java | 4 +- .../metrics/ScriptedMetricIT.java | 4 +- .../search/aggregations/metrics/StatsIT.java | 2 +- .../search/aggregations/metrics/SumIT.java | 7 +-- .../metrics/TDigestPercentileRanksIT.java | 2 +- .../metrics/TDigestPercentilesIT.java | 2 +- .../aggregations/metrics/TopHitsIT.java | 6 +- .../aggregations/metrics/ValueCountIT.java | 2 +- .../aggregations/pipeline/AvgBucketIT.java | 4 +- .../pipeline/DateDerivativeIT.java | 2 +- .../aggregations/pipeline/DerivativeIT.java | 4 +- .../pipeline/ExtendedStatsBucketIT.java | 4 +- .../aggregations/pipeline/MaxBucketIT.java | 4 +- .../aggregations/pipeline/MinBucketIT.java | 4 +- .../pipeline/PercentilesBucketIT.java | 4 +- .../aggregations/pipeline/StatsBucketIT.java | 4 +- .../aggregations/pipeline/SumBucketIT.java | 4 +- .../search/basic/SearchWhileRelocatingIT.java | 2 +- .../basic/TransportSearchFailuresIT.java | 2 +- .../search/fetch/subphase/InnerHitsIT.java | 12 ++-- .../highlight/HighlighterSearchIT.java | 63 +++++++------------ .../search/fieldcaps/FieldCapabilitiesIT.java | 4 +- .../search/fields/SearchFieldsIT.java | 7 +-- .../functionscore/RandomScoreFunctionIT.java | 3 +- 
.../opensearch/search/geo/GeoPolygonIT.java | 3 +- .../search/geo/GeoShapeIntegrationIT.java | 6 +- .../geo/LegacyGeoShapeIntegrationIT.java | 18 +----- .../search/morelikethis/MoreLikeThisIT.java | 21 ++----- .../search/nested/SimpleNestedIT.java | 4 +- .../aggregation/AggregationProfilerIT.java | 2 +- .../search/query/MultiMatchQueryIT.java | 2 +- .../search/query/ScriptScoreQueryIT.java | 8 +-- .../search/query/SearchQueryIT.java | 48 +++++++------- .../scriptfilter/ScriptQuerySearchIT.java | 2 +- .../search/scroll/SearchScrollIT.java | 4 +- .../search/searchafter/SearchAfterIT.java | 17 +++-- .../search/simple/SimpleSearchIT.java | 6 +- .../opensearch/search/sort/FieldSortIT.java | 44 ++++++------- .../search/sort/GeoDistanceSortBuilderIT.java | 11 ++-- .../search/source/MetadataFetchingIT.java | 2 +- .../search/suggest/SuggestSearchIT.java | 14 ++--- .../java/org/opensearch/update/UpdateIT.java | 2 +- .../validate/SimpleValidateQueryIT.java | 8 +-- .../create/CreateIndexRequestBuilder.java | 4 +- .../metadata/MetadataMappingServiceTests.java | 12 ++-- .../FieldStatsProviderRefreshTests.java | 2 +- .../index/mapper/UpdateMappingTests.java | 2 +- .../query/CommonTermsQueryParserTests.java | 3 +- .../bucket/ShardSizeTestCase.java | 2 +- .../metrics/AbstractGeoTestCase.java | 13 ++-- .../search/geo/GeoShapeQueryTests.java | 22 ++----- .../SharedSignificantTermsTestMethods.java | 2 +- .../metrics/AbstractNumericTestCase.java | 2 +- .../test/OpenSearchSingleNodeTestCase.java | 4 +- 109 files changed, 343 insertions(+), 480 deletions(-) diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java index 02c3bdfd70ec2..8c2f83bf83d85 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java +++ 
b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java @@ -73,7 +73,7 @@ public void testCustomWordDelimiterQueryString() { .put("analysis.filter.custom_word_delimiter.split_on_numerics", "false") .put("analysis.filter.custom_word_delimiter.stem_english_possessive", "false") ) - .addMapping("type1", "field1", "type=text,analyzer=my_analyzer", "field2", "type=text,analyzer=my_analyzer") + .setMapping("field1", "type=text,analyzer=my_analyzer", "field2", "type=text,analyzer=my_analyzer") ); client().prepareIndex("test").setId("1").setSource("field1", "foo bar baz", "field2", "not needed").get(); diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java index a8dd2d2578541..57c959a4f0b65 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -149,8 +149,7 @@ public void testMultiPhraseCutoff() throws IOException { * query. 
We cut off and extract terms if there are more than 16 terms in the query */ assertAcked( - prepareCreate("test").addMapping( - "test", + prepareCreate("test").setMapping( "body", "type=text,analyzer=custom_analyzer," + "search_analyzer=custom_analyzer,term_vector=with_positions_offsets" ) @@ -225,8 +224,7 @@ public void testSynonyms() throws IOException { assertAcked( prepareCreate("test").setSettings(builder.build()) - .addMapping( - "type1", + .setMapping( "field1", "type=text,term_vector=with_positions_offsets,search_analyzer=synonym," + "analyzer=standard,index_options=offsets" ) @@ -335,8 +333,7 @@ public void testPhrasePrefix() throws IOException { assertAcked( prepareCreate("second_test_index").setSettings(builder.build()) - .addMapping( - "doc", + .setMapping( "field4", "type=text,term_vector=with_positions_offsets,analyzer=synonym", "field3", diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java index 450e70c3c8938..952b00dda608c 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java @@ -158,7 +158,7 @@ public void testScore() throws Exception { } public void testDateMethods() throws Exception { - OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "date0", "type=date", "date1", "type=date")); + OpenSearchAssertions.assertAcked(prepareCreate("test").setMapping("date0", "type=date", "date1", "type=date")); ensureGreen("test"); indexRandom( true, @@ -188,7 +188,7 @@ public void testDateMethods() throws Exception { } public void testDateObjectMethods() throws Exception { - OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "date0", "type=date", "date1", 
"type=date")); + OpenSearchAssertions.assertAcked(prepareCreate("test").setMapping("date0", "type=date", "date1", "type=date")); ensureGreen("test"); indexRandom( true, @@ -219,7 +219,7 @@ public void testDateObjectMethods() throws Exception { public void testMultiValueMethods() throws Exception { OpenSearchAssertions.assertAcked( - prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double", "double2", "type=double") + prepareCreate("test").setMapping("double0", "type=double", "double1", "type=double", "double2", "type=double") ); ensureGreen("test"); @@ -322,7 +322,7 @@ public void testMultiValueMethods() throws Exception { } public void testInvalidDateMethodCall() throws Exception { - OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double", "type=double")); + OpenSearchAssertions.assertAcked(prepareCreate("test").setMapping("double", "type=double")); ensureGreen("test"); indexRandom(true, client().prepareIndex("test").setId("1").setSource("double", "178000000.0")); try { @@ -343,7 +343,7 @@ public void testInvalidDateMethodCall() throws Exception { } public void testSparseField() throws Exception { - OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "x", "type=long", "y", "type=long")); + OpenSearchAssertions.assertAcked(prepareCreate("test").setMapping("x", "type=long", "y", "type=long")); ensureGreen("test"); indexRandom( true, @@ -528,7 +528,7 @@ public void testSpecialValueVariable() throws Exception { public void testStringSpecialValueVariable() throws Exception { // i.e. 
expression script for term aggregations, which is not allowed - assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", "text", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("text", "type=keyword").get()); ensureGreen("test"); indexRandom( true, diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index 11fc61d6c6d99..f78b74e272ebf 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -101,7 +101,7 @@ public void testPercolatorQuery() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type", "id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") + .setMapping("id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test") @@ -183,8 +183,7 @@ public void testPercolatorRangeQueries() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping( - "type", + .setMapping( "field1", "type=long", "field2", @@ -315,17 +314,7 @@ public void testPercolatorGeoQueries() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping( - "type", - "id", - "type=keyword", - "field1", - "type=geo_point", - "field2", - "type=geo_shape", - "query", - "type=percolator" - ) + .setMapping("id", "type=keyword", "field1", "type=geo_point", "field2", "type=geo_shape", "query", "type=percolator") ); client().prepareIndex("test") @@ -380,7 +369,7 @@ public void testPercolatorQueryExistingDocument() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type", 
"id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") + .setMapping("id", "type=keyword", "field1", "type=keyword", "field2", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test") @@ -438,7 +427,7 @@ public void testPercolatorQueryExistingDocumentSourceDisabled() throws Exception client().admin() .indices() .prepareCreate("test") - .addMapping("type", "_source", "enabled=false", "field1", "type=keyword", "query", "type=percolator") + .setMapping("_source", "enabled=false", "field1", "type=keyword", "query", "type=percolator") ); client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("query", matchAllQuery()).endObject()).get(); @@ -459,7 +448,7 @@ public void testPercolatorSpecificQueries() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type", "id", "type=keyword", "field1", "type=text", "field2", "type=text", "query", "type=percolator") + .setMapping("id", "type=keyword", "field1", "type=text", "field2", "type=text", "query", "type=percolator") ); client().prepareIndex("test") @@ -565,7 +554,7 @@ public void testPercolatorQueryWithHighlighting() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type", "id", "type=keyword", "field1", fieldMapping.toString(), "query", "type=percolator") + .setMapping("id", "type=keyword", "field1", fieldMapping.toString(), "query", "type=percolator") ); client().prepareIndex("test") .setId("1") @@ -810,7 +799,7 @@ public void testTakePositionOffsetGapIntoAccount() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type", "field", "type=text,position_increment_gap=5", "query", "type=percolator") + .setMapping("field", "type=text,position_increment_gap=5", "query", "type=percolator") ); client().prepareIndex("test") .setId("1") @@ -832,13 +821,13 @@ public void testTakePositionOffsetGapIntoAccount() throws Exception { public void 
testManyPercolatorFields() throws Exception { String queryFieldName = randomAlphaOfLength(8); assertAcked( - client().admin().indices().prepareCreate("test1").addMapping("type", queryFieldName, "type=percolator", "field", "type=keyword") + client().admin().indices().prepareCreate("test1").setMapping(queryFieldName, "type=percolator", "field", "type=keyword") ); assertAcked( client().admin() .indices() .prepareCreate("test2") - .addMapping("type", queryFieldName, "type=percolator", "second_query_field", "type=percolator", "field", "type=keyword") + .setMapping(queryFieldName, "type=percolator", "second_query_field", "type=percolator", "field", "type=keyword") ); assertAcked( client().admin() @@ -867,7 +856,7 @@ public void testManyPercolatorFields() throws Exception { public void testWithMultiplePercolatorFields() throws Exception { String queryFieldName = randomAlphaOfLength(8); assertAcked( - client().admin().indices().prepareCreate("test1").addMapping("type", queryFieldName, "type=percolator", "field", "type=keyword") + client().admin().indices().prepareCreate("test1").setMapping(queryFieldName, "type=percolator", "field", "type=keyword") ); assertAcked( client().admin() @@ -1130,7 +1119,7 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { } public void testPercolatorQueryViaMultiSearch() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "field1", "type=text", "query", "type=percolator")); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("field1", "type=text", "query", "type=percolator")); client().prepareIndex("test") .setId("1") @@ -1248,7 +1237,7 @@ public void testDisallowExpensiveQueries() throws IOException { client().admin() .indices() .prepareCreate("test") - .addMapping("_doc", "id", "type=keyword", "field1", "type=keyword", "query", "type=percolator") + .setMapping("id", "type=keyword", "field1", "type=keyword", "query", "type=percolator") ); 
client().prepareIndex("test") @@ -1298,7 +1287,7 @@ public void testDisallowExpensiveQueries() throws IOException { public void testWrappedWithConstantScore() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", "d", "type=date", "q", "type=percolator")); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("d", "type=date", "q", "type=percolator")); client().prepareIndex("test") .setId("1") diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java index d3da99ffbc102..1d77c9d472864 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorQuerySearchTests.java @@ -96,7 +96,7 @@ protected Map, Object>> pluginScripts() { } public void testPercolateScriptQuery() throws IOException { - client().admin().indices().prepareCreate("index").addMapping("type", "query", "type=percolator").get(); + client().admin().indices().prepareCreate("index").setMapping("query", "type=percolator").get(); client().prepareIndex("index") .setId("1") .setSource( diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java index 375222cd2af44..3a430331167f6 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/opensearch/index/mapper/size/SizeMappingIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import 
org.opensearch.plugin.mapper.MapperSizePlugin; import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchIntegTestCase; @@ -122,7 +121,7 @@ private void assertSizeMappingEnabled(String index, boolean enabled) throws IOEx } public void testBasic() throws Exception { - assertAcked(prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME, "_size", "enabled=true")); + assertAcked(prepareCreate("test").setMapping("_size", "enabled=true")); final String source = "{\"f\":10}"; indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON)); GetResponse getResponse = client().prepareGet("test", "1").setStoredFields("_size").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java index ef5c56c50ed83..e8a6c68a41076 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/ShrinkIndexIT.java @@ -71,7 +71,6 @@ import org.opensearch.index.Index; import org.opensearch.index.IndexService; import org.opensearch.index.engine.SegmentsStats; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.shard.IndexShard; @@ -527,7 +526,7 @@ public void testCreateShrinkWithIndexSort() throws Exception { .put("sort.order", "desc") .put("number_of_shards", 8) .put("number_of_replicas", 0) - ).addMapping(MapperService.SINGLE_MAPPING_NAME, "id", "type=keyword,doc_values=true").get(); + ).setMapping("id", "type=keyword,doc_values=true").get(); for (int i = 0; i < 20; i++) { client().prepareIndex("source") .setId(Integer.toString(i)) diff --git 
a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java index 42b1d5f4a757f..89e11984621da 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/SplitIndexIT.java @@ -65,7 +65,6 @@ import org.opensearch.index.Index; import org.opensearch.index.IndexService; import org.opensearch.index.engine.SegmentsStats; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.index.seqno.SeqNoStats; import org.opensearch.index.shard.IndexShard; @@ -136,12 +135,12 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha int numRoutingShards = MetadataCreateIndexService.calculateNumRoutingShards(secondSplitShards, Version.CURRENT) - 1; settings.put("index.routing_partition_size", randomIntBetween(1, numRoutingShards)); if (useNested) { - createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "_routing", "required=true", "nested1", "type=nested"); + createInitialIndex.setMapping("_routing", "required=true", "nested1", "type=nested"); } else { - createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "_routing", "required=true"); + createInitialIndex.setMapping("_routing", "required=true"); } } else if (useNested) { - createInitialIndex.addMapping(MapperService.SINGLE_MAPPING_NAME, "nested1", "type=nested"); + createInitialIndex.setMapping("nested1", "type=nested"); } logger.info("use routing {} use mixed routing {} use nested {}", useRouting, useMixedRouting, useNested); createInitialIndex.setSettings(settings).get(); @@ -523,7 +522,7 @@ public void testCreateSplitWithIndexSort() throws Exception { .put("sort.order", "desc") .put("number_of_shards", 2) .put("number_of_replicas", 0) - 
).addMapping(MapperService.SINGLE_MAPPING_NAME, "id", "type=keyword,doc_values=true").get(); + ).setMapping("id", "type=keyword,doc_values=true").get(); for (int i = 0; i < 20; i++) { client().prepareIndex("source") .setId(Integer.toString(i)) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java index c7985d972de5e..a356d273f7060 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/search/TransportSearchIT.java @@ -348,7 +348,7 @@ public void testSearchIdle() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numOfReplicas) .put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.timeValueMillis(randomIntBetween(50, 500))); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("_doc", "created_date", "type=date,format=yyyy-MM-dd")); + assertAcked(prepareCreate("test").setSettings(settings).setMapping("created_date", "type=date,format=yyyy-MM-dd")); ensureGreen("test"); assertBusy(() -> { for (String node : internalCluster().nodesInclude("test")) { diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java index 8ab6450ead2af..b5e60f44983f5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java @@ -161,8 +161,7 @@ public void testNotIndexedField() throws Exception { // must be of type string and indexed. 
assertAcked( prepareCreate("test").addAlias(new Alias("alias")) - .addMapping( - "type1", + .setMapping( "field0", "type=integer,", // no tvs "field1", @@ -548,10 +547,8 @@ private void checkBrownFoxTermVector(Fields fields, String fieldName, boolean wi public void testDuelWithAndWithoutTermVectors() throws IOException, ExecutionException, InterruptedException { // setup indices String[] indexNames = new String[] { "with_tv", "without_tv" }; - assertAcked( - prepareCreate(indexNames[0]).addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets,analyzer=keyword") - ); - assertAcked(prepareCreate(indexNames[1]).addMapping("type1", "field1", "type=text,term_vector=no,analyzer=keyword")); + assertAcked(prepareCreate(indexNames[0]).setMapping("field1", "type=text,term_vector=with_positions_offsets,analyzer=keyword")); + assertAcked(prepareCreate(indexNames[1]).setMapping("field1", "type=text,term_vector=no,analyzer=keyword")); ensureGreen(); // index documents with and without term vectors @@ -656,9 +653,7 @@ public void testSimpleWildCards() throws IOException { public void testArtificialVsExisting() throws ExecutionException, InterruptedException, IOException { // setup indices Settings.Builder settings = Settings.builder().put(indexSettings()).put("index.analysis.analyzer", "standard"); - assertAcked( - prepareCreate("test").setSettings(settings).addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets") - ); + assertAcked(prepareCreate("test").setSettings(settings).setMapping("field1", "type=text,term_vector=with_positions_offsets")); ensureGreen(); // index documents existing document @@ -704,7 +699,7 @@ public void testArtificialVsExisting() throws ExecutionException, InterruptedExc public void testArtificialNoDoc() throws IOException { // setup indices Settings.Builder settings = Settings.builder().put(indexSettings()).put("index.analysis.analyzer", "standard"); - 
assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", "field1", "type=text")); + assertAcked(prepareCreate("test").setSettings(settings).setMapping("field1", "type=text")); ensureGreen(); // request tvs from artificial document @@ -929,7 +924,7 @@ public void testTermVectorsWithVersion() { public void testFilterLength() throws ExecutionException, InterruptedException, IOException { logger.info("Setting up the index ..."); Settings.Builder settings = Settings.builder().put(indexSettings()).put("index.analysis.analyzer", "keyword"); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", "tags", "type=text")); + assertAcked(prepareCreate("test").setSettings(settings).setMapping("tags", "type=text")); int numTerms = scaledRandomIntBetween(10, 50); logger.info("Indexing one document with tags of increasing length ..."); @@ -962,7 +957,7 @@ public void testFilterLength() throws ExecutionException, InterruptedException, public void testFilterTermFreq() throws ExecutionException, InterruptedException, IOException { logger.info("Setting up the index ..."); Settings.Builder settings = Settings.builder().put(indexSettings()).put("index.analysis.analyzer", "keyword"); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", "tags", "type=text")); + assertAcked(prepareCreate("test").setSettings(settings).setMapping("tags", "type=text")); logger.info("Indexing one document with tags of increasing frequencies ..."); int numTerms = scaledRandomIntBetween(10, 50); @@ -1000,7 +995,7 @@ public void testFilterDocFreq() throws ExecutionException, InterruptedException, .put(indexSettings()) .put("index.analysis.analyzer", "keyword") .put("index.number_of_shards", 1); // no dfs - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", "tags", "type=text")); + assertAcked(prepareCreate("test").setSettings(settings).setMapping("tags", "type=text")); int numDocs = scaledRandomIntBetween(10, 50); // as many 
terms as there are docs logger.info("Indexing {} documents with tags of increasing dfs ...", numDocs); @@ -1030,9 +1025,7 @@ public void testFilterDocFreq() throws ExecutionException, InterruptedException, public void testArtificialDocWithPreference() throws InterruptedException, IOException { // setup indices Settings.Builder settings = Settings.builder().put(indexSettings()).put("index.analysis.analyzer", "standard"); - assertAcked( - prepareCreate("test").setSettings(settings).addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets") - ); + assertAcked(prepareCreate("test").setSettings(settings).setMapping("field1", "type=text,term_vector=with_positions_offsets")); ensureGreen(); // index document @@ -1076,8 +1069,7 @@ public void testWithKeywordAndNormalizer() throws IOException, ExecutionExceptio .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase"); assertAcked( prepareCreate(indexNames[0]).setSettings(builder.build()) - .addMapping( - "type1", + .setMapping( "field1", "type=text,term_vector=with_positions_offsets,analyzer=my_analyzer", "field2", @@ -1086,7 +1078,7 @@ public void testWithKeywordAndNormalizer() throws IOException, ExecutionExceptio ); assertAcked( prepareCreate(indexNames[1]).setSettings(builder.build()) - .addMapping("type1", "field1", "type=keyword,normalizer=my_normalizer", "field2", "type=keyword") + .setMapping("field1", "type=keyword,normalizer=my_normalizer", "field2", "type=keyword") ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java index 2d01e4c031538..ff64a2cd90cb8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/aliases/IndexAliasesIT.java @@ -226,7 +226,7 @@ public void testFailedFilter() throws Exception { public void testFilteringAliases() throws Exception 
{ logger.info("--> creating index [test]"); - assertAcked(prepareCreate("test").addMapping("type", "user", "type=text")); + assertAcked(prepareCreate("test").setMapping("user", "type=text")); ensureGreen(); @@ -260,7 +260,7 @@ public void testEmptyFilter() throws Exception { public void testSearchingFilteringAliasesSingleIndex() throws Exception { logger.info("--> creating index [test]"); - assertAcked(prepareCreate("test").addMapping("type1", "id", "type=text", "name", "type=text,fielddata=true")); + assertAcked(prepareCreate("test").setMapping("id", "type=text", "name", "type=text,fielddata=true")); ensureGreen(); @@ -363,9 +363,9 @@ public void testSearchingFilteringAliasesSingleIndex() throws Exception { public void testSearchingFilteringAliasesTwoIndices() throws Exception { logger.info("--> creating index [test1]"); - assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=text")); + assertAcked(prepareCreate("test1").setMapping("name", "type=text")); logger.info("--> creating index [test2]"); - assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=text")); + assertAcked(prepareCreate("test2").setMapping("name", "type=text")); ensureGreen(); logger.info("--> adding filtering aliases to index [test1]"); @@ -593,8 +593,8 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { public void testDeletingByQueryFilteringAliases() throws Exception { logger.info("--> creating index [test1] and [test2"); - assertAcked(prepareCreate("test1").addMapping("type1", "name", "type=text")); - assertAcked(prepareCreate("test2").addMapping("type1", "name", "type=text")); + assertAcked(prepareCreate("test1").setMapping("name", "type=text")); + assertAcked(prepareCreate("test2").setMapping("name", "type=text")); ensureGreen(); logger.info("--> adding filtering aliases to index [test1]"); @@ -648,8 +648,8 @@ public void testDeletingByQueryFilteringAliases() throws Exception { public void testDeleteAliases() throws Exception { 
logger.info("--> creating index [test1] and [test2]"); - assertAcked(prepareCreate("test1").addMapping("type", "name", "type=text")); - assertAcked(prepareCreate("test2").addMapping("type", "name", "type=text")); + assertAcked(prepareCreate("test1").setMapping("name", "type=text")); + assertAcked(prepareCreate("test2").setMapping("name", "type=text")); ensureGreen(); logger.info("--> adding filtering aliases to index [test1]"); @@ -780,7 +780,7 @@ public void run() { public void testSameAlias() throws Exception { logger.info("--> creating index [test]"); - assertAcked(prepareCreate("test").addMapping("type", "name", "type=text")); + assertAcked(prepareCreate("test").setMapping("name", "type=text")); ensureGreen(); logger.info("--> creating alias1 "); @@ -1073,7 +1073,7 @@ public void testGetAllAliasesWorks() { public void testCreateIndexWithAliases() throws Exception { assertAcked( - prepareCreate("test").addMapping("type", "field", "type=text") + prepareCreate("test").setMapping("field", "type=text") .addAlias(new Alias("alias1")) .addAlias(new Alias("alias2").filter(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("field")))) .addAlias(new Alias("alias3").indexRouting("index").searchRouting("search")) @@ -1103,7 +1103,7 @@ public void testCreateIndexWithAliasesInSource() throws Exception { public void testCreateIndexWithAliasesSource() throws Exception { assertAcked( - prepareCreate("test").addMapping("type", "field", "type=text") + prepareCreate("test").setMapping("field", "type=text") .setAliases( "{\n" + " \"alias1\" : {},\n" @@ -1180,7 +1180,7 @@ public void testAddAliasWithFilterNoMapping() throws Exception { } public void testAliasFilterWithNowInRangeFilterAndQuery() throws Exception { - assertAcked(prepareCreate("my-index").addMapping("my-type", "timestamp", "type=date")); + assertAcked(prepareCreate("my-index").setMapping("timestamp", "type=date")); assertAliasesVersionIncreases( "my-index", () -> assertAcked( diff --git 
a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java index 3060b5c23fe75..6f4c6fca77196 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/coordination/RareClusterStateIT.java @@ -176,9 +176,7 @@ public void testDeleteCreateInOneBulk() throws Exception { internalCluster().startMasterOnlyNode(); String dataNode = internalCluster().startDataOnlyNode(); assertFalse(client().admin().cluster().prepareHealth().setWaitForNodes("2").get().isTimedOut()); - prepareCreate("test").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping(MapperService.SINGLE_MAPPING_NAME) - .get(); + prepareCreate("test").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)).get(); ensureGreen("test"); // block none master node. 
diff --git a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java index f3693be3b7227..1e40cc14bbb36 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java @@ -69,7 +69,7 @@ */ public class DocumentActionsIT extends OpenSearchIntegTestCase { protected void createIndex() { - OpenSearchAssertions.assertAcked(prepareCreate(getConcreteIndexName()).addMapping("type1", "name", "type=keyword,store=true")); + OpenSearchAssertions.assertAcked(prepareCreate(getConcreteIndexName()).setMapping("name", "type=keyword,store=true")); } protected String getConcreteIndexName() { diff --git a/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java b/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java index be1335bd56ba9..5f217548794db 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/ShardInfoIT.java @@ -125,7 +125,7 @@ private void prepareIndex(int numberOfPrimaryShards, boolean routingRequired) th Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfPrimaryShards) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numCopies - 1) - ).addMapping("type", "_routing", "required=" + routingRequired).get() + ).setMapping("_routing", "required=" + routingRequired).get() ); for (int i = 0; i < numberOfPrimaryShards; i++) { ensureActiveShardCopies(i, numNodes); diff --git a/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java b/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java index 78069970c1a60..53da0309aa602 100644 --- a/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/explain/ExplainActionIT.java @@ -115,7 +115,7 @@ public void testSimple() throws Exception { public void testExplainWithFields() throws Exception { assertAcked( - prepareCreate("test").addMapping("test", "obj1.field1", "type=keyword,store=true", "obj1.field2", "type=keyword,store=true") + prepareCreate("test").setMapping("obj1.field1", "type=keyword,store=true", "obj1.field2", "type=keyword,store=true") .addAlias(new Alias("alias")) ); ensureGreen("test"); @@ -212,7 +212,7 @@ public void testExplainWithSource() throws Exception { public void testExplainWithFilteredAlias() { assertAcked( - prepareCreate("test").addMapping("test", "field2", "type=text") + prepareCreate("test").setMapping("field2", "type=text") .addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))) ); ensureGreen("test"); @@ -231,7 +231,7 @@ public void testExplainWithFilteredAliasFetchSource() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("test", "field2", "type=text") + .setMapping("field2", "type=text") .addAlias(new Alias("alias1").filter(QueryBuilders.termQuery("field2", "value2"))) ); ensureGreen("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java index a09778582b604..2f811d4a901bf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/get/GetActionIT.java @@ -84,7 +84,7 @@ protected Collection> nodePlugins() { public void testSimpleGet() { assertAcked( - prepareCreate("test").addMapping("type1", "field1", "type=keyword,store=true", "field2", "type=keyword,store=true") + prepareCreate("test").setMapping("field1", "type=keyword,store=true", "field2", "type=keyword,store=true") .setSettings(Settings.builder().put("index.refresh_interval", -1)) .addAlias(new 
Alias("alias").writeIndex(randomFrom(true, false, null))) ); @@ -234,7 +234,7 @@ static String indexOrAlias() { public void testSimpleMultiGet() throws Exception { assertAcked( prepareCreate("test").addAlias(new Alias("alias").writeIndex(randomFrom(true, false, null))) - .addMapping("type1", "field", "type=keyword,store=true") + .setMapping("field", "type=keyword,store=true") .setSettings(Settings.builder().put("index.refresh_interval", -1)) ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java index 1137a3038fd18..9940b1eb13a52 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java @@ -79,12 +79,12 @@ public void testSimpleStats() throws Exception { assertAcked( prepareCreate("test1").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, shardsIdx1).put(SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("type", "f", "type=text") + ).setMapping("f", "type=text") ); assertAcked( prepareCreate("test2").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, shardsIdx2).put(SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("type", "f", "type=text") + ).setMapping("f", "type=text") ); assertThat(shardsIdx1 + shardsIdx2, equalTo(numAssignedShards("test1", "test2"))); assertThat(numAssignedShards("test1", "test2"), greaterThanOrEqualTo(2)); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java index 18940cba80799..12fee85288bc2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java @@ -73,7 +73,7 @@ 
public void testCacheAggs() throws Exception { client.admin() .indices() .prepareCreate("index") - .addMapping("type", "f", "type=date") + .setMapping("f", "type=date") .setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) .get() ); @@ -137,7 +137,7 @@ public void testQueryRewrite() throws Exception { client.admin() .indices() .prepareCreate("index") - .addMapping("type", "s", "type=date") + .setMapping("s", "type=date") .setSettings( Settings.builder() .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true) @@ -208,7 +208,7 @@ public void testQueryRewriteMissingValues() throws Exception { client.admin() .indices() .prepareCreate("index") - .addMapping("type", "s", "type=date") + .setMapping("s", "type=date") .setSettings( Settings.builder() .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true) @@ -274,7 +274,7 @@ public void testQueryRewriteDates() throws Exception { client.admin() .indices() .prepareCreate("index") - .addMapping("type", "d", "type=date") + .setMapping("d", "type=date") .setSettings( Settings.builder() .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true) @@ -345,9 +345,9 @@ public void testQueryRewriteDatesWithNow() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .build(); - assertAcked(client.admin().indices().prepareCreate("index-1").addMapping("type", "d", "type=date").setSettings(settings).get()); - assertAcked(client.admin().indices().prepareCreate("index-2").addMapping("type", "d", "type=date").setSettings(settings).get()); - assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date").setSettings(settings).get()); + assertAcked(client.admin().indices().prepareCreate("index-1").setMapping("d", "type=date").setSettings(settings).get()); + assertAcked(client.admin().indices().prepareCreate("index-2").setMapping("d", 
"type=date").setSettings(settings).get()); + assertAcked(client.admin().indices().prepareCreate("index-3").setMapping("d", "type=date").setSettings(settings).get()); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time"); indexRandom( @@ -426,7 +426,7 @@ public void testCanCache() throws Exception { .put("index.number_of_routing_shards", 2) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .build(); - assertAcked(client.admin().indices().prepareCreate("index").addMapping("type", "s", "type=date").setSettings(settings).get()); + assertAcked(client.admin().indices().prepareCreate("index").setMapping("s", "type=date").setSettings(settings).get()); indexRandom( true, client.prepareIndex("index").setId("1").setRouting("1").setSource("s", "2016-03-19"), @@ -529,7 +529,7 @@ public void testCacheWithFilteredAlias() { client.admin() .indices() .prepareCreate("index") - .addMapping("type", "created_at", "type=date") + .setMapping("created_at", "type=date") .setSettings(settings) .addAlias(new Alias("last_week").filter(QueryBuilders.rangeQuery("created_at").gte("now-7d/d"))) .get() @@ -578,7 +578,7 @@ public void testProfileDisableCache() throws Exception { client.admin() .indices() .prepareCreate("index") - .addMapping("_doc", "k", "type=keyword") + .setMapping("k", "type=keyword") .setSettings( Settings.builder() .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java index 7218495898677..1d25051eefe44 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/analyze/AnalyzeActionIT.java @@ -99,7 +99,7 @@ public void testSimpleAnalyzerTests() throws Exception { } public void 
testAnalyzeNumericField() throws IOException { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("test", "long", "type=long", "double", "type=double")); + assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setMapping("long", "type=long", "double", "type=double")); ensureGreen("test"); expectThrows( @@ -413,7 +413,7 @@ public void testCustomTokenFilterInRequest() throws Exception { } public void testAnalyzeKeywordField() throws IOException { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("test", "keyword", "type=keyword")); + assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setMapping("keyword", "type=keyword")); ensureGreen("test"); AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); @@ -435,7 +435,7 @@ public void testAnalyzeNormalizedKeywordField() throws IOException { .put("index.analysis.normalizer.my_normalizer.type", "custom") .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase") ) - .addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer") + .setMapping("keyword", "type=keyword,normalizer=my_normalizer") ); ensureGreen("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java index e9bb9f5a90477..2dc6b2085b866 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -142,8 +142,7 @@ public void testMemoryBreaker() throws Exception { return; } assertAcked( - prepareCreate("cb-test", 1, Settings.builder().put(SETTING_NUMBER_OF_REPLICAS, between(0, 1))).addMapping( - "type", + prepareCreate("cb-test", 1, 
Settings.builder().put(SETTING_NUMBER_OF_REPLICAS, between(0, 1))).setMapping( "test", "type=text,fielddata=true" ) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java index 7fd2466647272..08cf33a342c65 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -62,7 +62,7 @@ public void testPrimaryRelocationWhileIndexing() throws Exception { .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)) - .addMapping("type", "field", "type=text") + .setMapping("field", "type=text") .get(); ensureGreen("test"); AtomicInteger numAutoGenDocs = new AtomicInteger(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java index a24f55be3f010..be8a1ad4f0c61 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java @@ -148,7 +148,7 @@ public void testFieldDataStats() { .indices() .prepareCreate("test") .setSettings(settingsBuilder().put("index.number_of_shards", 2)) - .addMapping("type", "field", "type=text,fielddata=true", "field2", "type=text,fielddata=true") + .setMapping("field", "type=text,fielddata=true", "field2", "type=text,fielddata=true") .get() ); ensureGreen(); @@ -270,7 +270,7 @@ public void testClearAllCaches() throws Exception { .indices() .prepareCreate("test") .setSettings(settingsBuilder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2)) - .addMapping("type", "field", "type=text,fielddata=true") + 
.setMapping("field", "type=text,fielddata=true") .get() ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java index 6065db46d8ee2..090cf81de60a3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/template/SimpleIndexTemplateIT.java @@ -624,7 +624,7 @@ public void testIndexTemplateWithAliasesInSource() { ) .get(); - assertAcked(prepareCreate("test_index").addMapping("_doc")); + assertAcked(prepareCreate("test_index")); ensureGreen(); GetAliasesResponse getAliasesResponse = client().admin().indices().prepareGetAliases().setIndices("test_index").get(); @@ -663,7 +663,7 @@ public void testIndexTemplateWithAliasesSource() { ) .get(); - assertAcked(prepareCreate("test_index").addMapping("_doc")); + assertAcked(prepareCreate("test_index")); ensureGreen(); GetAliasesResponse getAliasesResponse = client().admin().indices().prepareGetAliases().setIndices("test_index").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java index 71da9168c6205..1708454faf7b3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/recovery/TruncatedRecoveryIT.java @@ -108,7 +108,7 @@ public void testCancelRecoveryAndResume() throws Exception { // create the index and prevent allocation on any other nodes than the lucky one // we have no replicas so far and make sure that we allocate the primary on the lucky node assertAcked( - prepareCreate("test").addMapping("type1", "field1", "type=text", "the_id", "type=text") + prepareCreate("test").setMapping("field1", "type=text", 
"the_id", "type=text") .setSettings( Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java index 303b84151cf3e..6778765599fe9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java @@ -52,7 +52,7 @@ public class AggregationsIntegrationIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("index").addMapping("type", "f", "type=keyword").get()); + assertAcked(prepareCreate("index").setMapping("f", "type=keyword").get()); numDocs = randomIntBetween(1, 20); List docs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java index c00152a54bd37..f210af7c10fb3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java @@ -53,7 +53,7 @@ public class MetadataIT extends OpenSearchIntegTestCase { public void testMetadataSetOnAggregationResult() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "name", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("name", "type=keyword").get()); IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)]; for (int i = 0; i < builders.length; i++) { String name = "name_" + randomIntBetween(1, 10); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java index 9135ca0f0a364..7d3f06760882d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java @@ -67,9 +67,7 @@ protected int maximumNumberOfShards() { @Override protected void setupSuiteScopeCluster() throws Exception { - assertAcked( - prepareCreate("idx").addMapping("type", "date", "type=date", "location", "type=geo_point", "str", "type=keyword").get() - ); + assertAcked(prepareCreate("idx").setMapping("date", "type=date", "location", "type=geo_point", "str", "type=keyword").get()); indexRandom( true, client().prepareIndex("idx").setId("1").setSource(), diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java index 5d54359152816..af3cc85ed40c0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java @@ -117,7 +117,7 @@ public void setupSuiteScopeCluster() throws Exception { builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } - prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index 
2c095857089e1..782bcde39ce8d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -144,7 +144,7 @@ private IndexRequestBuilder indexDoc(int month, int day, int value) throws Excep public void setupSuiteScopeCluster() throws Exception { createIndex("idx", "idx_unmapped"); // TODO: would be nice to have more random data here - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping("value", "type=integer")); List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( @@ -188,7 +188,7 @@ private void getMultiSortDocs(List builders) throws IOExcep addExpectedBucket(date(1, 6), 1, 5, 1); addExpectedBucket(date(1, 7), 1, 5, 1); - assertAcked(client().admin().indices().prepareCreate("sort_idx").addMapping("type", "date", "type=date").get()); + assertAcked(client().admin().indices().prepareCreate("sort_idx").setMapping("date", "type=date").get()); for (int i = 1; i <= 3; i++) { builders.add( client().prepareIndex("sort_idx") @@ -1038,7 +1038,7 @@ public void testEmptyAggregation() throws Exception { } public void testSingleValueWithTimeZone() throws Exception { - prepareCreate("idx2").addMapping("type", "date", "type=date").get(); + prepareCreate("idx2").setMapping("date", "type=date").get(); IndexRequestBuilder[] reqs = new IndexRequestBuilder[5]; ZonedDateTime date = date("2014-03-11T00:00:00+00:00"); for (int i = 0; i < reqs.length; i++) { @@ -1394,7 +1394,7 @@ public void testIssue6965() { } public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionException { - assertAcked(client().admin().indices().prepareCreate("test9491").addMapping("type", "d", "type=date").get()); + assertAcked(client().admin().indices().prepareCreate("test9491").setMapping("d", "type=date").get()); 
indexRandom( true, client().prepareIndex("test9491").setSource("d", "2014-10-08T13:00:00Z"), @@ -1417,7 +1417,7 @@ public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionExc } public void testIssue8209() throws InterruptedException, ExecutionException { - assertAcked(client().admin().indices().prepareCreate("test8209").addMapping("type", "d", "type=date").get()); + assertAcked(client().admin().indices().prepareCreate("test8209").setMapping("d", "type=date").get()); indexRandom( true, client().prepareIndex("test8209").setSource("d", "2014-01-01T00:00:00Z"), @@ -1498,7 +1498,7 @@ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionExce */ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, ExecutionException { String index = "test31392"; - assertAcked(client().admin().indices().prepareCreate(index).addMapping("type", "d", "type=date,format=epoch_millis").get()); + assertAcked(client().admin().indices().prepareCreate(index).setMapping("d", "type=date,format=epoch_millis").get()); indexRandom(true, client().prepareIndex(index).setSource("d", "1477954800000")); ensureSearchable(index); SearchResponse response = client().prepareSearch(index) @@ -1608,7 +1608,7 @@ public void testDSTEndTransition() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=date") + prepareCreate("cache_test_idx").setMapping("d", "type=date") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); @@ -1828,7 +1828,7 @@ private ZonedDateTime key(Histogram.Bucket bucket) { * timeZones. 
*/ public void testDateNanosHistogram() throws Exception { - assertAcked(prepareCreate("nanos").addMapping("_doc", "date", "type=date_nanos").get()); + assertAcked(prepareCreate("nanos").setMapping("date", "type=date_nanos").get()); indexRandom(true, client().prepareIndex("nanos").setId("1").setSource("date", "2000-01-01")); indexRandom(true, client().prepareIndex("nanos").setId("2").setSource("date", "2000-01-02")); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 2505cb48245c3..19e5bdb8916b8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -72,7 +72,7 @@ private ZonedDateTime date(String date) { @Before public void beforeEachTest() throws IOException { - prepareCreate("idx2").addMapping("type", "date", "type=date").get(); + prepareCreate("idx2").setMapping("date", "type=date").get(); } @After diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java index 7a28df00980cc..470ee6a4d2cea 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java @@ -125,7 +125,7 @@ public void setupSuiteScopeCluster() throws Exception { for (int i = docs.size(); i < numDocs; ++i) { docs.add(indexDoc(randomIntBetween(6, 10), randomIntBetween(1, 20), randomInt(100))); } - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping("value", 
"type=integer")); for (int i = 0; i < 2; i++) { docs.add( client().prepareIndex("empty_bucket_idx") @@ -913,7 +913,7 @@ public void testNoRangesInQuery() { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "date", "type=date") + prepareCreate("cache_test_idx").setMapping("date", "type=date") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); @@ -1070,7 +1070,7 @@ public void testScriptCaching() throws Exception { */ public void testRangeWithFormatStringValue() throws Exception { String indexName = "dateformat_test_idx"; - assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=strict_hour_minute_second")); + assertAcked(prepareCreate(indexName).setMapping("date", "type=date,format=strict_hour_minute_second")); indexRandom( true, client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", "00:16:40").endObject()), @@ -1132,7 +1132,7 @@ public void testRangeWithFormatStringValue() throws Exception { */ public void testRangeWithFormatNumericValue() throws Exception { String indexName = "dateformat_numeric_test_idx"; - assertAcked(prepareCreate(indexName).addMapping("type", "date", "type=date,format=epoch_second")); + assertAcked(prepareCreate(indexName).setMapping("date", "type=date,format=epoch_second")); indexRandom( true, client().prepareIndex(indexName).setId("1").setSource(jsonBuilder().startObject().field("date", 1002).endObject()), diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java index aa4bb671d14e8..5b01e7573908c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -78,14 +78,14 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked( prepareCreate("test").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("book", "author", "type=keyword", "name", "type=keyword", "genre", "type=keyword", "price", "type=float") + ).setMapping("author", "type=keyword", "name", "type=keyword", "genre", "type=keyword", "price", "type=float") ); createIndex("idx_unmapped"); // idx_unmapped_author is same as main index but missing author field assertAcked( prepareCreate("idx_unmapped_author").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("book", "name", "type=keyword", "genre", "type=keyword", "price", "type=float") + ).setMapping("name", "type=keyword", "genre", "type=keyword", "price", "type=float") ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java index 6c6e6ccc679e8..3093c7490a2a5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java @@ -178,7 +178,7 @@ public void setupSuiteScopeCluster() throws Exception { } createIndex("idx_unmapped"); - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") @@ -238,7 +238,7 @@ private void getMultiSortDocs(List builders) throws IOExcep bucketProps.put("sum_d", 1d); 
expectedMultiSortBuckets.put((Double) bucketProps.get("_term"), bucketProps); - assertAcked(prepareCreate("sort_idx").addMapping("multi_sort_type", SINGLE_VALUED_FIELD_NAME, "type=double")); + assertAcked(prepareCreate("sort_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=double")); for (int i = 1; i <= 3; i++) { builders.add( client().prepareIndex("sort_idx") @@ -980,7 +980,7 @@ public void testOtherDocCount() { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=float") + prepareCreate("cache_test_idx").setMapping("d", "type=float") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java index b938db8891d7b..0845db4f332d7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java @@ -90,7 +90,7 @@ public void setupSuiteScopeCluster() throws Exception { builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } - prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java index 0b895f32a1259..a64fe61b29b8a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java @@ -109,7 +109,7 @@ public void setupSuiteScopeCluster() throws Exception { builders.add(client().prepareIndex("idx").setId("" + i).setSource(source)); } } - prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java index a2d6533ae0afb..603a141abcaec 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java @@ -93,9 +93,9 @@ private IndexRequestBuilder indexCity(String idx, String name, String... 
latLons @Override public void setupSuiteScopeCluster() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - prepareCreate("idx").setSettings(settings).addMapping("type", "location", "type=geo_point", "city", "type=keyword").get(); + prepareCreate("idx").setSettings(settings).setMapping("location", "type=geo_point", "city", "type=keyword").get(); - prepareCreate("idx-multi").addMapping("type", "location", "type=geo_point", "city", "type=keyword").get(); + prepareCreate("idx-multi").setMapping("location", "type=geo_point", "city", "type=keyword").get(); createIndex("idx_unmapped"); @@ -138,7 +138,7 @@ public void setupSuiteScopeCluster() throws Exception { cities.add(indexCity("idx-multi", cityName)); } indexRandom(true, cities); - prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "location", "type=geo_point").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer", "location", "type=geo_point").get(); List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java index c7c21c203af61..56d918feef9d8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoHashGridIT.java @@ -101,7 +101,7 @@ public void setupSuiteScopeCluster() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - assertAcked(prepareCreate("idx").setSettings(settings).addMapping("type", "location", "type=geo_point", "city", "type=keyword")); + assertAcked(prepareCreate("idx").setSettings(settings).setMapping("location", "type=geo_point", "city", "type=keyword")); 
List cities = new ArrayList<>(); Random random = random(); @@ -126,7 +126,7 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom(true, cities); assertAcked( - prepareCreate("multi_valued_idx").setSettings(settings).addMapping("type", "location", "type=geo_point", "city", "type=keyword") + prepareCreate("multi_valued_idx").setSettings(settings).setMapping("location", "type=geo_point", "city", "type=keyword") ); cities = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java index fbdcc6b878943..dae788abe0d10 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java @@ -180,7 +180,7 @@ public void setupSuiteScopeCluster() throws Exception { getMultiSortDocs(builders); - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") @@ -211,7 +211,7 @@ private void getMultiSortDocs(List builders) throws IOExcep addExpectedBucket(6, 1, 5, 1); addExpectedBucket(7, 1, 5, 1); - assertAcked(client().admin().indices().prepareCreate("sort_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=double").get()); + assertAcked(client().admin().indices().prepareCreate("sort_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=double").get()); for (int i = 1; i <= 3; i++) { builders.add( client().prepareIndex("sort_idx") @@ -1124,7 +1124,7 @@ public void testExeptionOnNegativerInterval() { } public void testDecimalIntervalAndOffset() throws Exception { - assertAcked(prepareCreate("decimal_values").addMapping("type", "d", 
"type=float").get()); + assertAcked(prepareCreate("decimal_values").setMapping("d", "type=float").get()); indexRandom( true, client().prepareIndex("decimal_values").setId("1").setSource("d", -0.6), @@ -1151,7 +1151,7 @@ public void testDecimalIntervalAndOffset() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=float") + prepareCreate("cache_test_idx").setMapping("d", "type=float") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); @@ -1349,7 +1349,7 @@ public void testInvalidBounds() { } public void testHardBounds() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "d", "type=double").get()); + assertAcked(prepareCreate("test").setMapping("d", "type=double").get()); indexRandom( true, client().prepareIndex("test").setId("1").setSource("d", -0.6), diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java index b768631225b90..f8f666aaa3c1b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java @@ -70,7 +70,7 @@ protected Collection> nodePlugins() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx").addMapping("type", "ip", "type=ip", "ips", "type=ip")); + assertAcked(prepareCreate("idx").setMapping("ip", "type=ip", "ips", "type=ip")); waitForRelocation(ClusterHealthStatus.GREEN); indexRandom( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java index 53ff70dd240d1..cff51e74fdbd0 
100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java @@ -76,7 +76,7 @@ protected Map, Object>> pluginScripts() { } public void testScriptValue() throws Exception { - assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); + assertAcked(prepareCreate("index").setMapping("ip", "type=ip")); indexRandom( true, client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), @@ -104,7 +104,7 @@ public void testScriptValue() throws Exception { } public void testScriptValues() throws Exception { - assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); + assertAcked(prepareCreate("index").setMapping("ip", "type=ip")); indexRandom( true, client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), @@ -132,7 +132,7 @@ public void testScriptValues() throws Exception { } public void testMissingValue() throws Exception { - assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); + assertAcked(prepareCreate("index").setMapping("ip", "type=ip")); indexRandom( true, client().prepareIndex("index").setId("1").setSource("ip", "192.168.1.7"), diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java index 115b30643ff21..87968bd2117c6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java @@ -164,7 +164,7 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom(true, highCardBuilders); createIndex("idx_unmapped"); - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + 
assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( @@ -928,7 +928,7 @@ public void testOtherDocCount() { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java index 47cddbf856090..08e696245209e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java @@ -117,7 +117,7 @@ protected Map, Object>> pluginScripts() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "s", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("s", "type=keyword").get()); cardinality = randomIntBetween(8, 30); final List indexRequests = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java index f03a3bdeb1716..3a3e02c577096 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java @@ -132,7 +132,7 @@ public String sortKey() { @Override public void setupSuiteScopeCluster() 
throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "string_value", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("string_value", "type=keyword").get()); final int numDocs = randomIntBetween(2, 10); for (int i = 0; i < numDocs; ++i) { final long value = randomInt(5); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java index 969a48b514f1a..fcde2f628ecd7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java @@ -96,7 +96,7 @@ public class NestedIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx").addMapping("type", "nested", "type=nested", "incorrect", "type=object")); + assertAcked(prepareCreate("idx").setMapping("nested", "type=nested", "incorrect", "type=object")); ensureGreen("idx"); List builders = new ArrayList<>(); @@ -126,7 +126,7 @@ public void setupSuiteScopeCluster() throws Exception { builders.add(client().prepareIndex("idx").setId("" + i + 1).setSource(source)); } - prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "nested", "type=nested").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer", "nested", "type=nested").get(); ensureGreen("empty_bucket_idx"); for (int i = 0; i < 2; i++) { builders.add( @@ -539,7 +539,7 @@ public void testParentFilterResolvedCorrectly() throws Exception { public void testNestedSameDocIdProcessedMultipleTime() throws Exception { assertAcked( prepareCreate("idx4").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping("product", "categories", 
"type=keyword", "name", "type=text", "property", "type=nested") + .setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") ); ensureGreen("idx4"); @@ -808,7 +808,7 @@ public void testExtractInnerHitBuildersWithDuplicateHitName() throws Exception { assertAcked( prepareCreate("idxduplicatehitnames").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("product", "categories", "type=keyword", "name", "type=text", "property", "type=nested") + ).setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") ); ensureGreen("idxduplicatehitnames"); @@ -832,7 +832,7 @@ public void testExtractInnerHitBuildersWithDuplicatePath() throws Exception { assertAcked( prepareCreate("idxnullhitnames").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("product", "categories", "type=keyword", "name", "type=text", "property", "type=nested") + ).setMapping("categories", "type=keyword", "name", "type=text", "property", "type=nested") ); ensureGreen("idxnullhitnames"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java index bfbfc53ed7e76..c46d6dcd847e1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java @@ -136,7 +136,7 @@ public void setupSuiteScopeCluster() throws Exception { ); } createIndex("idx_unmapped"); - prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer").get(); + prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer").get(); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") @@ -152,8 +152,8 @@ 
public void setupSuiteScopeCluster() throws Exception { // Create two indices and add the field 'route_length_miles' as an alias in // one, and a concrete field in the other. - prepareCreate("old_index").addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance").get(); - prepareCreate("new_index").addMapping("_doc", "route_length_miles", "type=double").get(); + prepareCreate("old_index").setMapping("distance", "type=double", "route_length_miles", "type=alias,path=distance").get(); + prepareCreate("new_index").setMapping("route_length_miles", "type=double").get(); builders.add(client().prepareIndex("old_index").setSource("distance", 42.0)); builders.add(client().prepareIndex("old_index").setSource("distance", 50.5)); @@ -931,7 +931,7 @@ public void testEmptyAggregation() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "i", "type=integer") + prepareCreate("cache_test_idx").setMapping("i", "type=integer") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java index 94204b6519374..0bfeff9297ce8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java @@ -77,14 +77,14 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked( prepareCreate("test").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("book", "author", "type=keyword", "name", "type=text", "genre", "type=keyword", "price", "type=float") + ).setMapping("author", "type=keyword", 
"name", "type=text", "genre", "type=keyword", "price", "type=float") ); createIndex("idx_unmapped"); // idx_unmapped_author is same as main index but missing author field assertAcked( prepareCreate("idx_unmapped_author").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, NUM_SHARDS).put(SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("book", "name", "type=text", "genre", "type=keyword", "price", "type=float") + ).setMapping("name", "type=text", "genre", "type=keyword", "price", "type=float") ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java index 2300e42b84bbc..7352dc7170a21 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java @@ -94,8 +94,7 @@ private IndexRequestBuilder indexDoc(String date, int value) throws Exception { @Override public void setupSuiteScopeCluster() throws Exception { assertAcked( - prepareCreate("idx").addMapping( - "type", + prepareCreate("idx").setMapping( "nested", "type=nested", "ip", diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index cbcc9c396fc06..9c334df1d806b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -40,7 +40,6 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; 
-import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; @@ -215,7 +214,7 @@ public void testPopularTermManyDeletedDocs() throws Exception { String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}"; assertAcked( prepareCreate(INDEX_NAME).setSettings(settings, XContentType.JSON) - .addMapping("_doc", "text", "type=keyword", CLASS_FIELD, "type=keyword") + .setMapping("text", "type=keyword", CLASS_FIELD, "type=keyword") ); String[] cat1v1 = { "constant", "one" }; String[] cat1v2 = { "constant", "uno" }; @@ -453,7 +452,7 @@ public void testSubAggregations() throws Exception { private void indexEqualTestData() throws ExecutionException, InterruptedException { assertAcked( prepareCreate("test").setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping("_doc", "text", "type=text,fielddata=true", "class", "type=keyword") + .setMapping("text", "type=text,fielddata=true", "class", "type=keyword") ); createIndex("idx_unmapped"); @@ -545,9 +544,7 @@ private void indexRandomFrequencies01(String type) throws ExecutionException, In if (type.equals("text")) { textMappings += ",fielddata=true"; } - assertAcked( - prepareCreate(INDEX_NAME).addMapping(MapperService.SINGLE_MAPPING_NAME, TEXT_FIELD, textMappings, CLASS_FIELD, "type=keyword") - ); + assertAcked(prepareCreate(INDEX_NAME).setMapping(TEXT_FIELD, textMappings, CLASS_FIELD, "type=keyword")); String[] gb = { "0", "1" }; List indexRequestBuilderList = new ArrayList<>(); for (int i = 0; i < randomInt(20); i++) { @@ -575,7 +572,7 @@ public void testReduceFromSeveralShards() throws IOException, ExecutionException */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") 
.setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java index 9b941860177bb..3d76b994ebac3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ -74,7 +74,7 @@ public static String randomExecutionHint() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", STRING_FIELD_NAME, "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping(STRING_FIELD_NAME, "type=keyword").get()); List builders = new ArrayList<>(); int numDocs = between(10, 200); int numUniqueTerms = between(2, numDocs / 2); @@ -92,7 +92,7 @@ public void setupSuiteScopeCluster() throws Exception { ); } assertAcked( - prepareCreate("idx_single_shard").addMapping("type", STRING_FIELD_NAME, "type=keyword") + prepareCreate("idx_single_shard").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) ); for (int i = 0; i < numDocs; i++) { @@ -125,7 +125,7 @@ public void setupSuiteScopeCluster() throws Exception { ); } assertAcked( - prepareCreate("idx_fixed_docs_0").addMapping("type", STRING_FIELD_NAME, "type=keyword") + prepareCreate("idx_fixed_docs_0").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) ); Map shard0DocsPerTerm = new HashMap<>(); @@ -151,7 +151,7 @@ public void setupSuiteScopeCluster() throws Exception { } assertAcked( - 
prepareCreate("idx_fixed_docs_1").addMapping("type", STRING_FIELD_NAME, "type=keyword") + prepareCreate("idx_fixed_docs_1").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) ); Map shard1DocsPerTerm = new HashMap<>(); @@ -177,7 +177,7 @@ public void setupSuiteScopeCluster() throws Exception { } assertAcked( - prepareCreate("idx_fixed_docs_2").addMapping("type", STRING_FIELD_NAME, "type=keyword") + prepareCreate("idx_fixed_docs_2").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) ); Map shard2DocsPerTerm = new HashMap<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index af006210326d8..852c3760751b3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -35,7 +35,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.filter.InternalFilter; @@ -73,7 +72,7 @@ public void testShardMinDocCountSignificantTermsTest() throws Exception { } assertAcked( prepareCreate(index).setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping(MapperService.SINGLE_MAPPING_NAME, "text", textMappings) + .setMapping("text", textMappings) ); List indexBuilders = new ArrayList<>(); @@ -142,7 +141,7 @@ public void 
testShardMinDocCountTermsTest() throws Exception { } assertAcked( prepareCreate(index).setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0)) - .addMapping(MapperService.SINGLE_MAPPING_NAME, "text", termMappings) + .setMapping("text", termMappings) ); List indexBuilders = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java index 252ffeb4ca0e7..3190bcb72fcbb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -157,15 +157,7 @@ public void setupSuiteScopeCluster() throws Exception { client().admin() .indices() .prepareCreate("idx") - .addMapping( - "type", - SINGLE_VALUED_FIELD_NAME, - "type=keyword", - MULTI_VALUED_FIELD_NAME, - "type=keyword", - "tag", - "type=keyword" - ) + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") .get() ); List builders = new ArrayList<>(); @@ -193,15 +185,7 @@ public void setupSuiteScopeCluster() throws Exception { client().admin() .indices() .prepareCreate("high_card_idx") - .addMapping( - "type", - SINGLE_VALUED_FIELD_NAME, - "type=keyword", - MULTI_VALUED_FIELD_NAME, - "type=keyword", - "tag", - "type=keyword" - ) + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") .get() ); for (int i = 0; i < 100; i++) { @@ -218,7 +202,7 @@ public void setupSuiteScopeCluster() throws Exception { ) ); } - prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer").get(); + prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer").get(); for (int i = 0; 
i < 2; i++) { builders.add( @@ -281,15 +265,7 @@ private void getMultiSortDocs(List builders) throws IOExcep client().admin() .indices() .prepareCreate("sort_idx") - .addMapping( - "type", - SINGLE_VALUED_FIELD_NAME, - "type=keyword", - MULTI_VALUED_FIELD_NAME, - "type=keyword", - "tag", - "type=keyword" - ) + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") .get() ); for (int i = 1; i <= 3; i++) { @@ -1262,7 +1238,7 @@ public void testOtherDocCount() { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=keyword") + prepareCreate("cache_test_idx").setMapping("d", "type=keyword") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java index 7996e2bee44c1..147f451c14de8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java @@ -491,7 +491,7 @@ public void testAsSubAgg() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java index 9549aad5399b5..cd0a649659c6e 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -869,7 +869,7 @@ private void checkUpperLowerBounds(ExtendedStats stats, double sigma) { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 7aa602fff2ee8..20fc6aaee20c9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -585,7 +585,7 @@ public void testOrderByEmptyAggregation() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java index 68f8cf6da575a..2660dbe0a88ed 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -554,7 +554,7 @@ public void testOrderByEmptyAggregation() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index 79f1809fc2f3a..226b4dbca18d9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -137,7 +137,7 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom(true, builders); - prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { @@ -516,7 +516,7 @@ public void testOrderByEmptyAggregation() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java index 
beacf7aa1ccec..27dbc56cf3b79 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -310,7 +310,7 @@ public void setupSuiteScopeCluster() throws Exception { // "1". then each test will have // to check that this bucket exists with the appropriate sub // aggregations. - prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( @@ -1183,7 +1183,7 @@ public void testScriptCaching() throws Exception { Script ndRandom = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return Math.random()", Collections.emptyMap()); assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java index 27fc26a114cc4..debdde8e13fe7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java @@ -258,7 +258,7 @@ private void assertShardExecutionState(SearchResponse response, int expectedFail */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 
1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java index e9b8c91090695..fe236f04c19e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java @@ -79,8 +79,7 @@ public void setupSuiteScopeCluster() throws Exception { // Create two indices and add the field 'route_length_miles' as an alias in // one, and a concrete field in the other. - prepareCreate("old_index").addMapping( - "_doc", + prepareCreate("old_index").setMapping( "transit_mode", "type=keyword", "distance", @@ -88,7 +87,7 @@ public void setupSuiteScopeCluster() throws Exception { "route_length_miles", "type=alias,path=distance" ).get(); - prepareCreate("new_index").addMapping("_doc", "transit_mode", "type=keyword", "route_length_miles", "type=double").get(); + prepareCreate("new_index").setMapping("transit_mode", "type=keyword", "route_length_miles", "type=double").get(); List builders = new ArrayList<>(); builders.add(client().prepareIndex("old_index").setSource("transit_mode", "train", "distance", 42.0)); @@ -236,7 +235,7 @@ public void testOrderByEmptyAggregation() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 8b28261f7f00b..adf027222d7d9 
100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -497,7 +497,7 @@ public void testOrderByEmptyAggregation() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java index 2da6ac3f9e586..fda15f9b90ea2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -469,7 +469,7 @@ public void testOrderByEmptyAggregation() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java index 53a3cd4da5446..c3240c5eef7c5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java @@ -135,8 +135,8 @@ public static String randomExecutionHint() { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx").addMapping("type", TERMS_AGGS_FIELD, "type=keyword")); - assertAcked(prepareCreate("field-collapsing").addMapping("type", "group", "type=keyword")); + assertAcked(prepareCreate("idx").setMapping(TERMS_AGGS_FIELD, "type=keyword")); + assertAcked(prepareCreate("field-collapsing").setMapping("group", "type=keyword")); createIndex("empty"); assertAcked( prepareCreate("articles").setMapping( @@ -1143,7 +1143,7 @@ public void testNoStoredFields() throws Exception { public void testScriptCaching() throws Exception { try { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings( Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1) ) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java index 6d3fe1ed3f190..82e667bccc576 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java @@ -238,7 +238,7 @@ public void testMultiValuedScriptWithParams() throws Exception { */ public void testScriptCaching() throws Exception { assertAcked( - prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") + prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) .get() ); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java index 590587185b80e..6cd16a47e98d2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java @@ -71,7 +71,7 @@ public class AvgBucketIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -100,7 +100,7 @@ public void setupSuiteScopeCluster() throws Exception { valueCounts[bucket]++; } - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java index d7f16b25a46e8..2c7890fb7b1cb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -103,7 +103,7 @@ public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); createIndex("idx_unmapped"); // TODO: would be nice to have more random data here - prepareCreate("empty_bucket_idx").addMapping("type", "value", 
"type=integer").get(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get(); List builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java index cff655e040124..c03ed8277a3b4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java @@ -140,7 +140,7 @@ public void setupSuiteScopeCluster() throws Exception { valueCounts_empty = new Long[] { 1L, 1L, 2L, 0L, 2L, 2L, 0L, 0L, 0L, 3L, 2L, 1L }; firstDerivValueCounts_empty = new Double[] { null, 0d, 1d, -2d, 2d, 0d, -2d, 0d, 0d, 3d, -1d, -1d }; - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < valueCounts_empty.length; i++) { for (int docs = 0; docs < valueCounts_empty[i]; docs++) { builders.add(client().prepareIndex("empty_bucket_idx").setSource(newDocBuilder(i))); @@ -154,7 +154,7 @@ public void setupSuiteScopeCluster() throws Exception { firstDerivValueCounts_empty_rnd = new Double[numBuckets_empty_rnd]; firstDerivValueCounts_empty_rnd[0] = null; - assertAcked(prepareCreate("empty_bucket_idx_rnd").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx_rnd").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < numBuckets_empty_rnd; i++) { valueCounts_empty_rnd[i] = (long) randomIntBetween(1, 10); // make approximately half of the buckets empty diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 4400181eb2226..85fe794b05fc6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -74,7 +74,7 @@ public class ExtendedStatsBucketIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped", "idx_gappy"); numDocs = randomIntBetween(6, 20); @@ -113,7 +113,7 @@ public void setupSuiteScopeCluster() throws Exception { ); } - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java index 0d63e6d719610..22890620d6b15 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java @@ -85,7 +85,7 @@ public class MaxBucketIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", 
"type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -114,7 +114,7 @@ public void setupSuiteScopeCluster() throws Exception { valueCounts[bucket]++; } - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java index 33cc350f10ff1..b3929943f0d02 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java @@ -71,7 +71,7 @@ public class MinBucketIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -100,7 +100,7 @@ public void setupSuiteScopeCluster() throws Exception { valueCounts[bucket]++; } - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java 
index 6728c9f888aeb..1da079781dc63 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -75,7 +75,7 @@ public class PercentilesBucketIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -104,7 +104,7 @@ public void setupSuiteScopeCluster() throws Exception { valueCounts[bucket]++; } - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java index 90b0aba10e40a..e9f34f6aa65d9 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java @@ -71,7 +71,7 @@ public class StatsBucketIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -100,7 
+100,7 @@ public void setupSuiteScopeCluster() throws Exception { valueCounts[bucket]++; } - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java index 873c43d8b0f4c..5bd962017c247 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java @@ -71,7 +71,7 @@ public class SumBucketIT extends OpenSearchIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { - assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", "tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get()); createIndex("idx_unmapped"); numDocs = randomIntBetween(6, 20); @@ -100,7 +100,7 @@ public void setupSuiteScopeCluster() throws Exception { valueCounts[bucket]++; } - assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + assertAcked(prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer")); for (int i = 0; i < 2; i++) { builders.add( client().prepareIndex("empty_bucket_idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java index fedb6b18d93fb..c184d876dcb33 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java @@ -65,7 +65,7 @@ private void testSearchAndRelocateConcurrently(final int numberOfReplicas) throw .indices() .prepareCreate("test") .setSettings(Settings.builder().put("index.number_of_shards", numShards).put("index.number_of_replicas", numberOfReplicas)) - .addMapping("type", "loc", "type=geo_point", "test", "type=text") + .setMapping("loc", "type=geo_point", "test", "type=text") .get(); ensureGreen(); List indexBuilders = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java index 7982d9f5781fc..34b202cc09cf7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java @@ -65,7 +65,7 @@ protected int maximumNumberOfReplicas() { public void testFailedSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed search with wrong query"); - assertAcked(prepareCreate("test", 1).addMapping("type", "foo", "type=geo_point")); + assertAcked(prepareCreate("test", 1).setMapping("foo", "type=geo_point")); NumShards test = getNumShards("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java index e28ef3c00a485..837e1e7e23ddf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java @@ -242,7 +242,7 @@ public void testSimpleNested() throws Exception { } public void 
testRandomNested() throws Exception { - assertAcked(prepareCreate("idx").addMapping("type", "field1", "type=nested", "field2", "type=nested")); + assertAcked(prepareCreate("idx").setMapping("field1", "type=nested", "field2", "type=nested")); int numDocs = scaledRandomIntBetween(25, 100); List requestBuilders = new ArrayList<>(); @@ -538,7 +538,7 @@ public void testNestedMultipleLayers() throws Exception { // Issue #9723 public void testNestedDefinedAsObject() throws Exception { - assertAcked(prepareCreate("articles").addMapping("article", "comments", "type=nested", "title", "type=text")); + assertAcked(prepareCreate("articles").setMapping("comments", "type=nested", "title", "type=text")); List requests = new ArrayList<>(); requests.add( @@ -852,7 +852,7 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { } public void testNestedSource() throws Exception { - assertAcked(prepareCreate("index1").addMapping("message", "comments", "type=nested")); + assertAcked(prepareCreate("index1").setMapping("comments", "type=nested")); client().prepareIndex("index1") .setId("1") .setSource( @@ -947,7 +947,7 @@ public void testNestedSource() throws Exception { } public void testInnerHitsWithIgnoreUnmapped() throws Exception { - assertAcked(prepareCreate("index1").addMapping("_doc", "nested_type", "type=nested")); + assertAcked(prepareCreate("index1").setMapping("nested_type", "type=nested")); createIndex("index2"); client().prepareIndex("index1").setId("1").setSource("nested_type", Collections.singletonMap("key", "value")).get(); client().prepareIndex("index2").setId("3").setSource("key", "value").get(); @@ -967,7 +967,7 @@ public void testInnerHitsWithIgnoreUnmapped() throws Exception { } public void testUseMaxDocInsteadOfSize() throws Exception { - assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested")); + assertAcked(prepareCreate("index2").setMapping("nested", "type=nested")); client().admin() .indices() .prepareUpdateSettings("index2") 
@@ -990,7 +990,7 @@ public void testUseMaxDocInsteadOfSize() throws Exception { } public void testTooHighResultWindow() throws Exception { - assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested")); + assertAcked(prepareCreate("index2").setMapping("nested", "type=nested")); client().prepareIndex("index2") .setId("1") .setSource( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 74c2922ebf328..193a48cf0daa6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -332,9 +332,7 @@ public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOExc // see #3486 public void testHighTermFrequencyDoc() throws IOException { - assertAcked( - prepareCreate("test").addMapping("test", "name", "type=text,term_vector=with_positions_offsets,store=" + randomBoolean()) - ); + assertAcked(prepareCreate("test").setMapping("name", "type=text,term_vector=with_positions_offsets,store=" + randomBoolean())); StringBuilder builder = new StringBuilder(); for (int i = 0; i < 6000; i++) { builder.append("abc").append(" "); @@ -350,8 +348,7 @@ public void testHighTermFrequencyDoc() throws IOException { public void testEnsureNoNegativeOffsets() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "no_long_term", "type=text,term_vector=with_positions_offsets", "long_term", @@ -620,8 +617,7 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except public void testHighlightIssue1994() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "title", 
"type=text,store=false", "titleTV", @@ -697,8 +693,7 @@ public void testGlobalHighlightingSettingsOverriddenAtFieldLevel() { // Issue #5175 public void testHighlightingOnWildcardFields() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "field-postings", "type=text,index_options=offsets", "field-fvh", @@ -1277,7 +1272,7 @@ public XContentBuilder type1TermVectorMapping() throws IOException { } public void testSameContent() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets")); + assertAcked(prepareCreate("test").setMapping("title", "type=text,store=true,term_vector=with_positions_offsets")); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { @@ -1305,7 +1300,7 @@ public void testSameContent() throws Exception { } public void testFastVectorHighlighterOffsetParameter() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets").get()); + assertAcked(prepareCreate("test").setMapping("title", "type=text,store=true,term_vector=with_positions_offsets").get()); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { @@ -1327,7 +1322,7 @@ public void testFastVectorHighlighterOffsetParameter() throws Exception { } public void testEscapeHtml() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "title", "type=text,store=true")); + assertAcked(prepareCreate("test").setMapping("title", "type=text,store=true")); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { @@ -1348,7 +1343,7 @@ public void testEscapeHtml() throws Exception { } public void testEscapeHtmlVector() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "title", 
"type=text,store=true,term_vector=with_positions_offsets")); + assertAcked(prepareCreate("test").setMapping("title", "type=text,store=true,term_vector=with_positions_offsets")); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { @@ -1547,7 +1542,7 @@ public void testMultiMapperNoVectorFromSource() throws Exception { } public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "title", "type=text,store=true,term_vector=no")); + assertAcked(prepareCreate("test").setMapping("title", "type=text,store=true,term_vector=no")); ensureGreen(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -1584,9 +1579,7 @@ public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exceptio } public void testDisableFastVectorHighlighter() throws Exception { - assertAcked( - prepareCreate("test").addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets,analyzer=classic") - ); + assertAcked(prepareCreate("test").setMapping("title", "type=text,store=true,term_vector=with_positions_offsets,analyzer=classic")); ensureGreen(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; @@ -1645,7 +1638,7 @@ public void testDisableFastVectorHighlighter() throws Exception { } public void testFSHHighlightAllMvFragments() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "tags", "type=text,term_vector=with_positions_offsets")); + assertAcked(prepareCreate("test").setMapping("tags", "type=text,term_vector=with_positions_offsets")); ensureGreen(); client().prepareIndex("test") .setId("1") @@ -1737,7 +1730,7 @@ public void testCommonTermsTermVector() throws IOException { } public void testPlainHighlightDifferentFragmenter() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "tags", "type=text")); + 
assertAcked(prepareCreate("test").setMapping("tags", "type=text")); ensureGreen(); client().prepareIndex("test") .setId("1") @@ -1824,8 +1817,7 @@ public void testPlainHighlighterMultipleFields() { public void testFastVectorHighlighterMultipleFields() { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "field1", "type=text,term_vector=with_positions_offsets", "field2", @@ -1849,7 +1841,7 @@ public void testFastVectorHighlighterMultipleFields() { } public void testMissingStoredField() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "highlight_field", "type=text,store=true")); + assertAcked(prepareCreate("test").setMapping("highlight_field", "type=text,store=true")); ensureGreen(); client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("field", "highlight").endObject()).get(); refresh(); @@ -1869,8 +1861,7 @@ public void testMissingStoredField() throws Exception { // Issue #3211 public void testNumericHighlighting() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "test", + prepareCreate("test").setMapping( "text", "type=text", "byte", @@ -1911,7 +1902,7 @@ public void testResetTwice() throws Exception { assertAcked( prepareCreate("test").setSettings( Settings.builder().put(indexSettings()).put("analysis.analyzer.my_analyzer.type", "mock_whitespace").build() - ).addMapping("type", "text", "type=text,analyzer=my_analyzer") + ).setMapping("text", "type=text,analyzer=my_analyzer") ); ensureGreen(); client().prepareIndex("test").setId("1").setSource("text", "opensearch test").get(); @@ -1927,8 +1918,7 @@ public void testResetTwice() throws Exception { public void testHighlightUsesHighlightQuery() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets" ) @@ -1974,8 +1964,7 @@ private 
static String randomStoreField() { public void testHighlightNoMatchSize() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets" ) @@ -2085,8 +2074,7 @@ public void testHighlightNoMatchSize() throws IOException { public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets" ) @@ -2181,8 +2169,7 @@ public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { public void testHighlightNoMatchSizeNumberOfFragments() throws IOException { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "text", "type=text," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets" ) @@ -2506,7 +2493,7 @@ public void testPostingsHighlighterOrderByScore() throws Exception { } public void testPostingsHighlighterEscapeHtml() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "title", "type=text," + randomStoreField() + "index_options=offsets")); + assertAcked(prepareCreate("test").setMapping("title", "type=text," + randomStoreField() + "index_options=offsets")); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { @@ -3342,7 +3329,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { client().admin() .indices() .prepareCreate("index-1") - .addMapping("type", "d", "type=date", "field", "type=text,store=true,term_vector=with_positions_offsets") + .setMapping("d", "type=date", "field", "type=text,store=true,term_vector=with_positions_offsets") .setSettings(Settings.builder().put("index.number_of_replicas", 
0).put("index.number_of_shards", 2)) .get() ); @@ -3461,9 +3448,7 @@ public void testWithNestedQuery() throws Exception { public void testWithNormalizer() throws Exception { Builder builder = Settings.builder().put(indexSettings()).putList("index.analysis.normalizer.my_normalizer.filter", "lowercase"); - assertAcked( - prepareCreate("test").setSettings(builder.build()).addMapping("doc", "keyword", "type=keyword,normalizer=my_normalizer") - ); + assertAcked(prepareCreate("test").setSettings(builder.build()).setMapping("keyword", "type=keyword,normalizer=my_normalizer")); ensureGreen(); client().prepareIndex("test") @@ -3485,7 +3470,7 @@ public void testWithNormalizer() throws Exception { } public void testDisableHighlightIdField() throws Exception { - assertAcked(prepareCreate("test").addMapping("doc", "keyword", "type=keyword")); + assertAcked(prepareCreate("test").setMapping("keyword", "type=keyword")); ensureGreen(); client().prepareIndex("test") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java index aed2975ed3234..dacf388e2faa0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -222,8 +222,8 @@ public void testWithIndexAlias() { } public void testWithIndexFilter() throws InterruptedException { - assertAcked(prepareCreate("index-1").addMapping("_doc", "timestamp", "type=date", "field1", "type=keyword")); - assertAcked(prepareCreate("index-2").addMapping("_doc", "timestamp", "type=date", "field1", "type=long")); + assertAcked(prepareCreate("index-1").setMapping("timestamp", "type=date", "field1", "type=keyword")); + assertAcked(prepareCreate("index-2").setMapping("timestamp", "type=date", "field1", "type=long")); List reqs = new ArrayList<>(); 
reqs.add(client().prepareIndex("index-1").setSource("timestamp", "2015-07-08")); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index 46d47d838f68b..25782f8dc18db 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -444,7 +444,7 @@ public void testScriptFieldWithNanos() throws Exception { } public void testIdBasedScriptFields() throws Exception { - prepareCreate("test").addMapping("type1", "num1", "type=long").get(); + prepareCreate("test").setMapping("num1", "type=long").get(); int numDocs = randomIntBetween(1, 30); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; @@ -839,7 +839,7 @@ public void testGetFieldsComplexField() throws Exception { // see #8203 public void testSingleValueFieldDatatField() throws ExecutionException, InterruptedException { - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "test_field", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("test_field", "type=keyword").get()); indexRandom(true, client().prepareIndex("test").setId("1").setSource("test_field", "foobar")); refresh(); SearchResponse searchResponse = client().prepareSearch("test") @@ -1114,8 +1114,7 @@ public void testDocValueFields() throws Exception { public void testScriptFields() throws Exception { assertAcked( - prepareCreate("index").addMapping( - "type", + prepareCreate("index").setMapping( "s", "type=keyword", "l", diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java index 670f5e65eb575..0701e96b71f38 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java @@ -168,8 +168,7 @@ public void testConsistentHitsWithSameSeed() throws Exception { public void testScoreAccessWithinScript() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type", + prepareCreate("test").setMapping( "body", "type=text", "index", diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java index c2d75b6aa55af..d0b017732b270 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java @@ -66,8 +66,7 @@ protected void setupSuiteScopeCluster() throws Exception { Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); assertAcked( - prepareCreate("test").setSettings(settings) - .addMapping("type1", "location", "type=geo_point", "alias", "type=alias,path=location") + prepareCreate("test").setSettings(settings).setMapping("location", "type=geo_point", "alias", "type=alias,path=location") ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java index 7315155e39520..89eb6038d8110 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java @@ -136,9 +136,7 @@ public void testOrientationPersistence() throws Exception { */ public void testIgnoreMalformed() throws Exception { // create index - assertAcked( - client().admin().indices().prepareCreate("test").addMapping("geometry", 
"shape", "type=geo_shape,ignore_malformed=true").get() - ); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("shape", "type=geo_shape,ignore_malformed=true").get()); ensureGreen(); // test self crossing ccw poly not crossing dateline @@ -188,7 +186,7 @@ public void testIgnoreMalformed() throws Exception { public void testMappingUpdate() throws Exception { // create index - assertAcked(client().admin().indices().prepareCreate("test").addMapping("geometry", "shape", "type=geo_shape").get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("shape", "type=geo_shape").get()); ensureGreen(); String update = "{\n" diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java index 28b00acd21479..61af97d46e7f3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -131,11 +131,7 @@ public void testOrientationPersistence() throws Exception { public void testIgnoreMalformed() throws Exception { // create index assertAcked( - client().admin() - .indices() - .prepareCreate("test") - .addMapping("geometry", "shape", "type=geo_shape,tree=quadtree,ignore_malformed=true") - .get() + client().admin().indices().prepareCreate("test").setMapping("shape", "type=geo_shape,tree=quadtree,ignore_malformed=true").get() ); ensureGreen(); @@ -226,11 +222,7 @@ public void testIndexShapeRouting() throws Exception { public void testLegacyCircle() throws Exception { // create index assertAcked( - client().admin() - .indices() - .prepareCreate("test") - .addMapping("geometry", "shape", "type=geo_shape,strategy=recursive,tree=geohash") - .get() + client().admin().indices().prepareCreate("test").setMapping("shape", 
"type=geo_shape,strategy=recursive,tree=geohash").get() ); ensureGreen(); @@ -255,11 +247,7 @@ public void testDisallowExpensiveQueries() throws InterruptedException, IOExcept try { // create index assertAcked( - client().admin() - .indices() - .prepareCreate("test") - .addMapping("_doc", "shape", "type=geo_shape,strategy=recursive,tree=geohash") - .get() + client().admin().indices().prepareCreate("test").setMapping("shape", "type=geo_shape,strategy=recursive,tree=geohash").get() ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java index 69b2e655dd0ad..b3253b036bda6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java @@ -42,7 +42,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.MoreLikeThisQueryBuilder; import org.opensearch.index.query.MoreLikeThisQueryBuilder.Item; import org.opensearch.index.query.QueryBuilder; @@ -183,7 +182,7 @@ public void testMoreLikeThisForZeroTokensInOneOfTheAnalyzedFields() throws Excep public void testSimpleMoreLikeOnLongField() throws Exception { logger.info("Creating index test"); - assertAcked(prepareCreate("test").addMapping("type1", "some_long", "type=long")); + assertAcked(prepareCreate("test").setMapping("some_long", "type=long")); logger.info("Running Cluster Health"); assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); @@ -598,7 +597,7 @@ public void testSimpleMoreLikeThisIds() throws Exception { public void testMoreLikeThisMultiValueFields() throws Exception { logger.info("Creating the index ..."); assertAcked( - 
prepareCreate("test").addMapping("type1", "text", "type=text,analyzer=keyword") + prepareCreate("test").setMapping("text", "type=text,analyzer=keyword") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)) ); ensureGreen(); @@ -632,7 +631,7 @@ public void testMoreLikeThisMultiValueFields() throws Exception { public void testMinimumShouldMatch() throws ExecutionException, InterruptedException { logger.info("Creating the index ..."); assertAcked( - prepareCreate("test").addMapping("type1", "text", "type=text,analyzer=whitespace") + prepareCreate("test").setMapping("text", "type=text,analyzer=whitespace") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)) ); ensureGreen(); @@ -693,15 +692,7 @@ public void testMoreLikeThisArtificialDocs() throws Exception { public void testMoreLikeThisMalformedArtificialDocs() throws Exception { logger.info("Creating the index ..."); - assertAcked( - prepareCreate("test").addMapping( - MapperService.SINGLE_MAPPING_NAME, - "text", - "type=text,analyzer=whitespace", - "date", - "type=date" - ) - ); + assertAcked(prepareCreate("test").setMapping("text", "type=text,analyzer=whitespace", "date", "type=date")); ensureGreen("test"); logger.info("Creating an index with a single document ..."); @@ -790,9 +781,7 @@ public void testMoreLikeThisUnlike() throws ExecutionException, InterruptedExcep } public void testSelectFields() throws IOException, ExecutionException, InterruptedException { - assertAcked( - prepareCreate("test").addMapping("type1", "text", "type=text,analyzer=whitespace", "text1", "type=text,analyzer=whitespace") - ); + assertAcked(prepareCreate("test").setMapping("text", "type=text,analyzer=whitespace", "text1", "type=text,analyzer=whitespace")); ensureGreen("test"); indexRandom( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index e3c1abff5d206..d2d23cd47fc01 100644 
--- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -73,7 +73,7 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase { public void testSimpleNested() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "nested1", "type=nested")); + assertAcked(prepareCreate("test").setMapping("nested1", "type=nested")); ensureGreen(); // check on no data, see it works @@ -1597,7 +1597,7 @@ public void testCheckFixedBitSetCache() throws Exception { if (loadFixedBitSeLazily) { settingsBuilder.put("index.load_fixed_bitset_filters_eagerly", false); } - assertAcked(prepareCreate("test").setSettings(settingsBuilder).addMapping("type")); + assertAcked(prepareCreate("test").setSettings(settingsBuilder)); client().prepareIndex("test").setId("0").setSource("field", "value").get(); client().prepareIndex("test").setId("1").setSource("field", "value").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java index 7d63db78e205a..f3d1a479f1b46 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java @@ -108,7 +108,7 @@ protected void setupSuiteScopeCluster() throws Exception { .indices() .prepareCreate("idx") .setSettings(org.opensearch.common.collect.Map.of("number_of_shards", 1, "number_of_replicas", 0)) - .addMapping("type", STRING_FIELD, "type=keyword", NUMBER_FIELD, "type=integer", TAG_FIELD, "type=keyword") + .setMapping(STRING_FIELD, "type=keyword", NUMBER_FIELD, "type=integer", TAG_FIELD, "type=keyword") .get() ); List builders = new ArrayList<>(); diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java index 66b42fe266887..d87bbfb1fb69c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java @@ -1015,7 +1015,7 @@ public void testFuzzyFieldLevelBoosting() throws InterruptedException, Execution CreateIndexRequestBuilder builder = prepareCreate(idx).setSettings( Settings.builder().put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 3).put(SETTING_NUMBER_OF_REPLICAS, 0) ); - assertAcked(builder.addMapping("type", "title", "type=text", "body", "type=text")); + assertAcked(builder.setMapping("title", "type=text", "body", "type=text")); ensureGreen(); List builders = new ArrayList<>(); builders.add(client().prepareIndex(idx).setId("1").setSource("title", "foo", "body", "bar")); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java index 59f9e020df0d9..d736365a6e236 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java @@ -88,7 +88,7 @@ protected Map, Object>> pluginScripts() { // 2) score is calculated based on a script with params // 3) min score applied public void testScriptScore() { - assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); + assertAcked(prepareCreate("test-index").setMapping("field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); @@ -114,7 +114,7 @@ public void 
testScriptScore() { } public void testScriptScoreBoolQuery() { - assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); + assertAcked(prepareCreate("test-index").setMapping("field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + i, "field2", i).get(); @@ -136,7 +136,7 @@ public void testScriptScoreBoolQuery() { public void testRewrittenQuery() { assertAcked( prepareCreate("test-index2").setSettings(Settings.builder().put("index.number_of_shards", 1)) - .addMapping("_doc", "field1", "type=date", "field2", "type=double") + .setMapping("field1", "type=date", "field2", "type=double") ); client().prepareIndex("test-index2").setId("1").setSource("field1", "2019-09-01", "field2", 1).get(); client().prepareIndex("test-index2").setId("2").setSource("field1", "2019-10-01", "field2", 2).get(); @@ -154,7 +154,7 @@ public void testRewrittenQuery() { public void testDisallowExpensiveQueries() { try { - assertAcked(prepareCreate("test-index").addMapping("_doc", "field1", "type=text", "field2", "type=double")); + assertAcked(prepareCreate("test-index").setMapping("field1", "type=text", "field2", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { client().prepareIndex("test-index").setId("" + i).setSource("field1", "text" + (i % 2), "field2", i).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index fa2d79ecb2017..fed5561c1df64 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -211,7 +211,7 @@ public void testIssue3177() { } public void testIndexOptions() throws Exception { - 
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text,index_options=docs")); + assertAcked(prepareCreate("test").setMapping("field1", "type=text,index_options=docs")); indexRandom( true, client().prepareIndex("test").setId("1").setSource("field1", "quick brown fox", "field2", "quick brown fox"), @@ -337,7 +337,7 @@ public void testCommonTermsQuery() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping("type1", "field1", "type=text,analyzer=whitespace") + .setMapping("field1", "type=text,analyzer=whitespace") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)) .get(); indexRandom( @@ -479,7 +479,7 @@ public void testLowercaseExpandedTerms() { public void testDateRangeInQueryString() { // the mapping needs to be provided upfront otherwise we are not sure how many failures we get back // as with dynamic mappings some shards might be lacking behind and parse a different query - assertAcked(prepareCreate("test").addMapping("type", "past", "type=date", "future", "type=date")); + assertAcked(prepareCreate("test").setMapping("past", "type=date", "future", "type=date")); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1)); @@ -505,7 +505,7 @@ public void testDateRangeInQueryString() { public void testDateRangeInQueryStringWithTimeZone_7880() { // the mapping needs to be provided upfront otherwise we are not sure how many failures we get back // as with dynamic mappings some shards might be lacking behind and parse a different query - assertAcked(prepareCreate("test").addMapping("type", "past", "type=date")); + assertAcked(prepareCreate("test").setMapping("past", "type=date")); ZoneId timeZone = randomZone(); String now = DateFormatter.forPattern("strict_date_optional_time").format(Instant.now().atZone(timeZone)); @@ -523,7 +523,7 @@ public void testDateRangeInQueryStringWithTimeZone_7880() { public void 
testDateRangeInQueryStringWithTimeZone_10477() { // the mapping needs to be provided upfront otherwise we are not sure how many failures we get back // as with dynamic mappings some shards might be lacking behind and parse a different query - assertAcked(prepareCreate("test").addMapping("type", "past", "type=date")); + assertAcked(prepareCreate("test").setMapping("past", "type=date")); client().prepareIndex("test").setId("1").setSource("past", "2015-04-05T23:00:00+0000").get(); client().prepareIndex("test").setId("2").setSource("past", "2015-04-06T00:00:00+0000").get(); @@ -732,7 +732,7 @@ public void testFiltersWithCustomCacheKey() throws Exception { } public void testMatchQueryNumeric() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "long", "type=long", "double", "type=double")); + assertAcked(prepareCreate("test").setMapping("long", "type=long", "double", "type=double")); indexRandom( true, @@ -752,7 +752,7 @@ public void testMatchQueryNumeric() throws Exception { } public void testMatchQueryFuzzy() throws Exception { - assertAcked(prepareCreate("test").addMapping("_doc", "text", "type=text")); + assertAcked(prepareCreate("test").setMapping("text", "type=text")); indexRandom( true, @@ -846,9 +846,7 @@ public void testMultiMatchQuery() throws Exception { } public void testMatchQueryZeroTermsQuery() { - assertAcked( - prepareCreate("test").addMapping("type1", "field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic") - ); + assertAcked(prepareCreate("test").setMapping("field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic")); client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); client().prepareIndex("test").setId("2").setSource("field1", "value2").get(); refresh(); @@ -869,9 +867,7 @@ public void testMatchQueryZeroTermsQuery() { } public void testMultiMatchQueryZeroTermsQuery() { - assertAcked( - prepareCreate("test").addMapping("type1", "field1", 
"type=text,analyzer=classic", "field2", "type=text,analyzer=classic") - ); + assertAcked(prepareCreate("test").setMapping("field1", "type=text,analyzer=classic", "field2", "type=text,analyzer=classic")); client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get(); client().prepareIndex("test").setId("2").setSource("field1", "value3", "field2", "value4").get(); refresh(); @@ -1039,7 +1035,7 @@ public void testSpecialRangeSyntaxInQueryString() { } public void testEmptytermsQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "term", "type=text")); + assertAcked(prepareCreate("test").setMapping("term", "type=text")); indexRandom( true, @@ -1059,7 +1055,7 @@ public void testEmptytermsQuery() throws Exception { } public void testTermsQuery() throws Exception { - assertAcked(prepareCreate("test").addMapping("type", "str", "type=text", "lng", "type=long", "dbl", "type=double")); + assertAcked(prepareCreate("test").setMapping("str", "type=text", "lng", "type=long", "dbl", "type=double")); indexRandom( true, @@ -1117,7 +1113,7 @@ public void testTermsQuery() throws Exception { } public void testTermsLookupFilter() throws Exception { - assertAcked(prepareCreate("lookup").addMapping("type", "terms", "type=text", "other", "type=text")); + assertAcked(prepareCreate("lookup").setMapping("terms", "type=text", "other", "type=text")); assertAcked( prepareCreate("lookup2").setMapping( jsonBuilder().startObject() @@ -1133,8 +1129,8 @@ public void testTermsLookupFilter() throws Exception { .endObject() ) ); - assertAcked(prepareCreate("lookup3").addMapping("type", "_source", "enabled=false", "terms", "type=text")); - assertAcked(prepareCreate("test").addMapping("type", "term", "type=text")); + assertAcked(prepareCreate("lookup3").setMapping("_source", "enabled=false", "terms", "type=text")); + assertAcked(prepareCreate("test").setMapping("term", "type=text")); indexRandom( true, @@ -1283,8 +1279,7 @@ public void 
testBasicQueryById() throws Exception { public void testNumericTermsAndRanges() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "num_byte", "type=byte", "num_short", @@ -1400,8 +1395,7 @@ public void testNumericTermsAndRanges() throws Exception { public void testNumericRangeFilter_2826() throws Exception { assertAcked( - prepareCreate("test").addMapping( - "type1", + prepareCreate("test").setMapping( "num_byte", "type=byte", "num_short", @@ -1780,7 +1774,7 @@ public void testDateProvidedAsNumber() throws InterruptedException { } public void testRangeQueryWithTimeZone() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "date", "type=date", "num", "type=integer")); + assertAcked(prepareCreate("test").setMapping("date", "type=date", "num", "type=integer")); indexRandom( true, @@ -1955,7 +1949,7 @@ public void testQueryStringParserCache() throws Exception { } public void testRangeQueryRangeFields_24744() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "int_range", "type=integer_range")); + assertAcked(prepareCreate("test").setMapping("int_range", "type=integer_range")); client().prepareIndex("test") .setId("1") @@ -2064,7 +2058,7 @@ public void testWildcardQueryNormalizationOnKeywordField() { .put("index.analysis.normalizer.lowercase_normalizer.type", "custom") .putList("index.analysis.normalizer.lowercase_normalizer.filter", "lowercase") .build() - ).addMapping("_doc", "field1", "type=keyword,normalizer=lowercase_normalizer") + ).setMapping("field1", "type=keyword,normalizer=lowercase_normalizer") ); client().prepareIndex("test").setId("1").setSource("field1", "Bbb Aaa").get(); refresh(); @@ -2091,7 +2085,7 @@ public void testWildcardQueryNormalizationOnTextField() { .put("index.analysis.analyzer.lowercase_analyzer.tokenizer", "standard") .putList("index.analysis.analyzer.lowercase_analyzer.filter", "lowercase") .build() - ).addMapping("_doc", 
"field1", "type=text,analyzer=lowercase_analyzer") + ).setMapping("field1", "type=text,analyzer=lowercase_analyzer") ); client().prepareIndex("test").setId("1").setSource("field1", "Bbb Aaa").get(); refresh(); @@ -2119,7 +2113,7 @@ public void testWildcardQueryNormalizationKeywordSpecialCharacters() { .put("index.analysis.normalizer.no_wildcard.type", "custom") .put("index.analysis.normalizer.no_wildcard.char_filter", "no_wildcard") .build() - ).addMapping("_doc", "field", "type=keyword,normalizer=no_wildcard") + ).setMapping("field", "type=keyword,normalizer=no_wildcard") ); client().prepareIndex("test").setId("1").setSource("field", "label-1").get(); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java index 3bc9eb5b25261..5e45fd8d0ad2a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -240,7 +240,7 @@ public void testCustomScriptBoost() throws Exception { public void testDisallowExpensiveQueries() { try { - assertAcked(prepareCreate("test-index").addMapping("_doc", "num1", "type=double")); + assertAcked(prepareCreate("test-index").setMapping("num1", "type=double")); int docCount = 10; for (int i = 1; i <= docCount; i++) { client().prepareIndex("test-index").setId("" + i).setSource("num1", i).get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java index 5c56671384868..3818566e2eb11 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java @@ -541,7 +541,7 @@ public void 
testStringSortMissingAscTerminates() throws Exception { assertAcked( prepareCreate("test").setSettings( Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - ).addMapping("test", "no_field", "type=keyword", "some_field", "type=keyword") + ).setMapping("no_field", "type=keyword", "some_field", "type=keyword") ); client().prepareIndex("test").setId("1").setSource("some_field", "test").get(); refresh(); @@ -718,7 +718,7 @@ public void testScrollRewrittenToMatchNoDocs() { .indices() .prepareCreate("test") .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards)) - .addMapping("_doc", "created_date", "type=date,format=yyyy-MM-dd") + .setMapping("created_date", "type=date,format=yyyy-MM-dd") ); client().prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); client().prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java index aae6c1dec48b3..926e21294ffc8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java @@ -59,13 +59,10 @@ public class SearchAfterIT extends OpenSearchIntegTestCase { private static final String INDEX_NAME = "test"; - private static final String TYPE_NAME = "type1"; private static final int NUM_DOCS = 100; public void testsShouldFail() throws Exception { - assertAcked( - client().admin().indices().prepareCreate("test").addMapping("type1", "field1", "type=long", "field2", "type=keyword").get() - ); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("field1", "type=long", "field2", "type=keyword").get()); ensureGreen(); indexRandom(true, 
client().prepareIndex("test").setId("0").setSource("field1", 0, "field2", "toto")); { @@ -159,7 +156,7 @@ public void testsShouldFail() throws Exception { } public void testWithNullStrings() throws InterruptedException { - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", "field2", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("field2", "type=keyword").get()); ensureGreen(); indexRandom( true, @@ -219,7 +216,7 @@ public void testWithSimpleTypes() throws Exception { if (reqSize == 0) { reqSize = 1; } - assertSearchFromWithSortValues(INDEX_NAME, TYPE_NAME, documents, reqSize); + assertSearchFromWithSortValues(INDEX_NAME, documents, reqSize); } private static class ListComparator implements Comparator { @@ -250,10 +247,10 @@ public int compare(List o1, List o2) { private ListComparator LST_COMPARATOR = new ListComparator(); - private void assertSearchFromWithSortValues(String indexName, String typeName, List documents, int reqSize) throws Exception { + private void assertSearchFromWithSortValues(String indexName, List documents, int reqSize) throws Exception { int numFields = documents.get(0).size(); { - createIndexMappingsFromObjectType(indexName, typeName, documents.get(0)); + createIndexMappingsFromObjectType(indexName, documents.get(0)); List requests = new ArrayList<>(); for (int i = 0; i < documents.size(); i++) { XContentBuilder builder = jsonBuilder(); @@ -289,7 +286,7 @@ private void assertSearchFromWithSortValues(String indexName, String typeName, L } } - private void createIndexMappingsFromObjectType(String indexName, String typeName, List types) { + private void createIndexMappingsFromObjectType(String indexName, List types) { CreateIndexRequestBuilder indexRequestBuilder = client().admin().indices().prepareCreate(indexName); List mappings = new ArrayList<>(); int numFields = types.size(); @@ -323,7 +320,7 @@ private void createIndexMappingsFromObjectType(String indexName, 
String typeName fail("Can't match type [" + type + "]"); } } - indexRequestBuilder.addMapping(typeName, mappings.toArray(new String[0])).get(); + indexRequestBuilder.setMapping(mappings.toArray(new String[0])).get(); ensureGreen(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java index 0652b38228ec5..7382d4e157bc4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java @@ -295,7 +295,7 @@ public void testSimpleTerminateAfterCount() throws Exception { public void testSimpleIndexSortEarlyTerminate() throws Exception { prepareCreate("test").setSettings( Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).put("index.sort.field", "rank") - ).addMapping(MapperService.SINGLE_MAPPING_NAME, "rank", "type=integer").get(); + ).setMapping("rank", "type=integer").get(); ensureGreen(); int max = randomIntBetween(3, 29); List docbuilders = new ArrayList<>(max); @@ -498,7 +498,7 @@ public void testTooLargeRescoreOkByDynamicResultWindowSetting() throws Exception } public void testQueryNumericFieldWithRegex() throws Exception { - assertAcked(prepareCreate("idx").addMapping("type", "num", "type=integer")); + assertAcked(prepareCreate("idx").setMapping("num", "type=integer")); ensureGreen("idx"); try { @@ -510,7 +510,7 @@ public void testQueryNumericFieldWithRegex() throws Exception { } public void testTermQueryBigInt() throws Exception { - prepareCreate("idx").addMapping("type", "field", "type=keyword").get(); + prepareCreate("idx").setMapping("field", "type=keyword").get(); ensureGreen("idx"); client().prepareIndex("idx") diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java index 3d1d407b386e1..4bf4cd138cbd1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java @@ -137,7 +137,7 @@ public void testIssue8226() { final boolean useMapping = randomBoolean(); for (int i = 0; i < numIndices; i++) { if (useMapping) { - assertAcked(prepareCreate("test_" + i).addAlias(new Alias("test")).addMapping("foo", "entry", "type=long")); + assertAcked(prepareCreate("test_" + i).addAlias(new Alias("test")).setMapping("entry", "type=long")); } else { assertAcked(prepareCreate("test_" + i).addAlias(new Alias("test"))); } @@ -243,7 +243,7 @@ public void testIssue6614() throws ExecutionException, InterruptedException { } public void testTrackScores() throws Exception { - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type1", "svalue", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("svalue", "type=keyword").get()); ensureGreen(); index( "test", @@ -354,7 +354,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut } public void test3078() { - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "field", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("field", "type=keyword").get()); ensureGreen(); for (int i = 1; i < 101; i++) { @@ -492,7 +492,7 @@ public void testScoreSortDirectionWithFunctionScore() throws Exception { } public void testIssue2986() { - assertAcked(client().admin().indices().prepareCreate("test").addMapping("post", "field1", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("field1", "type=keyword").get()); client().prepareIndex("test").setId("1").setSource("{\"field1\":\"value1\"}", XContentType.JSON).get(); 
client().prepareIndex("test").setId("2").setSource("{\"field1\":\"value2\"}", XContentType.JSON).get(); @@ -516,7 +516,7 @@ public void testIssue2991() { } catch (Exception e) { // ignore } - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", "tag", "type=keyword").get()); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("tag", "type=keyword").get()); ensureGreen(); client().prepareIndex("test").setId("1").setSource("tag", "alpha").get(); refresh(); @@ -1610,11 +1610,11 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception assertAcked( prepareCreate("test1").setSettings( Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(2, maximumNumberOfShards())) - ).addMapping("type", sortField, "type=long").get() + ).setMapping(sortField, "type=long").get() ); assertAcked( prepareCreate("test2").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) - .addMapping("type", sortField, "type=long") + .setMapping(sortField, "type=long") .get() ); @@ -1650,7 +1650,7 @@ public void testCustomFormat() throws Exception { // Use an ip field, which uses different internal/external // representations of values, to make sure values are both correctly // rendered and parsed (search_after) - assertAcked(prepareCreate("test").addMapping("type", "ip", "type=ip")); + assertAcked(prepareCreate("test").setMapping("ip", "type=ip")); indexRandom( true, client().prepareIndex("test").setId("1").setSource("ip", "192.168.1.7"), @@ -1671,7 +1671,7 @@ public void testCustomFormat() throws Exception { } public void testScriptFieldSort() throws Exception { - assertAcked(prepareCreate("test").addMapping("t", "keyword", "type=keyword", "number", "type=integer")); + assertAcked(prepareCreate("test").setMapping("keyword", "type=keyword", "number", "type=integer")); ensureGreen(); final int numDocs = randomIntBetween(10, 20); IndexRequestBuilder[] indexReqs = new 
IndexRequestBuilder[numDocs]; @@ -1721,10 +1721,8 @@ public void testScriptFieldSort() throws Exception { public void testFieldAlias() throws Exception { // Create two indices and add the field 'route_length_miles' as an alias in // one, and a concrete field in the other. - assertAcked( - prepareCreate("old_index").addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance") - ); - assertAcked(prepareCreate("new_index").addMapping("_doc", "route_length_miles", "type=double")); + assertAcked(prepareCreate("old_index").setMapping("distance", "type=double", "route_length_miles", "type=alias,path=distance")); + assertAcked(prepareCreate("new_index").setMapping("route_length_miles", "type=double")); ensureGreen("old_index", "new_index"); List builders = new ArrayList<>(); @@ -1749,10 +1747,8 @@ public void testFieldAlias() throws Exception { public void testFieldAliasesWithMissingValues() throws Exception { // Create two indices and add the field 'route_length_miles' as an alias in // one, and a concrete field in the other. 
- assertAcked( - prepareCreate("old_index").addMapping("_doc", "distance", "type=double", "route_length_miles", "type=alias,path=distance") - ); - assertAcked(prepareCreate("new_index").addMapping("_doc", "route_length_miles", "type=double")); + assertAcked(prepareCreate("old_index").setMapping("distance", "type=double", "route_length_miles", "type=alias,path=distance")); + assertAcked(prepareCreate("new_index").setMapping("route_length_miles", "type=double")); ensureGreen("old_index", "new_index"); List builders = new ArrayList<>(); @@ -1775,9 +1771,9 @@ public void testFieldAliasesWithMissingValues() throws Exception { } public void testCastNumericType() throws Exception { - assertAcked(prepareCreate("index_double").addMapping("_doc", "field", "type=double")); - assertAcked(prepareCreate("index_long").addMapping("_doc", "field", "type=long")); - assertAcked(prepareCreate("index_float").addMapping("_doc", "field", "type=float")); + assertAcked(prepareCreate("index_double").setMapping("field", "type=double")); + assertAcked(prepareCreate("index_long").setMapping("field", "type=long")); + assertAcked(prepareCreate("index_float").setMapping("field", "type=float")); ensureGreen("index_double", "index_long", "index_float"); List builders = new ArrayList<>(); @@ -1821,8 +1817,8 @@ public void testCastNumericType() throws Exception { } public void testCastDate() throws Exception { - assertAcked(prepareCreate("index_date").addMapping("_doc", "field", "type=date")); - assertAcked(prepareCreate("index_date_nanos").addMapping("_doc", "field", "type=date_nanos")); + assertAcked(prepareCreate("index_date").setMapping("field", "type=date")); + assertAcked(prepareCreate("index_date_nanos").setMapping("field", "type=date_nanos")); ensureGreen("index_date", "index_date_nanos"); List builders = new ArrayList<>(); @@ -1937,7 +1933,7 @@ public void testCastDate() throws Exception { } public void testCastNumericTypeExceptions() throws Exception { - 
assertAcked(prepareCreate("index").addMapping("_doc", "keyword", "type=keyword", "ip", "type=ip")); + assertAcked(prepareCreate("index").setMapping("keyword", "type=keyword", "ip", "type=ip")); ensureGreen("index"); for (String invalidField : new String[] { "keyword", "ip" }) { for (String numericType : new String[] { "long", "double", "date", "date_nanos" }) { @@ -1957,7 +1953,7 @@ public void testCastNumericTypeExceptions() throws Exception { public void testLongSortOptimizationCorrectResults() { assertAcked( prepareCreate("test1").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2)) - .addMapping("_doc", "long_field", "type=long") + .setMapping("long_field", "type=long") .get() ); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java index c283444666f0b..aebd83592e793 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java @@ -83,7 +83,7 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce */ Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomIndexCompatibleVersion(random()); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); + assertAcked(prepareCreate("index").setSettings(settings).setMapping(LOCATION_FIELD, "type=geo_point")); XContentBuilder d1Builder = jsonBuilder(); GeoPoint[] d1Points = { new GeoPoint(3, 2), new GeoPoint(4, 1) }; createShuffeldJSONArray(d1Builder, d1Points); @@ -174,7 +174,7 @@ public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedExc */ Version version = randomBoolean() ? 
Version.CURRENT : VersionUtils.randomIndexCompatibleVersion(random()); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); + assertAcked(prepareCreate("index").setSettings(settings).setMapping(LOCATION_FIELD, "type=geo_point")); XContentBuilder d1Builder = jsonBuilder(); GeoPoint[] d1Points = { new GeoPoint(0, 1), new GeoPoint(0, 4), new GeoPoint(0, 10) }; createShuffeldJSONArray(d1Builder, d1Points); @@ -248,7 +248,7 @@ public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionExcept */ Version version = randomBoolean() ? Version.CURRENT : VersionUtils.randomIndexCompatibleVersion(random()); Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, version).build(); - assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); + assertAcked(prepareCreate("index").setSettings(settings).setMapping(LOCATION_FIELD, "type=geo_point")); XContentBuilder d1Builder = jsonBuilder(); GeoPoint[] d1Points = { new GeoPoint(2.5, 1), new GeoPoint(2.75, 2), new GeoPoint(3, 3), new GeoPoint(3.25, 4) }; createShuffeldJSONArray(d1Builder, d1Points); @@ -306,7 +306,7 @@ public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionExcept } public void testSinglePointGeoDistanceSort() throws ExecutionException, InterruptedException, IOException { - assertAcked(prepareCreate("index").addMapping("type", LOCATION_FIELD, "type=geo_point")); + assertAcked(prepareCreate("index").setMapping(LOCATION_FIELD, "type=geo_point")); indexRandom( true, client().prepareIndex("index") @@ -382,8 +382,7 @@ private static void checkCorrectSortOrderForGeoSort(SearchResponse searchRespons public void testCrossIndexIgnoreUnmapped() throws Exception { assertAcked( - prepareCreate("test1").addMapping("type", "str_field", "type=keyword", 
"long_field", "type=long", "double_field", "type=double") - .get() + prepareCreate("test1").setMapping("str_field", "type=keyword", "long_field", "type=long", "double_field", "type=double").get() ); assertAcked(prepareCreate("test2").get()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java index 758d749f0be8e..4f6dd89285bee 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java @@ -69,7 +69,7 @@ public void testSimple() { } public void testInnerHits() { - assertAcked(prepareCreate("test").addMapping("_doc", "nested", "type=nested")); + assertAcked(prepareCreate("test").setMapping("nested", "type=nested")); ensureGreen(); client().prepareIndex("test").setId("1").setSource("field", "value", "nested", Collections.singletonMap("title", "foo")).get(); refresh(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java index cd3921d91a3ed..07e58db836740 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java @@ -97,7 +97,7 @@ public class SuggestSearchIT extends OpenSearchIntegTestCase { // see #3196 public void testSuggestAcrossMultipleIndices() throws IOException { - assertAcked(prepareCreate("test").addMapping("type1", "text", "type=text")); + assertAcked(prepareCreate("test").setMapping("text", "type=text")); ensureGreen(); index("test", "type1", "1", "text", "abcd"); @@ -111,7 +111,7 @@ public void testSuggestAcrossMultipleIndices() throws IOException { .text("abcd"); logger.info("--> run suggestions with one index"); 
searchSuggest("test", termSuggest); - assertAcked(prepareCreate("test_1").addMapping("type1", "text", "type=text")); + assertAcked(prepareCreate("test_1").setMapping("text", "type=text")); ensureGreen(); index("test_1", "type1", "1", "text", "ab cd"); @@ -342,7 +342,7 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut } public void testSimple() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "text", "type=text")); + assertAcked(prepareCreate("test").setMapping("text", "type=text")); ensureGreen(); index("test", "type1", "1", "text", "abcd"); @@ -367,7 +367,7 @@ public void testSimple() throws Exception { } public void testEmpty() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "text", "type=text")); + assertAcked(prepareCreate("test").setMapping("text", "type=text")); ensureGreen(); index("test", "type1", "1", "text", "bar"); @@ -386,7 +386,7 @@ public void testEmpty() throws Exception { } public void testEmptyIndex() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", "text", "type=text")); + assertAcked(prepareCreate("test").setMapping("text", "type=text")); ensureGreen(); // use SuggestMode.ALWAYS, otherwise the results can vary between requests. 
@@ -412,7 +412,7 @@ public void testEmptyIndex() throws Exception { } public void testWithMultipleCommands() throws Exception { - assertAcked(prepareCreate("test").addMapping("typ1", "field1", "type=text", "field2", "type=text")); + assertAcked(prepareCreate("test").setMapping("field1", "type=text", "field2", "type=text")); ensureGreen(); index("test", "typ1", "1", "field1", "prefix_abcd", "field2", "prefix_efgh"); @@ -516,7 +516,7 @@ public void testSizeAndSort() throws Exception { // see #2817 public void testStopwordsOnlyPhraseSuggest() throws IOException { assertAcked( - prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd") + prepareCreate("test").setMapping("body", "type=text,analyzer=stopwd") .setSettings( Settings.builder() .put("index.analysis.analyzer.stopwd.tokenizer", "standard") diff --git a/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java b/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java index c6ec91a6ab078..16b0e8829b1a7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/update/UpdateIT.java @@ -542,7 +542,7 @@ public void testUpdateRequestWithScriptAndShouldUpsertDoc() throws Exception { } public void testContextVariables() throws Exception { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1")); + assertAcked(prepareCreate("test").addAlias(new Alias("alias"))); ensureGreen(); // Index some documents diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index a97c4a0d13f12..86a51e7367ade 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -297,7 +297,7 @@ public void 
testExplainNoQuery() { public void testExplainFilteredAlias() { assertAcked( - prepareCreate("test").addMapping("test", "field", "type=text") + prepareCreate("test").setMapping("field", "type=text") .addAlias(new Alias("alias").filter(QueryBuilders.termQuery("field", "value1"))) ); ensureGreen(); @@ -318,7 +318,7 @@ public void testExplainWithRewriteValidateQuery() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping(MapperService.SINGLE_MAPPING_NAME, "field", "type=text,analyzer=whitespace") + .setMapping("field", "type=text,analyzer=whitespace") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1)) .get(); client().prepareIndex("test").setId("1").setSource("field", "quick lazy huge brown pidgin").get(); @@ -380,7 +380,7 @@ public void testExplainWithRewriteValidateQueryAllShards() throws Exception { client().admin() .indices() .prepareCreate("test") - .addMapping(MapperService.SINGLE_MAPPING_NAME, "field", "type=text,analyzer=whitespace") + .setMapping("field", "type=text,analyzer=whitespace") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put("index.number_of_routing_shards", 2)) .get(); // We are relying on specific routing behaviors for the result to be right, so @@ -484,7 +484,7 @@ public void testExplainTermsQueryWithLookup() throws Exception { client().admin() .indices() .prepareCreate("twitter") - .addMapping("_doc", "user", "type=integer", "followers", "type=integer") + .setMapping("user", "type=integer", "followers", "type=integer") .setSettings(Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 2).put("index.number_of_routing_shards", 2)) .get(); client().prepareIndex("twitter").setId("1").setSource("followers", new int[] { 1, 2, 3 }).get(); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java index ac80b3eb6a155..cc99f63c6a844 100644 --- 
a/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -141,10 +141,8 @@ public CreateIndexRequestBuilder setMapping(Map source) { /** * A specialized simplified mapping source method, takes the form of simple properties definition: * ("field1", "type=string,store=true"). - * @deprecated types are being removed */ - @Deprecated - public CreateIndexRequestBuilder addMapping(String type, String... source) { + public CreateIndexRequestBuilder setMapping(String... source) { request.simpleMapping(source); return this; } diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java index 94bf162303127..5388c0a866c5f 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataMappingServiceTests.java @@ -39,7 +39,6 @@ import org.opensearch.common.compress.CompressedXContent; import org.opensearch.index.Index; import org.opensearch.index.IndexService; -import org.opensearch.index.mapper.MapperService; import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; import org.opensearch.test.InternalSettingsPlugin; @@ -58,10 +57,7 @@ protected Collection> getPlugins() { } public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Exception { - final IndexService indexService = createIndex( - "test", - client().admin().indices().prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME) - ); + final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").setMapping()); final CompressedXContent currentMapping = indexService.mapperService().documentMapper().mappingSource(); final MetadataMappingService 
mappingService = getInstanceFromNode(MetadataMappingService.class); @@ -86,7 +82,7 @@ public void testMappingClusterStateUpdateDoesntChangeExistingIndices() throws Ex } public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception { - createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type")); + createIndex("test", client().admin().indices().prepareCreate("test")); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); @@ -106,7 +102,7 @@ public void testClusterStateIsNotChangedWithIdenticalMappings() throws Exception } public void testMappingVersion() throws Exception { - final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type")); + final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test")); final long previousVersion = indexService.getMetadata().getMappingVersion(); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); @@ -122,7 +118,7 @@ public void testMappingVersion() throws Exception { } public void testMappingVersionUnchanged() throws Exception { - final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").addMapping("type")); + final IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test").setMapping()); final long previousVersion = indexService.getMetadata().getMappingVersion(); final MetadataMappingService mappingService = getInstanceFromNode(MetadataMappingService.class); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); diff --git a/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java 
b/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java index d7cf873e133df..35c16c9954622 100644 --- a/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/opensearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -55,7 +55,7 @@ public void testQueryRewriteOnRefresh() throws Exception { client().admin() .indices() .prepareCreate("index") - .addMapping("type", "s", "type=text") + .setMapping("s", "type=text") .setSettings( Settings.builder() .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true) diff --git a/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java b/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java index 80fd4edc6ac78..0349307ad20ea 100644 --- a/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/UpdateMappingTests.java @@ -259,7 +259,7 @@ public void testRejectFieldDefinedTwice() throws IOException { } public void testMappingVersion() { - createIndex("test", client().admin().indices().prepareCreate("test").addMapping(MapperService.SINGLE_MAPPING_NAME)); + createIndex("test", client().admin().indices().prepareCreate("test")); final ClusterService clusterService = getInstanceFromNode(ClusterService.class); { final long previousVersion = clusterService.state().metadata().index("test").getMappingVersion(); diff --git a/server/src/test/java/org/opensearch/index/query/CommonTermsQueryParserTests.java b/server/src/test/java/org/opensearch/index/query/CommonTermsQueryParserTests.java index 1dcda74308f45..61ef0c746995a 100644 --- a/server/src/test/java/org/opensearch/index/query/CommonTermsQueryParserTests.java +++ b/server/src/test/java/org/opensearch/index/query/CommonTermsQueryParserTests.java @@ -38,8 +38,7 @@ public class CommonTermsQueryParserTests extends OpenSearchSingleNodeTestCase { public void 
testWhenParsedQueryIsNullNoNullPointerExceptionIsThrown() { final String index = "test-index"; - final String type = "test-type"; - client().admin().indices().prepareCreate(index).addMapping(type, "name", "type=text,analyzer=stop").execute().actionGet(); + client().admin().indices().prepareCreate(index).setMapping("name", "type=text,analyzer=stop").execute().actionGet(); ensureGreen(); CommonTermsQueryBuilder commonTermsQueryBuilder = new CommonTermsQueryBuilder("name", "the").queryName("query-name"); diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java index bf16f70d400fc..d6981d1c34652 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -55,7 +55,7 @@ protected int numberOfShards() { } protected void createIdx(String keyFieldMapping) { - assertAcked(prepareCreate("idx").addMapping("type", "key", keyFieldMapping)); + assertAcked(prepareCreate("idx").setMapping("key", keyFieldMapping)); } protected static String routing1; // routing key to shard 1 diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java index 9243a1ccd517e..5dd8421bf5fc1 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -87,8 +87,7 @@ public abstract class AbstractGeoTestCase extends OpenSearchIntegTestCase { public void setupSuiteScopeCluster() throws Exception { createIndex(UNMAPPED_IDX_NAME); assertAcked( - prepareCreate(IDX_NAME).addMapping( - "type", + prepareCreate(IDX_NAME).setMapping( SINGLE_VALUED_FIELD_NAME, "type=geo_point", 
MULTI_VALUED_FIELD_NAME, @@ -168,11 +167,10 @@ public void setupSuiteScopeCluster() throws Exception { ); } - assertAcked(prepareCreate(EMPTY_IDX_NAME).addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point")); + assertAcked(prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point")); assertAcked( - prepareCreate(DATELINE_IDX_NAME).addMapping( - "type", + prepareCreate(DATELINE_IDX_NAME).setMapping( SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, @@ -205,8 +203,7 @@ public void setupSuiteScopeCluster() throws Exception { } assertAcked( prepareCreate(HIGH_CARD_IDX_NAME).setSettings(Settings.builder().put("number_of_shards", 2)) - .addMapping( - "type", + .setMapping( SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, @@ -247,7 +244,7 @@ public void setupSuiteScopeCluster() throws Exception { client().prepareIndex(IDX_ZERO_NAME) .setSource(jsonBuilder().startObject().array(SINGLE_VALUED_FIELD_NAME, 0.0, 1.0).endObject()) ); - assertAcked(prepareCreate(IDX_ZERO_NAME).addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point")); + assertAcked(prepareCreate(IDX_ZERO_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point")); indexRandom(true, builders); ensureSearchable(); diff --git a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java index 4247a2ccabff2..d6c5e4f9a6083 100644 --- a/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java +++ b/server/src/test/java/org/opensearch/search/geo/GeoShapeQueryTests.java @@ -148,7 +148,7 @@ protected XContentBuilder createRandomMapping() throws Exception { public void testShapeFetchingPath() throws Exception { createIndex("shapes"); - client().admin().indices().prepareCreate("test").addMapping("type", "geo", "type=geo_shape").get(); + client().admin().indices().prepareCreate("test").setMapping("geo", "type=geo_shape").get(); String location = 
"\"geo\" : {\"type\":\"polygon\", \"coordinates\":[[[-10,-10],[10,-10],[10,10],[-10,10],[-10,-10]]]}"; @@ -538,14 +538,9 @@ public void testPointQuery() throws Exception { PointBuilder pb = new PointBuilder(pt[0], pt[1]); gcb.shape(pb); if (randomBoolean()) { - client().admin().indices().prepareCreate("test").addMapping("type", "geo", "type=geo_shape").execute().actionGet(); + client().admin().indices().prepareCreate("test").setMapping("geo", "type=geo_shape").execute().actionGet(); } else { - client().admin() - .indices() - .prepareCreate("test") - .addMapping("type", "geo", "type=geo_shape,tree=quadtree") - .execute() - .actionGet(); + client().admin().indices().prepareCreate("test").setMapping("geo", "type=geo_shape,tree=quadtree").execute().actionGet(); } XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); client().prepareIndex("test").setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); @@ -578,14 +573,9 @@ public void testContainsShapeQuery() throws Exception { } if (usePrefixTrees) { - client().admin() - .indices() - .prepareCreate("test") - .addMapping("type", "geo", "type=geo_shape,tree=quadtree") - .execute() - .actionGet(); + client().admin().indices().prepareCreate("test").setMapping("geo", "type=geo_shape,tree=quadtree").execute().actionGet(); } else { - client().admin().indices().prepareCreate("test").addMapping("type", "geo", "type=geo_shape").execute().actionGet(); + client().admin().indices().prepareCreate("test").setMapping("geo", "type=geo_shape").execute().actionGet(); } XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("geo"), null).endObject(); @@ -805,7 +795,7 @@ public void testQueryRandomGeoCollection() throws Exception { public void testShapeFilterWithDefinedGeoCollection() throws Exception { createIndex("shapes"); - client().admin().indices().prepareCreate("test").addMapping("type", "geo", "type=geo_shape,tree=quadtree").get(); + 
client().admin().indices().prepareCreate("test").setMapping("geo", "type=geo_shape,tree=quadtree").get(); XContentBuilder docSource = jsonBuilder().startObject() .startObject("geo") diff --git a/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index c2c2728a348f5..1092bc4f8f47c 100644 --- a/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/server/src/test/java/org/opensearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -100,7 +100,7 @@ public static void index01Docs(String type, String settings, OpenSearchIntegTest assertAcked( testCase.prepareCreate(INDEX_NAME) .setSettings(settings, XContentType.JSON) - .addMapping("_doc", "text", textMappings, CLASS_FIELD, "type=keyword") + .setMapping("text", textMappings, CLASS_FIELD, "type=keyword") ); String[] gb = { "0", "1" }; List indexRequestBuilderList = new ArrayList<>(); diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java index 495eb73e3f39a..a4f6b97115bb0 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java @@ -76,7 +76,7 @@ public void setupSuiteScopeCluster() throws Exception { // two docs {value: 0} and {value : 2}, then building a histogram agg with interval 1 and with empty // buckets computed.. the empty bucket is the one associated with key "1". then each test will have // to check that this bucket exists with the appropriate sub aggregations. 
- prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer").execute().actionGet(); + prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").execute().actionGet(); builders = new ArrayList<>(); for (int i = 0; i < 2; i++) { builders.add( diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java index fe65d14bbcd0f..3fe26d92bd17f 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java @@ -322,8 +322,8 @@ protected IndexService createIndex(String index, Settings settings, String type, @Deprecated protected IndexService createIndex(String index, Settings settings, String type, String... mappings) { CreateIndexRequestBuilder createIndexRequestBuilder = client().admin().indices().prepareCreate(index).setSettings(settings); - if (type != null) { - createIndexRequestBuilder.addMapping(type, mappings); + if (mappings != null) { + createIndexRequestBuilder.setMapping(mappings); } return createIndex(index, createIndexRequestBuilder); } From 9f83deadf40441e43f6f8fefd56141fad9e6626f Mon Sep 17 00:00:00 2001 From: Suraj Singh <79435743+dreamer-89@users.noreply.github.com> Date: Thu, 17 Mar 2022 18:38:10 -0700 Subject: [PATCH 45/46] [Remove] Type from Percolate query API (#2490) * [Remove] Type from Percolator query API Signed-off-by: Suraj Singh * Address review comment Signed-off-by: Suraj Singh --- .../percolator/PercolatorQuerySearchIT.java | 14 +- .../percolator/PercolateQueryBuilder.java | 169 +++++------------- .../PercolateQueryBuilderTests.java | 60 +------ 3 files changed, 61 insertions(+), 182 deletions(-) diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java 
b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index f78b74e272ebf..8d3c37bc9b039 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -397,14 +397,14 @@ public void testPercolatorQueryExistingDocument() throws Exception { logger.info("percolating empty doc"); SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "type", "1", null, null, null)) + .setQuery(new PercolateQueryBuilder("query", "test", "1", null, null, null)) .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); logger.info("percolating doc with 1 field"); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null)) + .setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -413,7 +413,7 @@ public void testPercolatorQueryExistingDocument() throws Exception { logger.info("percolating doc with 2 fields"); response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "type", "6", null, null, null)) + .setQuery(new PercolateQueryBuilder("query", "test", "6", null, null, null)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); @@ -438,7 +438,7 @@ public void testPercolatorQueryExistingDocumentSourceDisabled() throws Exception logger.info("percolating empty doc with source disabled"); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> { client().prepareSearch().setQuery(new PercolateQueryBuilder("query", "test", "type", "1", null, null, null)).get(); } + () -> { client().prepareSearch().setQuery(new PercolateQueryBuilder("query", "test", "1", null, null, 
null)).get(); } ); assertThat(e.getMessage(), containsString("source disabled")); } @@ -1193,10 +1193,10 @@ public void testPercolatorQueryViaMultiSearch() throws Exception { ) ) ) - .add(client().prepareSearch("test").setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null))) + .add(client().prepareSearch("test").setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null))) .add( client().prepareSearch("test") // non existing doc, so error element - .setQuery(new PercolateQueryBuilder("query", "test", "type", "6", null, null, null)) + .setQuery(new PercolateQueryBuilder("query", "test", "6", null, null, null)) ) .get(); @@ -1228,7 +1228,7 @@ public void testPercolatorQueryViaMultiSearch() throws Exception { item = response.getResponses()[5]; assertThat(item.getResponse(), nullValue()); assertThat(item.getFailureMessage(), notNullValue()); - assertThat(item.getFailureMessage(), containsString("[test/type/6] couldn't be found")); + assertThat(item.getFailureMessage(), containsString("[test/6] couldn't be found")); } public void testDisallowExpensiveQueries() throws IOException { diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index 87f08e2ff50fc..b2130eca3bb02 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -67,7 +67,6 @@ import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.ConstructingObjectParser; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.NamedXContentRegistry; @@ -111,19 
+110,11 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "percolate"; - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ParseField.class); - static final String DOCUMENT_TYPE_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [percolate] queries. " - + "The [document_type] should no longer be specified."; - static final String TYPE_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [percolate] queries. " - + "The [type] of the indexed document should no longer be specified."; - static final ParseField DOCUMENT_FIELD = new ParseField("document"); static final ParseField DOCUMENTS_FIELD = new ParseField("documents"); private static final ParseField NAME_FIELD = new ParseField("name"); private static final ParseField QUERY_FIELD = new ParseField("field"); - private static final ParseField DOCUMENT_TYPE_FIELD = new ParseField("document_type"); private static final ParseField INDEXED_DOCUMENT_FIELD_INDEX = new ParseField("index"); - private static final ParseField INDEXED_DOCUMENT_FIELD_TYPE = new ParseField("type"); private static final ParseField INDEXED_DOCUMENT_FIELD_ID = new ParseField("id"); private static final ParseField INDEXED_DOCUMENT_FIELD_ROUTING = new ParseField("routing"); private static final ParseField INDEXED_DOCUMENT_FIELD_PREFERENCE = new ParseField("preference"); @@ -131,29 +122,16 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder documents; private final XContentType documentXContentType; private final String indexedDocumentIndex; - @Deprecated - private final String indexedDocumentType; private final String indexedDocumentId; private final String indexedDocumentRouting; private final String indexedDocumentPreference; private final Long indexedDocumentVersion; private final Supplier documentSupplier; - /** - * @deprecated use {@link #PercolateQueryBuilder(String, BytesReference, XContentType)} with the document 
content - * type to avoid autodetection. - */ - @Deprecated - public PercolateQueryBuilder(String field, String documentType, BytesReference document) { - this(field, documentType, Collections.singletonList(document), XContentHelper.xContentType(document)); - } - /** * Creates a percolator query builder instance for percolating a provided document. * @@ -162,7 +140,7 @@ public PercolateQueryBuilder(String field, String documentType, BytesReference d * @param documentXContentType The content type of the binary blob containing the document to percolate */ public PercolateQueryBuilder(String field, BytesReference document, XContentType documentXContentType) { - this(field, null, Collections.singletonList(document), documentXContentType); + this(field, Collections.singletonList(document), documentXContentType); } /** @@ -173,11 +151,6 @@ public PercolateQueryBuilder(String field, BytesReference document, XContentType * @param documentXContentType The content type of the binary blob containing the document to percolate */ public PercolateQueryBuilder(String field, List documents, XContentType documentXContentType) { - this(field, null, documents, documentXContentType); - } - - @Deprecated - public PercolateQueryBuilder(String field, String documentType, List documents, XContentType documentXContentType) { if (field == null) { throw new IllegalArgumentException("[field] is a required argument"); } @@ -185,11 +158,9 @@ public PercolateQueryBuilder(String field, String documentType, List documentSupplier) { - if (field == null) { - throw new IllegalArgumentException("[field] is a required argument"); - } - this.field = field; - this.documentType = documentType; - this.documents = Collections.emptyList(); - this.documentXContentType = null; - this.documentSupplier = documentSupplier; - indexedDocumentIndex = null; - indexedDocumentType = null; - indexedDocumentId = null; - indexedDocumentRouting = null; - indexedDocumentPreference = null; - indexedDocumentVersion = null; - 
} - /** * Creates a percolator query builder instance for percolating a document in a remote index. * * @param field The field that contains the percolator query * @param indexedDocumentIndex The index containing the document to percolate - * @param indexedDocumentType The type containing the document to percolate * @param indexedDocumentId The id of the document to percolate * @param indexedDocumentRouting The routing value for the document to percolate * @param indexedDocumentPreference The preference to use when fetching the document to percolate @@ -228,30 +181,6 @@ protected PercolateQueryBuilder(String field, String documentType, Supplier documentSupplier) { + if (field == null) { + throw new IllegalArgumentException("[field] is a required argument"); + } + this.field = field; + this.documents = Collections.emptyList(); + this.documentXContentType = null; + this.documentSupplier = documentSupplier; + indexedDocumentIndex = null; + indexedDocumentId = null; + indexedDocumentRouting = null; + indexedDocumentPreference = null; + indexedDocumentVersion = null; + } + /** * Read from a stream. 
*/ @@ -286,9 +228,20 @@ public PercolateQueryBuilder( super(in); field = in.readString(); name = in.readOptionalString(); - documentType = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String documentType = in.readOptionalString(); + if (documentType != null) { + throw new IllegalStateException("documentType must be null"); + } + } indexedDocumentIndex = in.readOptionalString(); - indexedDocumentType = in.readOptionalString(); + if (in.getVersion().before(Version.V_2_0_0)) { + String indexedDocumentType = in.readOptionalString(); + if (indexedDocumentType != null) { + throw new IllegalStateException("indexedDocumentType must be null"); + } + } + indexedDocumentId = in.readOptionalString(); indexedDocumentRouting = in.readOptionalString(); indexedDocumentPreference = in.readOptionalString(); @@ -322,9 +275,15 @@ protected void doWriteTo(StreamOutput out) throws IOException { } out.writeString(field); out.writeOptionalString(name); - out.writeOptionalString(documentType); + if (out.getVersion().before(Version.V_2_0_0)) { + // In 7x, typeless percolate queries are represented by null documentType values + out.writeOptionalString(null); + } out.writeOptionalString(indexedDocumentIndex); - out.writeOptionalString(indexedDocumentType); + if (out.getVersion().before(Version.V_2_0_0)) { + // In 7x, typeless percolate queries are represented by null indexedDocumentType values + out.writeOptionalString(null); + } out.writeOptionalString(indexedDocumentId); out.writeOptionalString(indexedDocumentRouting); out.writeOptionalString(indexedDocumentPreference); @@ -346,7 +305,6 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); - builder.field(DOCUMENT_TYPE_FIELD.getPreferredName(), documentType); builder.field(QUERY_FIELD.getPreferredName(), field); if (name != null) { builder.field(NAME_FIELD.getPreferredName(), 
name); @@ -367,13 +325,10 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep } builder.endArray(); } - if (indexedDocumentIndex != null || indexedDocumentType != null || indexedDocumentId != null) { + if (indexedDocumentIndex != null || indexedDocumentId != null) { if (indexedDocumentIndex != null) { builder.field(INDEXED_DOCUMENT_FIELD_INDEX.getPreferredName(), indexedDocumentIndex); } - if (indexedDocumentType != null) { - builder.field(INDEXED_DOCUMENT_FIELD_TYPE.getPreferredName(), indexedDocumentType); - } if (indexedDocumentId != null) { builder.field(INDEXED_DOCUMENT_FIELD_ID.getPreferredName(), indexedDocumentId); } @@ -401,23 +356,12 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep String indexDocRouting = (String) args[5]; String indexDocPreference = (String) args[6]; Long indexedDocVersion = (Long) args[7]; - String indexedDocType = (String) args[8]; - String docType = (String) args[9]; if (indexedDocId != null) { - return new PercolateQueryBuilder( - field, - docType, - indexedDocIndex, - indexedDocType, - indexedDocId, - indexDocRouting, - indexDocPreference, - indexedDocVersion - ); + return new PercolateQueryBuilder(field, indexedDocIndex, indexedDocId, indexDocRouting, indexDocPreference, indexedDocVersion); } else if (document != null) { - return new PercolateQueryBuilder(field, docType, Collections.singletonList(document), XContentType.JSON); + return new PercolateQueryBuilder(field, Collections.singletonList(document), XContentType.JSON); } else { - return new PercolateQueryBuilder(field, docType, documents, XContentType.JSON); + return new PercolateQueryBuilder(field, documents, XContentType.JSON); } }); static { @@ -429,8 +373,6 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep PARSER.declareString(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_ROUTING); PARSER.declareString(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_PREFERENCE); 
PARSER.declareLong(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_VERSION); - PARSER.declareStringOrNull(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_TYPE); - PARSER.declareStringOrNull(optionalConstructorArg(), DOCUMENT_TYPE_FIELD); PARSER.declareString(PercolateQueryBuilder::setName, NAME_FIELD); PARSER.declareString(PercolateQueryBuilder::queryName, AbstractQueryBuilder.NAME_FIELD); PARSER.declareFloat(PercolateQueryBuilder::boost, BOOST_FIELD); @@ -461,10 +403,8 @@ public static PercolateQueryBuilder fromXContent(XContentParser parser) throws I @Override protected boolean doEquals(PercolateQueryBuilder other) { return Objects.equals(field, other.field) - && Objects.equals(documentType, other.documentType) && Objects.equals(documents, other.documents) && Objects.equals(indexedDocumentIndex, other.indexedDocumentIndex) - && Objects.equals(indexedDocumentType, other.indexedDocumentType) && Objects.equals(documentSupplier, other.documentSupplier) && Objects.equals(indexedDocumentId, other.indexedDocumentId); @@ -472,7 +412,7 @@ protected boolean doEquals(PercolateQueryBuilder other) { @Override protected int doHashCode() { - return Objects.hash(field, documentType, documents, indexedDocumentIndex, indexedDocumentType, indexedDocumentId, documentSupplier); + return Objects.hash(field, documents, indexedDocumentIndex, indexedDocumentId, documentSupplier); } @Override @@ -491,7 +431,6 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) { } else { PercolateQueryBuilder rewritten = new PercolateQueryBuilder( field, - documentType, Collections.singletonList(source), XContentHelper.xContentType(source) ); @@ -513,20 +452,14 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) { client.get(getRequest, ActionListener.wrap(getResponse -> { if (getResponse.isExists() == false) { throw new ResourceNotFoundException( - "indexed document [{}{}/{}] couldn't be found", + "indexed document [{}/{}] couldn't be found", 
indexedDocumentIndex, - indexedDocumentType == null ? "" : "/" + indexedDocumentType, indexedDocumentId ); } if (getResponse.isSourceEmpty()) { throw new IllegalArgumentException( - "indexed document [" - + indexedDocumentIndex - + (indexedDocumentType == null ? "" : "/" + indexedDocumentType) - + "/" - + indexedDocumentId - + "] source disabled" + "indexed document [" + indexedDocumentIndex + "/" + indexedDocumentId + "] source disabled" ); } documentSupplier.set(getResponse.getSourceAsBytesRef()); @@ -534,7 +467,7 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) { }, listener::onFailure)); }); - PercolateQueryBuilder rewritten = new PercolateQueryBuilder(field, documentType, documentSupplier::get); + PercolateQueryBuilder rewritten = new PercolateQueryBuilder(field, documentSupplier::get); if (name != null) { rewritten.setName(name); } @@ -576,14 +509,6 @@ protected Query doToQuery(QueryShardContext context) throws IOException { final DocumentMapper docMapper; final MapperService mapperService = context.getMapperService(); String type = mapperService.documentMapper().type(); - if (documentType != null) { - deprecationLogger.deprecate("percolate_with_document_type", DOCUMENT_TYPE_DEPRECATION_MESSAGE); - if (documentType.equals(type) == false) { - throw new IllegalArgumentException( - "specified document_type [" + documentType + "] is not equal to the actual type [" + type + "]" - ); - } - } docMapper = mapperService.documentMapper(); for (BytesReference document : documents) { docs.add(docMapper.parse(new SourceToParse(context.index().getName(), "_temp_id", document, documentXContentType))); @@ -631,10 +556,6 @@ public String getField() { return field; } - public String getDocumentType() { - return documentType; - } - public List getDocuments() { return documents; } diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java 
b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java index 44d8d64086091..87aa28a3346bc 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolateQueryBuilderTests.java @@ -148,16 +148,14 @@ private PercolateQueryBuilder doCreateTestQueryBuilder(boolean indexedDocument) indexedDocumentVersion = (long) randomIntBetween(0, Integer.MAX_VALUE); queryBuilder = new PercolateQueryBuilder( queryField, - null, indexedDocumentIndex, - null, indexedDocumentId, indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion ); } else { - queryBuilder = new PercolateQueryBuilder(queryField, null, documentSource, XContentType.JSON); + queryBuilder = new PercolateQueryBuilder(queryField, documentSource, XContentType.JSON); } if (randomBoolean()) { queryBuilder.setName(randomAlphaOfLength(4)); @@ -217,7 +215,6 @@ protected GetResponse executeGet(GetRequest getRequest) { protected void doAssertLuceneQuery(PercolateQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException { assertThat(query, Matchers.instanceOf(PercolateQuery.class)); PercolateQuery percolateQuery = (PercolateQuery) query; - assertNull(queryBuilder.getDocumentType()); assertThat(percolateQuery.getDocuments(), Matchers.equalTo(documentSource)); } @@ -227,12 +224,7 @@ public void testMustRewrite() throws IOException { IllegalStateException e = expectThrows(IllegalStateException.class, () -> pqb.toQuery(createShardContext())); assertThat(e.getMessage(), equalTo("query builder must be rewritten first")); QueryBuilder rewrite = rewriteAndFetch(pqb, createShardContext()); - PercolateQueryBuilder geoShapeQueryBuilder = new PercolateQueryBuilder( - pqb.getField(), - pqb.getDocumentType(), - documentSource, - XContentType.JSON - ); + PercolateQueryBuilder geoShapeQueryBuilder = new PercolateQueryBuilder(pqb.getField(), 
documentSource, XContentType.JSON); assertEquals(geoShapeQueryBuilder, rewrite); } @@ -259,25 +251,19 @@ public void testRequiredParameters() { ); assertThat(e.getMessage(), equalTo("[field] is a required argument")); - e = expectThrows(IllegalArgumentException.class, () -> new PercolateQueryBuilder("_field", "_document_type", null, null)); - assertThat(e.getMessage(), equalTo("[document] is a required argument")); - e = expectThrows( IllegalArgumentException.class, - () -> { new PercolateQueryBuilder(null, null, "_index", "_type", "_id", null, null, null); } + () -> new PercolateQueryBuilder("_field", (List) null, XContentType.JSON) ); + assertThat(e.getMessage(), equalTo("[document] is a required argument")); + + e = expectThrows(IllegalArgumentException.class, () -> { new PercolateQueryBuilder(null, "_index", "_id", null, null, null); }); assertThat(e.getMessage(), equalTo("[field] is a required argument")); - e = expectThrows( - IllegalArgumentException.class, - () -> { new PercolateQueryBuilder("_field", "_document_type", null, "_type", "_id", null, null, null); } - ); + e = expectThrows(IllegalArgumentException.class, () -> { new PercolateQueryBuilder("_field", null, "_id", null, null, null); }); assertThat(e.getMessage(), equalTo("[index] is a required argument")); - e = expectThrows( - IllegalArgumentException.class, - () -> { new PercolateQueryBuilder("_field", "_document_type", "_index", "_type", null, null, null, null); } - ); + e = expectThrows(IllegalArgumentException.class, () -> { new PercolateQueryBuilder("_field", "_index", null, null, null, null); }); assertThat(e.getMessage(), equalTo("[id] is a required argument")); } @@ -287,15 +273,6 @@ public void testFromJsonNoDocumentType() throws IOException { queryBuilder.toQuery(queryShardContext); } - public void testFromJsonWithDocumentType() throws IOException { - QueryShardContext queryShardContext = createShardContext(); - QueryBuilder queryBuilder = parseQuery( - "{\"percolate\" : { \"document\": 
{}, \"document_type\":\"" + docType + "\", \"field\":\"" + queryField + "\"}}" - ); - queryBuilder.toQuery(queryShardContext); - assertWarnings(PercolateQueryBuilder.DOCUMENT_TYPE_DEPRECATION_MESSAGE); - } - public void testFromJsonNoType() throws IOException { indexedDocumentIndex = randomAlphaOfLength(4); indexedDocumentId = randomAlphaOfLength(4); @@ -315,25 +292,6 @@ public void testFromJsonNoType() throws IOException { rewriteAndFetch(queryBuilder, queryShardContext).toQuery(queryShardContext); } - public void testFromJsonWithType() throws IOException { - indexedDocumentIndex = randomAlphaOfLength(4); - indexedDocumentId = randomAlphaOfLength(4); - indexedDocumentVersion = Versions.MATCH_ANY; - documentSource = Collections.singletonList(randomSource(new HashSet<>())); - - QueryShardContext queryShardContext = createShardContext(); - QueryBuilder queryBuilder = parseQuery( - "{\"percolate\" : { \"index\": \"" - + indexedDocumentIndex - + "\", \"type\": \"_doc\", \"id\": \"" - + indexedDocumentId - + "\", \"field\":\"" - + queryField - + "\"}}" - ); - rewriteAndFetch(queryBuilder, queryShardContext).toQuery(queryShardContext); - } - public void testBothDocumentAndDocumentsSpecified() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -426,7 +384,7 @@ public void testSettingNameWhileRewritingWhenDocumentSupplierAndSourceNotNull() Supplier supplier = () -> new BytesArray("{\"test\": \"test\"}"); String testName = "name1"; QueryShardContext shardContext = createShardContext(); - PercolateQueryBuilder percolateQueryBuilder = new PercolateQueryBuilder(queryField, null, supplier); + PercolateQueryBuilder percolateQueryBuilder = new PercolateQueryBuilder(queryField, supplier); percolateQueryBuilder.setName(testName); QueryBuilder rewrittenQueryBuilder = percolateQueryBuilder.doRewrite(shardContext); From ba00dd44bbc18b3162a5e5b482589294dbb47f33 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Mar 2022 16:03:03 +0000 Subject: [PATCH 46/46] Bump commons-logging from 1.1.3 to 1.2 in /plugins/repository-hdfs Bumps commons-logging from 1.1.3 to 1.2. --- updated-dependencies: - dependency-name: commons-logging:commons-logging dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- plugins/repository-hdfs/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index c9c7c8e6ffced..ee3adc49f2d64 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -68,7 +68,7 @@ dependencies { api 'com.google.code.gson:gson:2.9.0' runtimeOnly 'com.google.guava:guava:30.1.1-jre' api 'com.google.protobuf:protobuf-java:3.19.3' - api 'commons-logging:commons-logging:1.1.3' + api 'commons-logging:commons-logging:1.2' api 'commons-cli:commons-cli:1.2' api "commons-codec:commons-codec:${versions.commonscodec}" api 'commons-collections:commons-collections:3.2.2'